1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2010, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "tm.h"
30 #include "tree.h"
31 #include "flags.h"
32 #include "toplev.h"
33 #include "rtl.h"
34 #include "output.h"
35 #include "ggc.h"
36 #include "debug.h"
37 #include "convert.h"
38 #include "target.h"
39 #include "function.h"
40 #include "langhooks.h"
41 #include "pointer-set.h"
42 #include "cgraph.h"
43 #include "tree-dump.h"
44 #include "tree-inline.h"
45 #include "tree-iterator.h"
46 #include "gimple.h"
48 #include "ada.h"
49 #include "types.h"
50 #include "atree.h"
51 #include "elists.h"
52 #include "namet.h"
53 #include "nlists.h"
54 #include "stringt.h"
55 #include "uintp.h"
56 #include "fe.h"
57 #include "sinfo.h"
58 #include "einfo.h"
59 #include "ada-tree.h"
60 #include "gigi.h"
62 #ifndef MAX_BITS_PER_WORD
63 #define MAX_BITS_PER_WORD BITS_PER_WORD
64 #endif
66 /* If nonzero, pretend we are allocating at global level. */
67 int force_global;
69 /* The default alignment of "double" floating-point types, i.e. floating
70 point types whose size is equal to 64 bits, or 0 if this alignment is
71 not specifically capped. */
72 int double_float_alignment;
74 /* The default alignment of "double" or larger scalar types, i.e. scalar
75 types whose size is greater or equal to 64 bits, or 0 if this alignment
76 is not specifically capped. */
77 int double_scalar_alignment;
79 /* Tree nodes for the various types and decls we create. */
80 tree gnat_std_decls[(int) ADT_LAST];
82 /* Functions to call for each of the possible raise reasons. */
83 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
85 /* Forward declarations for handlers of attributes. */
86 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
87 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
88 static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
89 static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
90 static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
91 static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
92 static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
93 static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
94 static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
95 static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
96 static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);
98 /* Fake handler for attributes we don't properly support, typically because
99 they'd require dragging a lot of the common-c front-end circuitry. */
100 static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
102 /* Table of machine-independent internal attributes for Ada. We support
103 this minimal set of attributes to accommodate the needs of builtins. */
104 const struct attribute_spec gnat_internal_attribute_table[] =
106 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
107 { "const", 0, 0, true, false, false, handle_const_attribute },
108 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute },
109 { "pure", 0, 0, true, false, false, handle_pure_attribute },
110 { "no vops", 0, 0, true, false, false, handle_novops_attribute },
111 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute },
112 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute },
113 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute },
114 { "malloc", 0, 0, true, false, false, handle_malloc_attribute },
115 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute },
117 { "vector_size", 1, 1, false, true, false, handle_vector_size_attribute },
118 { "vector_type", 0, 0, false, true, false, handle_vector_type_attribute },
119 { "may_alias", 0, 0, false, true, false, NULL },
121 /* ??? format and format_arg are heavy and not supported, which actually
122 prevents support for stdio builtins, which we however declare as part
123 of the common builtins.def contents. */
124 { "format", 3, 3, false, true, true, fake_attribute_handler },
125 { "format_arg", 1, 1, false, true, true, fake_attribute_handler },
127 { NULL, 0, 0, false, false, false, NULL }
130 /* Associates a GNAT tree node to a GCC tree node. It is used in
131 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
132 of `save_gnu_tree' for more info. */
133 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
135 #define GET_GNU_TREE(GNAT_ENTITY) \
136 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
138 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
139 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
141 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
142 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
144 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
145 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
147 #define GET_DUMMY_NODE(GNAT_ENTITY) \
148 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
150 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
151 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
153 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
154 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
156 /* This variable keeps a table for types for each precision so that we only
157 allocate each of them once. Signed and unsigned types are kept separate.
159 Note that these types are only used when fold-const requests something
160 special. Perhaps we should NOT share these types; we'll see how it
161 goes later. */
162 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
164 /* Likewise for float types, but record these by mode. */
165 static GTY(()) tree float_types[NUM_MACHINE_MODES];
167 /* For each binding contour we allocate a binding_level structure to indicate
168 the binding depth. */
170 struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
171 /* The binding level containing this one (the enclosing binding level). */
172 struct gnat_binding_level *chain;
173 /* The BLOCK node for this level. */
174 tree block;
175 /* If nonzero, the setjmp buffer that needs to be updated for any
176 variable-sized definition within this context. */
177 tree jmpbuf_decl;
180 /* The binding level currently in effect. */
181 static GTY(()) struct gnat_binding_level *current_binding_level;
183 /* A chain of gnat_binding_level structures awaiting reuse. */
184 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
186 /* An array of global declarations. */
187 static GTY(()) VEC(tree,gc) *global_decls;
189 /* An array of builtin function declarations. */
190 static GTY(()) VEC(tree,gc) *builtin_decls;
192 /* An array of global renaming pointers. */
193 static GTY(()) VEC(tree,gc) *global_renaming_pointers;
195 /* A chain of unused BLOCK nodes. */
196 static GTY((deletable)) tree free_block_chain;
198 static tree merge_sizes (tree, tree, tree, bool, bool);
199 static tree compute_related_constant (tree, tree);
200 static tree split_plus (tree, tree *);
201 static tree float_type_for_precision (int, enum machine_mode);
202 static tree convert_to_fat_pointer (tree, tree);
203 static tree convert_to_thin_pointer (tree, tree);
204 static tree make_descriptor_field (const char *,tree, tree, tree);
205 static bool potential_alignment_gap (tree, tree, tree);
206 static void process_attributes (tree, struct attrib *);
208 /* Initialize the association of GNAT nodes to GCC trees. */
210 void
211 init_gnat_to_gnu (void)
213 associate_gnat_to_gnu
214 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
217 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
218 which is to be associated with GNAT_ENTITY. Such a GCC tree node is always
219 a ..._DECL node. If NO_CHECK is true, the latter check is suppressed.
221 If GNU_DECL is zero, a previous association is to be reset. */
223 void
224 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
226 /* Check that GNAT_ENTITY is not already defined and that it is being set
227 to something which is a decl. Raise gigi 401 if not. Usually, this
228 means GNAT_ENTITY is defined twice, but occasionally is due to some
229 Gigi problem. */
230 gcc_assert (!(gnu_decl
231 && (PRESENT_GNU_TREE (gnat_entity)
232 || (!no_check && !DECL_P (gnu_decl)))));
234 SET_GNU_TREE (gnat_entity, gnu_decl);
237 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
238 Return the ..._DECL node that was associated with it. If there is no tree
239 node associated with GNAT_ENTITY, abort.
241 In some cases, such as delayed elaboration or expressions that need to
242 be elaborated only once, GNAT_ENTITY is really not an entity. */
244 tree
245 get_gnu_tree (Entity_Id gnat_entity)
247 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
248 return GET_GNU_TREE (gnat_entity);
251 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
253 bool
254 present_gnu_tree (Entity_Id gnat_entity)
256 return PRESENT_GNU_TREE (gnat_entity);
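/* A minimal usage sketch, kept under #if 0 so it is not compiled: how a
   caller can cache the ..._DECL built for a GNAT entity with the three
   primitives above.  The wrapper name and the incoming GNU_DECL are
   hypothetical.  */
#if 0
static tree
example_cache_entity (Entity_Id gnat_entity, tree gnu_decl)
{
  /* Reuse the previously saved tree if the entity was already translated.  */
  if (present_gnu_tree (gnat_entity))
    return get_gnu_tree (gnat_entity);

  /* Otherwise record the association; NO_CHECK is false so the assertion
     that GNU_DECL really is a ..._DECL node stays active.  */
  save_gnu_tree (gnat_entity, gnu_decl, false);
  return gnu_decl;
}
#endif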
259 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
261 void
262 init_dummy_type (void)
264 dummy_node_table
265 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
268 /* Make a dummy type corresponding to GNAT_TYPE. */
270 tree
271 make_dummy_type (Entity_Id gnat_type)
273 Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
274 tree gnu_type;
276 /* If there is an equivalent type, get its underlying type. */
277 if (Present (gnat_underlying))
278 gnat_underlying = Underlying_Type (gnat_underlying);
280 /* If there was no equivalent type (can only happen when just annotating
281 types) or underlying type, go back to the original type. */
282 if (No (gnat_underlying))
283 gnat_underlying = gnat_type;
285 /* If there is already a dummy type, use that one. Else make one. */
286 if (PRESENT_DUMMY_NODE (gnat_underlying))
287 return GET_DUMMY_NODE (gnat_underlying);
289 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
290 an ENUMERAL_TYPE. */
291 gnu_type = make_node (Is_Record_Type (gnat_underlying)
292 ? tree_code_for_record_type (gnat_underlying)
293 : ENUMERAL_TYPE);
294 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
295 TYPE_DUMMY_P (gnu_type) = 1;
296 TYPE_STUB_DECL (gnu_type)
297 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
298 if (Is_By_Reference_Type (gnat_type))
299 TREE_ADDRESSABLE (gnu_type) = 1;
301 SET_DUMMY_NODE (gnat_underlying, gnu_type);
303 return gnu_type;
306 /* Return nonzero if we are currently in the global binding level. */
308 int
309 global_bindings_p (void)
311 return ((force_global || !current_function_decl) ? -1 : 0);
314 /* Enter a new binding level. */
316 void
317 gnat_pushlevel (void)
319 struct gnat_binding_level *newlevel = NULL;
321 /* Reuse a struct for this binding level, if there is one. */
322 if (free_binding_level)
324 newlevel = free_binding_level;
325 free_binding_level = free_binding_level->chain;
327 else
328 newlevel
329 = (struct gnat_binding_level *)
330 ggc_alloc (sizeof (struct gnat_binding_level));
332 /* Use a free BLOCK, if any; otherwise, allocate one. */
333 if (free_block_chain)
335 newlevel->block = free_block_chain;
336 free_block_chain = BLOCK_CHAIN (free_block_chain);
337 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
339 else
340 newlevel->block = make_node (BLOCK);
342 /* Point the BLOCK we just made to its parent. */
343 if (current_binding_level)
344 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
346 BLOCK_VARS (newlevel->block) = NULL_TREE;
347 BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
348 TREE_USED (newlevel->block) = 1;
350 /* Add this level to the front of the chain (stack) of active levels. */
351 newlevel->chain = current_binding_level;
352 newlevel->jmpbuf_decl = NULL_TREE;
353 current_binding_level = newlevel;
356 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
357 and point FNDECL to this BLOCK. */
359 void
360 set_current_block_context (tree fndecl)
362 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
363 DECL_INITIAL (fndecl) = current_binding_level->block;
364 set_block_for_group (current_binding_level->block);
367 /* Set the jmpbuf_decl for the current binding level to DECL. */
369 void
370 set_block_jmpbuf_decl (tree decl)
372 current_binding_level->jmpbuf_decl = decl;
375 /* Get the jmpbuf_decl, if any, for the current binding level. */
377 tree
378 get_block_jmpbuf_decl (void)
380 return current_binding_level->jmpbuf_decl;
383 /* Exit a binding level. Set any BLOCK into the current code group. */
385 void
386 gnat_poplevel (void)
388 struct gnat_binding_level *level = current_binding_level;
389 tree block = level->block;
391 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
392 BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));
394 /* If this is a function-level BLOCK, don't do anything.  Otherwise, if it
395 has no variables, free the block and merge its subblocks into those of
396 its parent block; otherwise, chain it onto its parent's subblocks. */
397 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
398 ;
399 else if (BLOCK_VARS (block) == NULL_TREE)
401 BLOCK_SUBBLOCKS (level->chain->block)
402 = chainon (BLOCK_SUBBLOCKS (block),
403 BLOCK_SUBBLOCKS (level->chain->block));
404 BLOCK_CHAIN (block) = free_block_chain;
405 free_block_chain = block;
407 else
409 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
410 BLOCK_SUBBLOCKS (level->chain->block) = block;
411 TREE_USED (block) = 1;
412 set_block_for_group (block);
415 /* Free this binding structure. */
416 current_binding_level = level->chain;
417 level->chain = free_binding_level;
418 free_binding_level = level;
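/* A minimal sketch, kept under #if 0, of how the binding-level primitives
   above are meant to be paired around the translation of a nested scope of
   the current function.  The local variable created here ("x", of the base
   integer type) is purely illustrative.  */
#if 0
static void
example_translate_scope (Node_Id gnat_node)
{
  gnat_pushlevel ();

  /* Declarations made while the level is active land in its BLOCK via
     gnat_pushdecl, which create_var_decl_1 calls internally.  */
  create_var_decl_1 (get_identifier ("x"), NULL_TREE, integer_type_node,
		     NULL_TREE, false, false, false, false, true,
		     NULL, gnat_node);

  /* ... translate the statements of the scope ...  */

  gnat_poplevel ();
}
#endif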
422 /* Records a ..._DECL node DECL as belonging to the current lexical scope
423 and uses GNAT_NODE for location information and propagating flags. */
425 void
426 gnat_pushdecl (tree decl, Node_Id gnat_node)
428 /* If this decl is public external or at top level, there is no context.
429 But a PARM_DECL always goes in the level of its function. */
430 if (TREE_CODE (decl) != PARM_DECL
431 && ((DECL_EXTERNAL (decl) && TREE_PUBLIC (decl))
432 || global_bindings_p ()))
433 DECL_CONTEXT (decl) = 0;
434 else
436 DECL_CONTEXT (decl) = current_function_decl;
438 /* Functions imported in another function are not really nested.
439 For really nested functions mark them initially as needing
440 a static chain for uses of that flag before unnesting;
441 lower_nested_functions will then recompute it. */
442 if (TREE_CODE (decl) == FUNCTION_DECL && !TREE_PUBLIC (decl))
443 DECL_STATIC_CHAIN (decl) = 1;
446 TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));
448 /* Set the location of DECL and emit a declaration for it. */
449 if (Present (gnat_node))
450 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
451 add_decl_expr (decl, gnat_node);
453 /* Put the declaration on the list. The list of declarations is in reverse
454 order. The list will be reversed later. Put global variables in the
455 globals list and builtin functions in a dedicated list to speed up
456 further lookups. Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
457 the list, as they will cause trouble with the debugger and aren't needed
458 anyway. */
459 if (TREE_CODE (decl) != TYPE_DECL
460 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
462 if (global_bindings_p ())
464 VEC_safe_push (tree, gc, global_decls, decl);
466 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
467 VEC_safe_push (tree, gc, builtin_decls, decl);
469 else
471 TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
472 BLOCK_VARS (current_binding_level->block) = decl;
476 /* For the declaration of a type, set its name if it either is not already
477 set or if the previous type name was not derived from a source name.
478 We'd rather have the type named with a real name and all the pointer
479 types to the same object have the same POINTER_TYPE node. Code in the
480 equivalent function of c-decl.c makes a copy of the type node here, but
481 that may cause us trouble with incomplete types. We make an exception
482 for fat pointer types because the compiler automatically builds them
483 for unconstrained array types and the debugger uses them to represent
484 both these and pointers to these. */
485 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
487 tree t = TREE_TYPE (decl);
489 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
490 ;
491 else if (TYPE_IS_FAT_POINTER_P (t))
493 tree tt = build_variant_type_copy (t);
494 TYPE_NAME (tt) = decl;
495 TREE_USED (tt) = TREE_USED (t);
496 TREE_TYPE (decl) = tt;
497 if (DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
498 DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
499 else
500 DECL_ORIGINAL_TYPE (decl) = t;
501 t = NULL_TREE;
502 DECL_ARTIFICIAL (decl) = 0;
504 else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
505 ;
506 else
507 t = NULL_TREE;
509 /* Propagate the name to all the variants. This is needed for
510 the type qualifiers machinery to work properly. */
511 if (t)
512 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
513 TYPE_NAME (t) = decl;
517 /* Record TYPE as a builtin type for Ada. NAME is the name of the type. */
519 void
520 record_builtin_type (const char *name, tree type)
522 tree type_decl = build_decl (input_location,
523 TYPE_DECL, get_identifier (name), type);
525 gnat_pushdecl (type_decl, Empty);
527 if (debug_hooks->type_decl)
528 debug_hooks->type_decl (type_decl, false);
531 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
532 finish constructing the record or union type. If REP_LEVEL is zero, this
533 record has no representation clause and so will be entirely laid out here.
534 If REP_LEVEL is one, this record has a representation clause and has been
535 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
536 this record is derived from a parent record and thus inherits its layout;
537 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
538 we need to write debug information about this type. */
540 void
541 finish_record_type (tree record_type, tree field_list, int rep_level,
542 bool debug_info_p)
544 enum tree_code code = TREE_CODE (record_type);
545 tree name = TYPE_NAME (record_type);
546 tree ada_size = bitsize_zero_node;
547 tree size = bitsize_zero_node;
548 bool had_size = TYPE_SIZE (record_type) != 0;
549 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
550 bool had_align = TYPE_ALIGN (record_type) != 0;
551 tree field;
553 TYPE_FIELDS (record_type) = field_list;
555 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
556 generate debug info and have a parallel type. */
557 if (name && TREE_CODE (name) == TYPE_DECL)
558 name = DECL_NAME (name);
559 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
561 /* Globally initialize the record first. If this is a rep'ed record,
562 that just means some initializations; otherwise, layout the record. */
563 if (rep_level > 0)
565 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
567 if (!had_size_unit)
568 TYPE_SIZE_UNIT (record_type) = size_zero_node;
570 if (!had_size)
571 TYPE_SIZE (record_type) = bitsize_zero_node;
573 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
574 out just like a UNION_TYPE, since the size will be fixed. */
575 else if (code == QUAL_UNION_TYPE)
576 code = UNION_TYPE;
578 else
580 /* Ensure there isn't a size already set. There can be in an error
581 case where there is a rep clause but all fields have errors and
582 no longer have a position. */
583 TYPE_SIZE (record_type) = 0;
584 layout_type (record_type);
587 /* At this point, the position and size of each field is known. It was
588 either set before entry by a rep clause, or by laying out the type above.
590 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
591 to compute the Ada size; the GCC size and alignment (for rep'ed records
592 that are not padding types); and the mode (for rep'ed records). We also
593 clear the DECL_BIT_FIELD indication for the cases we know have not been
594 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
596 if (code == QUAL_UNION_TYPE)
597 field_list = nreverse (field_list);
599 for (field = field_list; field; field = TREE_CHAIN (field))
601 tree type = TREE_TYPE (field);
602 tree pos = bit_position (field);
603 tree this_size = DECL_SIZE (field);
604 tree this_ada_size;
606 if ((TREE_CODE (type) == RECORD_TYPE
607 || TREE_CODE (type) == UNION_TYPE
608 || TREE_CODE (type) == QUAL_UNION_TYPE)
609 && !TYPE_FAT_POINTER_P (type)
610 && !TYPE_CONTAINS_TEMPLATE_P (type)
611 && TYPE_ADA_SIZE (type))
612 this_ada_size = TYPE_ADA_SIZE (type);
613 else
614 this_ada_size = this_size;
616 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
617 if (DECL_BIT_FIELD (field)
618 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
620 unsigned int align = TYPE_ALIGN (type);
622 /* In the general case, type alignment is required. */
623 if (value_factor_p (pos, align))
625 /* The enclosing record type must be sufficiently aligned.
626 Otherwise, if no alignment was specified for it and it
627 has been laid out already, bump its alignment to the
628 desired one if this is compatible with its size. */
629 if (TYPE_ALIGN (record_type) >= align)
631 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
632 DECL_BIT_FIELD (field) = 0;
634 else if (!had_align
635 && rep_level == 0
636 && value_factor_p (TYPE_SIZE (record_type), align))
638 TYPE_ALIGN (record_type) = align;
639 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
640 DECL_BIT_FIELD (field) = 0;
644 /* In the non-strict alignment case, only byte alignment is required. */
645 if (!STRICT_ALIGNMENT
646 && DECL_BIT_FIELD (field)
647 && value_factor_p (pos, BITS_PER_UNIT))
648 DECL_BIT_FIELD (field) = 0;
651 /* If we still have DECL_BIT_FIELD set at this point, we know that the
652 field is technically not addressable. Except that it can actually
653 be addressed if it is BLKmode and happens to be properly aligned. */
654 if (DECL_BIT_FIELD (field)
655 && !(DECL_MODE (field) == BLKmode
656 && value_factor_p (pos, BITS_PER_UNIT)))
657 DECL_NONADDRESSABLE_P (field) = 1;
659 /* A type must be as aligned as its most aligned field that is not
660 a bit-field. But this is already enforced by layout_type. */
661 if (rep_level > 0 && !DECL_BIT_FIELD (field))
662 TYPE_ALIGN (record_type)
663 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
665 switch (code)
667 case UNION_TYPE:
668 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
669 size = size_binop (MAX_EXPR, size, this_size);
670 break;
672 case QUAL_UNION_TYPE:
673 ada_size
674 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
675 this_ada_size, ada_size);
676 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
677 this_size, size);
678 break;
680 case RECORD_TYPE:
681 /* Since we know here that all fields are sorted in order of
682 increasing bit position, the size of the record is one
683 higher than the ending bit of the last field processed
684 unless we have a rep clause, since in that case we might
685 have a field outside a QUAL_UNION_TYPE that has a higher ending
686 position. So use a MAX in that case. Also, if this field is a
687 QUAL_UNION_TYPE, we need to take into account the previous size in
688 the case of empty variants. */
689 ada_size
690 = merge_sizes (ada_size, pos, this_ada_size,
691 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
692 size
693 = merge_sizes (size, pos, this_size,
694 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
695 break;
697 default:
698 gcc_unreachable ();
702 if (code == QUAL_UNION_TYPE)
703 nreverse (field_list);
705 if (rep_level < 2)
707 /* If this is a padding record, we never want to make the size smaller
708 than what was specified in it, if any. */
709 if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
710 size = TYPE_SIZE (record_type);
712 /* Now set any of the values we've just computed that apply. */
713 if (!TYPE_FAT_POINTER_P (record_type)
714 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
715 SET_TYPE_ADA_SIZE (record_type, ada_size);
717 if (rep_level > 0)
719 tree size_unit = had_size_unit
720 ? TYPE_SIZE_UNIT (record_type)
721 : convert (sizetype,
722 size_binop (CEIL_DIV_EXPR, size,
723 bitsize_unit_node));
724 unsigned int align = TYPE_ALIGN (record_type);
726 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
727 TYPE_SIZE_UNIT (record_type)
728 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
730 compute_record_mode (record_type);
734 if (debug_info_p)
735 rest_of_record_type_compilation (record_type);
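/* A minimal sketch, kept under #if 0, of the expected calling sequence for
   finish_record_type: make the RECORD_TYPE node, build its fields with
   create_field_decl (see below), chain them, then let rep_level 0 lay the
   record out.  The record name and its two integer components are purely
   illustrative.  */
#if 0
static tree
example_build_record (void)
{
  tree record = make_node (RECORD_TYPE);
  tree f1, f2;

  TYPE_NAME (record) = get_identifier ("example_rec");

  f1 = create_field_decl (get_identifier ("a"), integer_type_node, record,
			  0, NULL_TREE, NULL_TREE, 0);
  f2 = create_field_decl (get_identifier ("b"), integer_type_node, record,
			  0, NULL_TREE, NULL_TREE, 0);
  TREE_CHAIN (f1) = f2;

  /* No rep clause (rep_level 0) and no debug info for this sketch.  */
  finish_record_type (record, f1, 0, false);

  return record;
}
#endif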
738 /* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
739 associated with it. It need not be invoked directly in most cases since
740 finish_record_type takes care of doing so, but this can be necessary if
741 a parallel type is to be attached to the record type. */
743 void
744 rest_of_record_type_compilation (tree record_type)
746 tree field_list = TYPE_FIELDS (record_type);
747 tree field;
748 enum tree_code code = TREE_CODE (record_type);
749 bool var_size = false;
751 for (field = field_list; field; field = TREE_CHAIN (field))
753 /* We need to make an XVE/XVU record if any field has variable size,
754 whether or not the record does. For example, if we have a union,
755 it may be that all fields, rounded up to the alignment, have the
756 same size, in which case we'll use that size. But the debug
757 output routines (except Dwarf2) won't be able to output the fields,
758 so we need to make the special record. */
759 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
760 /* If a field has a non-constant qualifier, the record will have
761 variable size too. */
762 || (code == QUAL_UNION_TYPE
763 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
765 var_size = true;
766 break;
770 /* If this record is of variable size, rename it so that the
771 debugger knows it is and make a new, parallel, record
772 that tells the debugger how the record is laid out. See
773 exp_dbug.ads. But don't do this for records that are padding
774 since they confuse GDB. */
775 if (var_size && !TYPE_IS_PADDING_P (record_type))
777 tree new_record_type
778 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
779 ? UNION_TYPE : TREE_CODE (record_type));
780 tree orig_name = TYPE_NAME (record_type), new_name;
781 tree last_pos = bitsize_zero_node;
782 tree old_field, prev_old_field = NULL_TREE;
784 if (TREE_CODE (orig_name) == TYPE_DECL)
785 orig_name = DECL_NAME (orig_name);
787 new_name
788 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
789 ? "XVU" : "XVE");
790 TYPE_NAME (new_record_type) = new_name;
791 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
792 TYPE_STUB_DECL (new_record_type)
793 = create_type_stub_decl (new_name, new_record_type);
794 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
795 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
796 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
797 TYPE_SIZE_UNIT (new_record_type)
798 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
800 add_parallel_type (TYPE_STUB_DECL (record_type), new_record_type);
802 /* Now scan all the fields, replacing each field with a new
803 field corresponding to the new encoding. */
804 for (old_field = TYPE_FIELDS (record_type); old_field;
805 old_field = TREE_CHAIN (old_field))
807 tree field_type = TREE_TYPE (old_field);
808 tree field_name = DECL_NAME (old_field);
809 tree new_field;
810 tree curpos = bit_position (old_field);
811 bool var = false;
812 unsigned int align = 0;
813 tree pos;
815 /* See how the position was modified from the last position.
817 There are two basic cases we support: a value was added
818 to the last position or the last position was rounded to
819 a boundary and then something was added. Check for the
820 first case first. If not, see if there is any evidence
821 of rounding. If so, round the last position and try
822 again.
824 If this is a union, the position can be taken as zero. */
826 /* Some computations depend on the shape of the position expression,
827 so strip conversions to make sure it's exposed. */
828 curpos = remove_conversions (curpos, true);
830 if (TREE_CODE (new_record_type) == UNION_TYPE)
831 pos = bitsize_zero_node, align = 0;
832 else
833 pos = compute_related_constant (curpos, last_pos);
835 if (!pos && TREE_CODE (curpos) == MULT_EXPR
836 && host_integerp (TREE_OPERAND (curpos, 1), 1))
838 tree offset = TREE_OPERAND (curpos, 0);
839 align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
841 /* An offset which is a bitwise AND with a negative power of 2
842 means an alignment corresponding to this power of 2. */
843 offset = remove_conversions (offset, true);
844 if (TREE_CODE (offset) == BIT_AND_EXPR
845 && host_integerp (TREE_OPERAND (offset, 1), 0)
846 && tree_int_cst_sgn (TREE_OPERAND (offset, 1)) < 0)
848 unsigned int pow
849 = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
850 if (exact_log2 (pow) > 0)
851 align *= pow;
854 pos = compute_related_constant (curpos,
855 round_up (last_pos, align));
857 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
858 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
859 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
860 && host_integerp (TREE_OPERAND
861 (TREE_OPERAND (curpos, 0), 1),
862 1))
864 align
865 = tree_low_cst
866 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
867 pos = compute_related_constant (curpos,
868 round_up (last_pos, align));
870 else if (potential_alignment_gap (prev_old_field, old_field,
871 pos))
873 align = TYPE_ALIGN (field_type);
874 pos = compute_related_constant (curpos,
875 round_up (last_pos, align));
878 /* If we can't compute a position, set it to zero.
880 ??? We really should abort here, but it's too much work
881 to get this correct for all cases. */
883 if (!pos)
884 pos = bitsize_zero_node;
886 /* See if this type is variable-sized and make a pointer type
887 and indicate the indirection if so. Beware that the debug
888 back-end may adjust the position computed above according
889 to the alignment of the field type, i.e. the pointer type
890 in this case, if we don't preventively counter that. */
891 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
893 field_type = build_pointer_type (field_type);
894 if (align != 0 && TYPE_ALIGN (field_type) > align)
896 field_type = copy_node (field_type);
897 TYPE_ALIGN (field_type) = align;
899 var = true;
902 /* Make a new field name, if necessary. */
903 if (var || align != 0)
905 char suffix[16];
907 if (align != 0)
908 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
909 align / BITS_PER_UNIT);
910 else
911 strcpy (suffix, "XVL");
913 field_name = concat_name (field_name, suffix);
916 new_field = create_field_decl (field_name, field_type,
917 new_record_type, 0,
918 DECL_SIZE (old_field), pos, 0);
919 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
920 TYPE_FIELDS (new_record_type) = new_field;
922 /* If old_field's type is a QUAL_UNION_TYPE, take its size as being
923 zero. The only time it's not the last field of the record
924 is when there are other components at fixed positions after
925 it (meaning there was a rep clause for every field) and we
926 want to be able to encode them. */
927 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
928 (TREE_CODE (TREE_TYPE (old_field))
929 == QUAL_UNION_TYPE)
930 ? bitsize_zero_node
931 : DECL_SIZE (old_field));
932 prev_old_field = old_field;
935 TYPE_FIELDS (new_record_type)
936 = nreverse (TYPE_FIELDS (new_record_type));
938 rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type));
941 rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
944 /* Append PARALLEL_TYPE on the chain of parallel types of DECL. */
946 void
947 add_parallel_type (tree decl, tree parallel_type)
949 tree d = decl;
951 while (DECL_PARALLEL_TYPE (d))
952 d = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (d));
954 SET_DECL_PARALLEL_TYPE (d, parallel_type);
957 /* Return the parallel type associated to a type, if any. */
959 tree
960 get_parallel_type (tree type)
962 if (TYPE_STUB_DECL (type))
963 return DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type));
964 else
965 return NULL_TREE;
968 /* Utility function of above to merge LAST_SIZE, the previous size of a record
969 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
970 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
971 replace a value of zero with the old size. If HAS_REP is true, we take the
972 MAX of the end position of this field with LAST_SIZE. In all other cases,
973 we use FIRST_BIT plus SIZE. Return an expression for the size. */
975 static tree
976 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
977 bool has_rep)
979 tree type = TREE_TYPE (last_size);
980 tree new_size;
982 if (!special || TREE_CODE (size) != COND_EXPR)
984 new_size = size_binop (PLUS_EXPR, first_bit, size);
985 if (has_rep)
986 new_size = size_binop (MAX_EXPR, last_size, new_size);
989 else
990 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
991 integer_zerop (TREE_OPERAND (size, 1))
992 ? last_size : merge_sizes (last_size, first_bit,
993 TREE_OPERAND (size, 1),
994 1, has_rep),
995 integer_zerop (TREE_OPERAND (size, 2))
996 ? last_size : merge_sizes (last_size, first_bit,
997 TREE_OPERAND (size, 2),
998 1, has_rep));
1000 /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
1001 when fed through substitute_in_expr) into thinking that a constant
1002 size is not constant. */
1003 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
1004 new_size = TREE_OPERAND (new_size, 0);
1006 return new_size;
1009 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1010 related by the addition of a constant. Return that constant if so. */
1012 static tree
1013 compute_related_constant (tree op0, tree op1)
1015 tree op0_var, op1_var;
1016 tree op0_con = split_plus (op0, &op0_var);
1017 tree op1_con = split_plus (op1, &op1_var);
1018 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1020 if (operand_equal_p (op0_var, op1_var, 0))
1021 return result;
1022 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1023 return result;
1024 else
1025 return 0;
1028 /* Utility function of above to split a tree OP which may be a sum, into a
1029 constant part, which is returned, and a variable part, which is stored
1030 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1031 bitsizetype. */
1033 static tree
1034 split_plus (tree in, tree *pvar)
1036 /* Strip NOPS in order to ease the tree traversal and maximize the
1037 potential for constant or plus/minus discovery. We need to be careful
1038 to always return and set *pvar to bitsizetype trees, but it's worth
1039 the effort. */
1040 STRIP_NOPS (in);
1042 *pvar = convert (bitsizetype, in);
1044 if (TREE_CODE (in) == INTEGER_CST)
1046 *pvar = bitsize_zero_node;
1047 return convert (bitsizetype, in);
1049 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1051 tree lhs_var, rhs_var;
1052 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1053 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1055 if (lhs_var == TREE_OPERAND (in, 0)
1056 && rhs_var == TREE_OPERAND (in, 1))
1057 return bitsize_zero_node;
1059 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1060 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1062 else
1063 return bitsize_zero_node;
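/* A small sketch, kept under #if 0, of what the two helpers above compute:
   for the bit positions OFF + 24 and OFF + 8, split_plus peels off the
   constant parts and compute_related_constant returns their difference,
   i.e. bitsize_int (16).  OFF stands for a hypothetical variable offset of
   type bitsizetype.  */
#if 0
static tree
example_position_delta (tree off)
{
  tree pos0 = size_binop (PLUS_EXPR, off, bitsize_int (24));
  tree pos1 = size_binop (PLUS_EXPR, off, bitsize_int (8));

  /* Non-null because both positions share the same variable part OFF.  */
  return compute_related_constant (pos0, pos1);
}
#endif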
1066 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1067 subprogram. If it is VOID_TYPE, then we are dealing with a procedure,
1068 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1069 PARM_DECL nodes that are the subprogram parameters. CICO_LIST is the
1070 copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
1071 RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
1072 object. RETURN_BY_DIRECT_REF_P is true if the function returns by direct
1073 reference. RETURN_BY_INVISI_REF_P is true if the function returns by
1074 invisible reference. */
1076 tree
1077 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1078 bool return_unconstrained_p, bool return_by_direct_ref_p,
1079 bool return_by_invisi_ref_p)
1081 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1082 the subprogram formal parameters. This list is generated by traversing
1083 the input list of PARM_DECL nodes. */
1084 tree param_type_list = NULL_TREE;
1085 tree t, type;
1087 for (t = param_decl_list; t; t = TREE_CHAIN (t))
1088 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (t), param_type_list);
1090 /* The list of the function parameter types has to be terminated by the void
1091 type to signal to the back-end that we are not dealing with a variable
1092 parameter subprogram, but that it has a fixed number of parameters. */
1093 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1095 /* The list of argument types has been created in reverse so reverse it. */
1096 param_type_list = nreverse (param_type_list);
1098 type = build_function_type (return_type, param_type_list);
1100 /* TYPE may have been shared since GCC hashes types. If it has a different
1101 CICO_LIST, make a copy. Likewise for the various flags. */
1102 if (TYPE_CI_CO_LIST (type) != cico_list
1103 || TYPE_RETURN_UNCONSTRAINED_P (type) != return_unconstrained_p
1104 || TYPE_RETURN_BY_DIRECT_REF_P (type) != return_by_direct_ref_p
1105 || TREE_ADDRESSABLE (type) != return_by_invisi_ref_p)
1107 type = copy_type (type);
1108 TYPE_CI_CO_LIST (type) = cico_list;
1109 TYPE_RETURN_UNCONSTRAINED_P (type) = return_unconstrained_p;
1110 TYPE_RETURN_BY_DIRECT_REF_P (type) = return_by_direct_ref_p;
1111 TREE_ADDRESSABLE (type) = return_by_invisi_ref_p;
1114 return type;
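/* A minimal sketch, kept under #if 0: the FUNCTION_TYPE for a hypothetical
   function taking one In parameter of the base integer type and returning
   that same type, with no copy-in/copy-out list and a constrained result.  */
#if 0
static tree
example_subprog_type (void)
{
  tree param = create_param_decl (get_identifier ("x"), integer_type_node,
				  true);

  return create_subprog_type (integer_type_node, param, NULL_TREE,
			      false, false, false);
}
#endif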
1117 /* Return a copy of TYPE but safe to modify in any way. */
1119 tree
1120 copy_type (tree type)
1122 tree new_type = copy_node (type);
1124 /* Unshare the language-specific data. */
1125 if (TYPE_LANG_SPECIFIC (type))
1127 TYPE_LANG_SPECIFIC (new_type) = NULL;
1128 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
1131 /* And the contents of the language-specific slot if needed. */
1132 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
1133 && TYPE_RM_VALUES (type))
1135 TYPE_RM_VALUES (new_type) = NULL_TREE;
1136 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
1137 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
1138 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
1141 /* copy_node clears this field instead of copying it, because it is
1142 aliased with TREE_CHAIN. */
1143 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
1145 TYPE_POINTER_TO (new_type) = 0;
1146 TYPE_REFERENCE_TO (new_type) = 0;
1147 TYPE_MAIN_VARIANT (new_type) = new_type;
1148 TYPE_NEXT_VARIANT (new_type) = 0;
1150 return new_type;
1153 /* Return a subtype of sizetype with range MIN to MAX and whose
1154 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
1155 of the associated TYPE_DECL. */
1157 tree
1158 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
1160 /* First build a type for the desired range. */
1161 tree type = build_index_2_type (min, max);
1163 /* If this type has the TYPE_INDEX_TYPE we want, return it. */
1164 if (TYPE_INDEX_TYPE (type) == index)
1165 return type;
1167 /* Otherwise, if TYPE_INDEX_TYPE is set, make a copy. Note that we have
1168 no way of sharing these types, but that's only a small hole. */
1169 if (TYPE_INDEX_TYPE (type))
1170 type = copy_type (type);
1172 SET_TYPE_INDEX_TYPE (type, index);
1173 create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);
1175 return type;
1178 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
1179 sizetype is used. */
1181 tree
1182 create_range_type (tree type, tree min, tree max)
1184 tree range_type;
1186 if (type == NULL_TREE)
1187 type = sizetype;
1189 /* First build a type with the base range. */
1190 range_type
1191 = build_range_type (type, TYPE_MIN_VALUE (type), TYPE_MAX_VALUE (type));
1193 min = convert (type, min);
1194 max = convert (type, max);
1196 /* If this type has the TYPE_RM_{MIN,MAX}_VALUE we want, return it. */
1197 if (TYPE_RM_MIN_VALUE (range_type)
1198 && TYPE_RM_MAX_VALUE (range_type)
1199 && operand_equal_p (TYPE_RM_MIN_VALUE (range_type), min, 0)
1200 && operand_equal_p (TYPE_RM_MAX_VALUE (range_type), max, 0))
1201 return range_type;
1203 /* Otherwise, if TYPE_RM_{MIN,MAX}_VALUE is set, make a copy. */
1204 if (TYPE_RM_MIN_VALUE (range_type) || TYPE_RM_MAX_VALUE (range_type))
1205 range_type = copy_type (range_type);
1207 /* Then set the actual range. */
1208 SET_TYPE_RM_MIN_VALUE (range_type, min);
1209 SET_TYPE_RM_MAX_VALUE (range_type, max);
1211 return range_type;
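/* A minimal sketch, kept under #if 0: the subtype a declaration like
   "subtype S is Integer range 1 .. 10" would need, built on top of the
   base integer type.  */
#if 0
static tree
example_range_type (void)
{
  return create_range_type (integer_type_node,
			    build_int_cst (integer_type_node, 1),
			    build_int_cst (integer_type_node, 10));
}
#endif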
1214 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
1215 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
1216 its data type. */
1218 tree
1219 create_type_stub_decl (tree type_name, tree type)
1221 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
1222 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
1223 emitted in DWARF. */
1224 tree type_decl = build_decl (input_location,
1225 TYPE_DECL, type_name, type);
1226 DECL_ARTIFICIAL (type_decl) = 1;
1227 return type_decl;
1230 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
1231 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
1232 is a declaration that was generated by the compiler. DEBUG_INFO_P is
1233 true if we need to write debug information about this type. GNAT_NODE
1234 is used for the position of the decl. */
1236 tree
1237 create_type_decl (tree type_name, tree type, struct attrib *attr_list,
1238 bool artificial_p, bool debug_info_p, Node_Id gnat_node)
1240 enum tree_code code = TREE_CODE (type);
1241 bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
1242 tree type_decl;
1244 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
1245 gcc_assert (!TYPE_IS_DUMMY_P (type));
1247 /* If the type hasn't been named yet, we're naming it; preserve an existing
1248 TYPE_STUB_DECL that has been attached to it for some purpose. */
1249 if (!named && TYPE_STUB_DECL (type))
1251 type_decl = TYPE_STUB_DECL (type);
1252 DECL_NAME (type_decl) = type_name;
1254 else
1255 type_decl = build_decl (input_location,
1256 TYPE_DECL, type_name, type);
1258 DECL_ARTIFICIAL (type_decl) = artificial_p;
1260 /* Add this decl to the current binding level. */
1261 gnat_pushdecl (type_decl, gnat_node);
1263 process_attributes (type_decl, attr_list);
1265 /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
1266 This causes the name to be also viewed as a "tag" by the debug
1267 back-end, with the advantage that no DW_TAG_typedef is emitted
1268 for artificial "tagged" types in DWARF. */
1269 if (!named)
1270 TYPE_STUB_DECL (type) = type_decl;
1272 /* Pass the type declaration to the debug back-end unless this is an
1273 UNCONSTRAINED_ARRAY_TYPE that the back-end does not support, or a
1274 type for which debugging information was not requested, or else an
1275 ENUMERAL_TYPE or RECORD_TYPE (except for fat pointers) which are
1276 handled separately. And do not pass dummy types either. */
1277 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
1278 DECL_IGNORED_P (type_decl) = 1;
1279 else if (code != ENUMERAL_TYPE
1280 && (code != RECORD_TYPE || TYPE_FAT_POINTER_P (type))
1281 && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
1282 && TYPE_IS_DUMMY_P (TREE_TYPE (type)))
1283 && !(code == RECORD_TYPE
1284 && TYPE_IS_DUMMY_P
1285 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))))))
1286 rest_of_type_decl_compilation (type_decl);
1288 return type_decl;
1291 /* Return a VAR_DECL or CONST_DECL node.
1293 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
1294 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
1295 the GCC tree for an optional initial expression; NULL_TREE if none.
1297 CONST_FLAG is true if this variable is constant, in which case we might
1298 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
1300 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
1301 definition to be made visible outside of the current compilation unit, for
1302 instance variable definitions in a package specification.
1304 EXTERN_FLAG is true when processing an external variable declaration (as
1305 opposed to a definition: no storage is to be allocated for the variable).
1307 STATIC_FLAG is only relevant when not at top level. In that case
1308 it indicates whether to always allocate storage to the variable.
1310 GNAT_NODE is used for the position of the decl. */
1312 tree
1313 create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
1314 bool const_flag, bool public_flag, bool extern_flag,
1315 bool static_flag, bool const_decl_allowed_p,
1316 struct attrib *attr_list, Node_Id gnat_node)
1318 bool init_const
1319 = (var_init != 0
1320 && gnat_types_compatible_p (type, TREE_TYPE (var_init))
1321 && (global_bindings_p () || static_flag
1322 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
1323 : TREE_CONSTANT (var_init)));
1325 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
1326 case the initializer may be used in lieu of the DECL node (as done in
1327 Identifier_to_gnu). This is useful to prevent the need of elaboration
1328 code when an identifier for which such a decl is made is in turn used as
1329 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
1330 but extra constraints apply to this choice (see below) and are not
1331 relevant to the distinction we wish to make. */
1332 bool constant_p = const_flag && init_const;
1334 /* The actual DECL node. CONST_DECL was initially intended for enumerals
1335 and may be used for scalars in general but not for aggregates. */
1336 tree var_decl
1337 = build_decl (input_location,
1338 (constant_p && const_decl_allowed_p
1339 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
1340 var_name, type);
1342 /* If this is external, throw away any initializations (they will be done
1343 elsewhere) unless this is a constant for which we would like to remain
1344 able to get the initializer. If we are defining a global here, leave a
1345 constant initialization and save any variable elaborations for the
1346 elaboration routine. If we are just annotating types, throw away the
1347 initialization if it isn't a constant. */
1348 if ((extern_flag && !constant_p)
1349 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
1350 var_init = NULL_TREE;
1352 /* At the global level, an initializer requiring code to be generated
1353 produces elaboration statements. Check that such statements are allowed,
1354 that is, not violating a No_Elaboration_Code restriction. */
1355 if (global_bindings_p () && var_init != 0 && !init_const)
1356 Check_Elaboration_Code_Allowed (gnat_node);
1358 DECL_INITIAL (var_decl) = var_init;
1359 TREE_READONLY (var_decl) = const_flag;
1360 DECL_EXTERNAL (var_decl) = extern_flag;
1361 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1362 TREE_CONSTANT (var_decl) = constant_p;
1363 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1364 = TYPE_VOLATILE (type);
1366 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
1367 try to fiddle with DECL_COMMON. However, on platforms that don't
1368 support global BSS sections, uninitialized global variables would
1369 go in DATA instead, thus increasing the size of the executable. */
1370 if (!flag_no_common
1371 && TREE_CODE (var_decl) == VAR_DECL
1372 && TREE_PUBLIC (var_decl)
1373 && !have_global_bss_p ())
1374 DECL_COMMON (var_decl) = 1;
1376 /* If it's public and not external, always allocate storage for it.
1377 At the global binding level we need to allocate static storage for the
1378 variable if and only if it's not external. If we are not at the top level
1379 we allocate automatic storage unless requested not to. */
1380 TREE_STATIC (var_decl)
1381 = !extern_flag && (public_flag || static_flag || global_bindings_p ());
1383 /* For an external constant whose initializer is not absolute, do not emit
1384 debug info. In DWARF this would mean a global relocation in a read-only
1385 section which runs afoul of the PE-COFF runtime relocation mechanism. */
1386 if (extern_flag
1387 && constant_p
1388 && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
1389 != null_pointer_node)
1390 DECL_IGNORED_P (var_decl) = 1;
1392 /* Add this decl to the current binding level. */
1393 gnat_pushdecl (var_decl, gnat_node);
1395 if (TREE_SIDE_EFFECTS (var_decl))
1396 TREE_ADDRESSABLE (var_decl) = 1;
1398 if (TREE_CODE (var_decl) == VAR_DECL)
1400 if (asm_name)
1401 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1402 process_attributes (var_decl, attr_list);
1403 if (global_bindings_p ())
1404 rest_of_decl_compilation (var_decl, true, 0);
1406 else
1407 expand_decl (var_decl);
1409 return var_decl;
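/* A minimal sketch, kept under #if 0, of the flag combination for a
   library-level constant with a static initializer: constant, public, not
   external, CONST_DECLs allowed.  With these flags the result may come back
   as a CONST_DECL rather than a VAR_DECL.  The name and value are purely
   illustrative.  */
#if 0
static tree
example_global_constant (Node_Id gnat_node)
{
  return create_var_decl_1 (get_identifier ("answer"), NULL_TREE,
			    integer_type_node,
			    build_int_cst (integer_type_node, 42),
			    true /* const_flag */, true /* public_flag */,
			    false /* extern_flag */, false /* static_flag */,
			    true /* const_decl_allowed_p */,
			    NULL, gnat_node);
}
#endif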
1412 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
1414 static bool
1415 aggregate_type_contains_array_p (tree type)
1417 switch (TREE_CODE (type))
1419 case RECORD_TYPE:
1420 case UNION_TYPE:
1421 case QUAL_UNION_TYPE:
1423 tree field;
1424 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1425 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
1426 && aggregate_type_contains_array_p (TREE_TYPE (field)))
1427 return true;
1428 return false;
1431 case ARRAY_TYPE:
1432 return true;
1434 default:
1435 gcc_unreachable ();
1439 /* Return a FIELD_DECL node. FIELD_NAME is the field's name, FIELD_TYPE is
1440 its type and RECORD_TYPE is the type of the enclosing record. PACKED is
1441 1 if the enclosing record is packed, -1 if it has Component_Alignment of
1442 Storage_Unit. If SIZE is nonzero, it is the specified size of the field.
1443 If POS is nonzero, it is the bit position. If ADDRESSABLE is nonzero, it
1444 means we are allowed to take the address of the field; if it is negative,
1445 we should not make a bitfield, which is used by make_aligning_type. */
1447 tree
1448 create_field_decl (tree field_name, tree field_type, tree record_type,
1449 int packed, tree size, tree pos, int addressable)
1451 tree field_decl = build_decl (input_location,
1452 FIELD_DECL, field_name, field_type);
1454 DECL_CONTEXT (field_decl) = record_type;
1455 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
1457 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1458 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1459 Likewise for an aggregate without specified position that contains an
1460 array, because in this case slices of variable length of this array
1461 must be handled by GCC and variable-sized objects need to be aligned
1462 to at least a byte boundary. */
1463 if (packed && (TYPE_MODE (field_type) == BLKmode
1464 || (!pos
1465 && AGGREGATE_TYPE_P (field_type)
1466 && aggregate_type_contains_array_p (field_type))))
1467 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
1469 /* If a size is specified, use it. Otherwise, if the record type is packed
1470 compute a size to use, which may differ from the object's natural size.
1471 We always set a size in this case to trigger the checks for bitfield
1472 creation below, which is typically required when no position has been
1473 specified. */
1474 if (size)
1475 size = convert (bitsizetype, size);
1476 else if (packed == 1)
1478 size = rm_size (field_type);
1479 if (TYPE_MODE (field_type) == BLKmode)
1480 size = round_up (size, BITS_PER_UNIT);
1483 /* If ADDRESSABLE allows it, make a bitfield when a size is specified,
1484 for two reasons: first, if the size differs from the natural size;
1485 second, if the alignment is insufficient.  There are a number of
1486 ways the latter can be true.
1488 We never make a bitfield if the type of the field has a nonconstant size,
1489 because no such entity requiring bitfield operations should reach here.
1491 We do *preventively* make a bitfield when there might be the need for it
1492 but we don't have all the necessary information to decide, as is the case
1493 of a field with no specified position in a packed record.
1495 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1496 in layout_decl or finish_record_type to clear the bit_field indication if
1497 it is in fact not needed. */
1498 if (addressable >= 0
1499 && size
1500 && TREE_CODE (size) == INTEGER_CST
1501 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1502 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
1503 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
1504 || packed
1505 || (TYPE_ALIGN (record_type) != 0
1506 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1508 DECL_BIT_FIELD (field_decl) = 1;
1509 DECL_SIZE (field_decl) = size;
1510 if (!packed && !pos)
1512 if (TYPE_ALIGN (record_type) != 0
1513 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
1514 DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
1515 else
1516 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1520 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
1522 /* Bump the alignment if need be, either for bitfield/packing purposes or
1523 to satisfy the type requirements if no such consideration applies. When
1524 we get the alignment from the type, indicate if this is from an explicit
1525 user request, which prevents stor-layout from lowering it later on. */
1527 unsigned int bit_align
1528 = (DECL_BIT_FIELD (field_decl) ? 1
1529 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
1531 if (bit_align > DECL_ALIGN (field_decl))
1532 DECL_ALIGN (field_decl) = bit_align;
1533 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
1535 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1536 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
1540 if (pos)
1542 /* We need to pass in the alignment the DECL is known to have.
1543 This is the lowest-order bit set in POS, but no more than
1544 the alignment of the record, if one is specified. Note
1545 that an alignment of 0 is taken as infinite. */
1546 unsigned int known_align;
1548 if (host_integerp (pos, 1))
1549 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1550 else
1551 known_align = BITS_PER_UNIT;
1553 if (TYPE_ALIGN (record_type)
1554 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1555 known_align = TYPE_ALIGN (record_type);
1557 layout_decl (field_decl, known_align);
1558 SET_DECL_OFFSET_ALIGN (field_decl,
1559 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1560 : BITS_PER_UNIT);
1561 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1562 &DECL_FIELD_BIT_OFFSET (field_decl),
1563 DECL_OFFSET_ALIGN (field_decl), pos);
1566 /* In addition to what our caller says, claim the field is addressable if we
1567 know that its type is not suitable.
1569 The field may also be "technically" nonaddressable, meaning that even if
1570 we attempt to take the field's address we will actually get the address
1571 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
1572 value we have at this point is not accurate enough, so we don't account
1573 for this here and let finish_record_type decide. */
1574 if (!addressable && !type_for_nonaliased_component_p (field_type))
1575 addressable = 1;
1577 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
1579 return field_decl;
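/* A minimal sketch (not built; names are illustrative) of the alignment that
   can be deduced from a constant bit position above: the lowest-order set
   bit of POS bounds the known alignment, further capped by the record
   alignment when one is specified (0 meaning infinite).  */
#if 0
static unsigned int
example_known_align_from_pos (unsigned HOST_WIDE_INT pos_in_bits,
                              unsigned int record_align)
{
  /* Lowest-order set bit of the position, e.g. a field at bit 24 cannot be
     known to be more than 8-bit aligned.  */
  unsigned HOST_WIDE_INT known = pos_in_bits & - pos_in_bits;

  /* Cap by the record alignment if one is specified.  */
  if (record_align != 0 && (known == 0 || known > record_align))
    known = record_align;

  return (unsigned int) known;
}
#endif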
1582 /* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
1583 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
1584 (either an In parameter or an address of a pass-by-ref parameter). */
1586 tree
1587 create_param_decl (tree param_name, tree param_type, bool readonly)
1589 tree param_decl = build_decl (input_location,
1590 PARM_DECL, param_name, param_type);
1592 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
1593 can lead to various ABI violations. */
1594 if (targetm.calls.promote_prototypes (NULL_TREE)
1595 && INTEGRAL_TYPE_P (param_type)
1596 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1598 /* We have to be careful about biased types here. Make a subtype
1599 of integer_type_node with the proper biasing. */
1600 if (TREE_CODE (param_type) == INTEGER_TYPE
1601 && TYPE_BIASED_REPRESENTATION_P (param_type))
1603 tree subtype
1604 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
1605 TREE_TYPE (subtype) = integer_type_node;
1606 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
1607 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (param_type));
1608 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (param_type));
1609 param_type = subtype;
1611 else
1612 param_type = integer_type_node;
1615 DECL_ARG_TYPE (param_decl) = param_type;
1616 TREE_READONLY (param_decl) = readonly;
1617 return param_decl;
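/* A minimal usage sketch (not built; the identifier is illustrative), using
   only functions defined in this file: on targets that promote prototypes, a
   parameter narrower than "int" keeps its nominal TREE_TYPE while its
   DECL_ARG_TYPE may be widened to integer_type_node by the code above.  */
#if 0
static void
example_promoted_param (void)
{
  tree small_type = gnat_type_for_size (8, 0);  /* 8-bit signed type.  */
  tree param = create_param_decl (get_identifier ("example_param"),
                                  small_type, true);

  /* The nominal type is unchanged; only the type used for argument passing
     may have been promoted.  */
  gcc_assert (TREE_TYPE (param) == small_type);
}
#endif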
1620 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1622 static void
1623 process_attributes (tree decl, struct attrib *attr_list)
1625 for (; attr_list; attr_list = attr_list->next)
1626 switch (attr_list->type)
1628 case ATTR_MACHINE_ATTRIBUTE:
1629 input_location = DECL_SOURCE_LOCATION (decl);
1630 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
1631 NULL_TREE),
1632 ATTR_FLAG_TYPE_IN_PLACE);
1633 break;
1635 case ATTR_LINK_ALIAS:
1636 if (! DECL_EXTERNAL (decl))
1638 TREE_STATIC (decl) = 1;
1639 assemble_alias (decl, attr_list->name);
1641 break;
1643 case ATTR_WEAK_EXTERNAL:
1644 if (SUPPORTS_WEAK)
1645 declare_weak (decl);
1646 else
1647 post_error ("?weak declarations not supported on this target",
1648 attr_list->error_point);
1649 break;
1651 case ATTR_LINK_SECTION:
1652 if (targetm.have_named_sections)
1654 DECL_SECTION_NAME (decl)
1655 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1656 IDENTIFIER_POINTER (attr_list->name));
1657 DECL_COMMON (decl) = 0;
1659 else
1660 post_error ("?section attributes are not supported for this target",
1661 attr_list->error_point);
1662 break;
1664 case ATTR_LINK_CONSTRUCTOR:
1665 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1666 TREE_USED (decl) = 1;
1667 break;
1669 case ATTR_LINK_DESTRUCTOR:
1670 DECL_STATIC_DESTRUCTOR (decl) = 1;
1671 TREE_USED (decl) = 1;
1672 break;
1674 case ATTR_THREAD_LOCAL_STORAGE:
1675 DECL_TLS_MODEL (decl) = decl_default_tls_model (decl);
1676 DECL_COMMON (decl) = 0;
1677 break;
1681 /* Record DECL as a global renaming pointer. */
1683 void
1684 record_global_renaming_pointer (tree decl)
1686 gcc_assert (DECL_RENAMED_OBJECT (decl));
1687 VEC_safe_push (tree, gc, global_renaming_pointers, decl);
1690 /* Invalidate the global renaming pointers. */
1692 void
1693 invalidate_global_renaming_pointers (void)
1695 unsigned int i;
1696 tree iter;
1698 for (i = 0; VEC_iterate(tree, global_renaming_pointers, i, iter); i++)
1699 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
1701 VEC_free (tree, gc, global_renaming_pointers);
1704 /* Return true if VALUE is known to be a multiple of FACTOR, which must be
1705 a power of 2. */
1707 bool
1708 value_factor_p (tree value, HOST_WIDE_INT factor)
1710 if (host_integerp (value, 1))
1711 return tree_low_cst (value, 1) % factor == 0;
1713 if (TREE_CODE (value) == MULT_EXPR)
1714 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1715 || value_factor_p (TREE_OPERAND (value, 1), factor));
1717 return false;
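/* A minimal usage sketch (not built): value_factor_p accepts any constant
   that is a multiple of the factor, recurses into the operands of a
   MULT_EXPR, and conservatively answers false for everything else.  */
#if 0
static void
example_value_factor (void)
{
  gcc_assert (value_factor_p (bitsize_int (24), 8));   /* 24 % 8 == 0  */
  gcc_assert (!value_factor_p (bitsize_int (20), 8));  /* 20 % 8 != 0  */
}
#endif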
1720 /* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1721 unless we can prove these 2 fields are laid out in such a way that no gap
1722 exists between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
1723 is the distance in bits between the end of PREV_FIELD and the starting
1724 position of CURR_FIELD. It is ignored if null. */
1726 static bool
1727 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1729 /* If this is the first field of the record, there cannot be any gap. */
1730 if (!prev_field)
1731 return false;
1733 /* If the previous field is a union type, then return False: The only
1734 time when such a field is not the last field of the record is when
1735 there are other components at fixed positions after it (meaning there
1736 was a rep clause for every field), in which case we don't want the
1737 alignment constraint to override them. */
1738 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1739 return false;
1741 /* If the distance between the end of prev_field and the beginning of
1742 curr_field is constant, then there is a gap if the value of this
1743 constant is not null. */
1744 if (offset && host_integerp (offset, 1))
1745 return !integer_zerop (offset);
1747 /* If the size and position of the previous field are constant,
1748 then check the sum of this size and position. There will be a gap
1749 iff it is not a multiple of the current field alignment. */
1750 if (host_integerp (DECL_SIZE (prev_field), 1)
1751 && host_integerp (bit_position (prev_field), 1))
1752 return ((tree_low_cst (bit_position (prev_field), 1)
1753 + tree_low_cst (DECL_SIZE (prev_field), 1))
1754 % DECL_ALIGN (curr_field) != 0);
1756 /* If both the position and size of the previous field are multiples
1757 of the current field alignment, there cannot be any gap. */
1758 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1759 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1760 return false;
1762 /* Fallback: return that there may be a potential gap. */
1763 return true;
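/* A minimal sketch (not built; names are illustrative) of the constant case
   checked above, in plain arithmetic: with a previous field at bit 32 of
   size 24 bits and a current field aligned to 16 bits, 32 + 24 = 56 and
   56 % 16 != 0, so a gap is possible.  */
#if 0
static bool
example_constant_gap_p (unsigned HOST_WIDE_INT prev_pos,
                        unsigned HOST_WIDE_INT prev_size,
                        unsigned int curr_align)
{
  /* A gap is possible iff the end of the previous field is not a multiple
     of the current field's alignment.  */
  return (prev_pos + prev_size) % curr_align != 0;
}
#endif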
1766 /* Returns a LABEL_DECL node for LABEL_NAME. */
1768 tree
1769 create_label_decl (tree label_name)
1771 tree label_decl = build_decl (input_location,
1772 LABEL_DECL, label_name, void_type_node);
1774 DECL_CONTEXT (label_decl) = current_function_decl;
1775 DECL_MODE (label_decl) = VOIDmode;
1776 DECL_SOURCE_LOCATION (label_decl) = input_location;
1778 return label_decl;
1781 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1782 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1783 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1784 PARM_DECL nodes chained through the TREE_CHAIN field).
1786 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1787 appropriate fields in the FUNCTION_DECL. GNAT_NODE gives the location. */
1789 tree
1790 create_subprog_decl (tree subprog_name, tree asm_name,
1791 tree subprog_type, tree param_decl_list, bool inline_flag,
1792 bool public_flag, bool extern_flag,
1793 struct attrib *attr_list, Node_Id gnat_node)
1795 tree subprog_decl = build_decl (input_location, FUNCTION_DECL, subprog_name,
1796 subprog_type);
1797 tree result_decl = build_decl (input_location, RESULT_DECL, NULL_TREE,
1798 TREE_TYPE (subprog_type));
1800 /* If this is a non-inline function nested inside an inlined external
1801 function, we cannot honor both requests without cloning the nested
1802 function in the current unit since it is private to the other unit.
1803 We could inline the nested function as well but it's probably better
1804 to err on the side of too little inlining. */
1805 if (!inline_flag
1806 && current_function_decl
1807 && DECL_DECLARED_INLINE_P (current_function_decl)
1808 && DECL_EXTERNAL (current_function_decl))
1809 DECL_DECLARED_INLINE_P (current_function_decl) = 0;
1811 DECL_EXTERNAL (subprog_decl) = extern_flag;
1812 TREE_PUBLIC (subprog_decl) = public_flag;
1813 TREE_STATIC (subprog_decl) = 1;
1814 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1815 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1816 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1817 DECL_DECLARED_INLINE_P (subprog_decl) = inline_flag;
1818 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1820 DECL_ARTIFICIAL (result_decl) = 1;
1821 DECL_IGNORED_P (result_decl) = 1;
1822 DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (subprog_type);
1823 DECL_RESULT (subprog_decl) = result_decl;
1825 if (asm_name)
1827 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1829 /* The expand_main_function circuitry expects "main_identifier_node" to
1830 designate the DECL_NAME of the 'main' entry point, which by default is
1831 expected to be declared literally as the "main" function. Ada program
1832 entry points are typically declared with a different name within the
1833 binder-generated file and exported as 'main' to satisfy the system's
1834 expectations. Force main_identifier_node in this case. */
1835 if (asm_name == main_identifier_node)
1836 DECL_NAME (subprog_decl) = main_identifier_node;
1839 /* Add this decl to the current binding level. */
1840 gnat_pushdecl (subprog_decl, gnat_node);
1842 process_attributes (subprog_decl, attr_list);
1844 /* Output the assembler code and/or RTL for the declaration. */
1845 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
1847 return subprog_decl;
1850 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1851 body. This routine needs to be invoked before processing the declarations
1852 appearing in the subprogram. */
1854 void
1855 begin_subprog_body (tree subprog_decl)
1857 tree param_decl;
1859 announce_function (subprog_decl);
1861 current_function_decl = subprog_decl;
1863 /* Enter a new binding level and show that all the parameters belong to
1864 this function. */
1865 gnat_pushlevel ();
1867 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1868 param_decl = TREE_CHAIN (param_decl))
1869 DECL_CONTEXT (param_decl) = subprog_decl;
1871 make_decl_rtl (subprog_decl);
1873 /* We handle pending sizes via the elaboration of types, so we don't need to
1874 save them. This causes them to be marked as part of the outer function
1875 and then discarded. */
1876 get_pending_sizes ();
1879 /* Finish the definition of the current subprogram BODY and finalize it. */
1881 void
1882 end_subprog_body (tree body)
1884 tree fndecl = current_function_decl;
1886 /* Mark the BLOCK for this level as being for this function and pop the
1887 level. Since the vars in it are the parameters, clear them. */
1888 BLOCK_VARS (current_binding_level->block) = NULL_TREE;
1889 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
1890 DECL_INITIAL (fndecl) = current_binding_level->block;
1891 gnat_poplevel ();
1893 /* We handle pending sizes via the elaboration of types, so we don't
1894 need to save them. */
1895 get_pending_sizes ();
1897 /* Mark the RESULT_DECL as being in this subprogram. */
1898 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
1900 DECL_SAVED_TREE (fndecl) = body;
1902 current_function_decl = DECL_CONTEXT (fndecl);
1904 /* We cannot track the location of errors past this point. */
1905 error_gnat_node = Empty;
1907 /* If we're only annotating types, don't actually compile this function. */
1908 if (type_annotate_only)
1909 return;
1911 /* Dump functions before gimplification. */
1912 dump_function (TDI_original, fndecl);
1914 /* ??? This special handling of nested functions is probably obsolete. */
1915 if (!DECL_CONTEXT (fndecl))
1916 cgraph_finalize_function (fndecl, false);
1917 else
1918 /* Register this function with cgraph just far enough to get it
1919 added to our parent's nested function list. */
1920 (void) cgraph_node (fndecl);
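/* A minimal sketch (not built) of how the routines above are meant to be
   paired, modeled on build_function_stub below: open the body, build the
   statement group, then close the level and finalize the function.  */
#if 0
static void
example_subprog_sequence (tree subprog_decl)
{
  tree body;

  begin_subprog_body (subprog_decl);
  gnat_pushlevel ();

  start_stmt_group ();
  /* ... add the statements of the body here ...  */
  body = end_stmt_group ();

  gnat_poplevel ();
  allocate_struct_function (subprog_decl, false);
  end_subprog_body (body);
}
#endif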
1923 tree
1924 gnat_builtin_function (tree decl)
1926 gnat_pushdecl (decl, Empty);
1927 return decl;
1930 /* Return an integer type with the number of bits of precision given by
1931 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
1932 it is a signed type. */
1934 tree
1935 gnat_type_for_size (unsigned precision, int unsignedp)
1937 tree t;
1938 char type_name[20];
1940 if (precision <= 2 * MAX_BITS_PER_WORD
1941 && signed_and_unsigned_types[precision][unsignedp])
1942 return signed_and_unsigned_types[precision][unsignedp];
1944 if (unsignedp)
1945 t = make_unsigned_type (precision);
1946 else
1947 t = make_signed_type (precision);
1949 if (precision <= 2 * MAX_BITS_PER_WORD)
1950 signed_and_unsigned_types[precision][unsignedp] = t;
1952 if (!TYPE_NAME (t))
1954 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
1955 TYPE_NAME (t) = get_identifier (type_name);
1958 return t;
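/* A minimal usage sketch (not built): the same precision/signedness pair
   returns the cached node, and nameless types get a synthesized
   SIGNED_<n> or UNSIGNED_<n> name.  */
#if 0
static void
example_type_for_size (void)
{
  tree u8 = gnat_type_for_size (8, 1);    /* named "UNSIGNED_8"  */
  tree s32 = gnat_type_for_size (32, 0);  /* named "SIGNED_32"  */

  gcc_assert (u8 == gnat_type_for_size (8, 1));  /* cached  */
  gcc_assert (TYPE_PRECISION (s32) == 32);
}
#endif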
1961 /* Likewise for floating-point types. */
1963 static tree
1964 float_type_for_precision (int precision, enum machine_mode mode)
1966 tree t;
1967 char type_name[20];
1969 if (float_types[(int) mode])
1970 return float_types[(int) mode];
1972 float_types[(int) mode] = t = make_node (REAL_TYPE);
1973 TYPE_PRECISION (t) = precision;
1974 layout_type (t);
1976 gcc_assert (TYPE_MODE (t) == mode);
1977 if (!TYPE_NAME (t))
1979 sprintf (type_name, "FLOAT_%d", precision);
1980 TYPE_NAME (t) = get_identifier (type_name);
1983 return t;
1986 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
1987 an unsigned type; otherwise a signed type is returned. */
1989 tree
1990 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
1992 if (mode == BLKmode)
1993 return NULL_TREE;
1995 if (mode == VOIDmode)
1996 return void_type_node;
1998 if (COMPLEX_MODE_P (mode))
1999 return NULL_TREE;
2001 if (SCALAR_FLOAT_MODE_P (mode))
2002 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2004 if (SCALAR_INT_MODE_P (mode))
2005 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2007 if (VECTOR_MODE_P (mode))
2009 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2010 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
2011 if (inner_type)
2012 return build_vector_type_for_mode (inner_type, mode);
2015 return NULL_TREE;
2018 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2020 tree
2021 gnat_unsigned_type (tree type_node)
2023 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2025 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2027 type = copy_node (type);
2028 TREE_TYPE (type) = type_node;
2030 else if (TREE_TYPE (type_node)
2031 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2032 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2034 type = copy_node (type);
2035 TREE_TYPE (type) = TREE_TYPE (type_node);
2038 return type;
2041 /* Return the signed version of a TYPE_NODE, a scalar type. */
2043 tree
2044 gnat_signed_type (tree type_node)
2046 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2048 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2050 type = copy_node (type);
2051 TREE_TYPE (type) = type_node;
2053 else if (TREE_TYPE (type_node)
2054 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2055 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2057 type = copy_node (type);
2058 TREE_TYPE (type) = TREE_TYPE (type_node);
2061 return type;
2064 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2065 transparently converted to each other. */
2067 int
2068 gnat_types_compatible_p (tree t1, tree t2)
2070 enum tree_code code;
2072 /* This is the default criterion. */
2073 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
2074 return 1;
2076 /* We only check structural equivalence here. */
2077 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
2078 return 0;
2080 /* Vector types are also compatible if they have the same number of subparts
2081 and the same form of (scalar) element type. */
2082 if (code == VECTOR_TYPE
2083 && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
2084 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
2085 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
2086 return 1;
2088 /* Array types are also compatible if they are constrained and have
2089 the same component type and the same domain. */
2090 if (code == ARRAY_TYPE
2091 && TREE_TYPE (t1) == TREE_TYPE (t2)
2092 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
2093 || (TYPE_DOMAIN (t1)
2094 && TYPE_DOMAIN (t2)
2095 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
2096 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
2097 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
2098 TYPE_MAX_VALUE (TYPE_DOMAIN (t2))))))
2099 return 1;
2101 /* Padding record types are also compatible if they pad the same
2102 type and have the same constant size. */
2103 if (code == RECORD_TYPE
2104 && TYPE_PADDING_P (t1) && TYPE_PADDING_P (t2)
2105 && TREE_TYPE (TYPE_FIELDS (t1)) == TREE_TYPE (TYPE_FIELDS (t2))
2106 && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
2107 return 1;
2109 return 0;
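/* A minimal usage sketch (not built) of the array case above: two
   constrained array types with the same component type and the same
   bounds compare as compatible.  */
#if 0
static void
example_array_compatibility (void)
{
  tree index = build_index_type (size_int (9));  /* 0 .. 9  */
  tree a1 = build_array_type (integer_type_node, index);
  tree a2 = build_array_type (integer_type_node, index);

  gcc_assert (gnat_types_compatible_p (a1, a2));
}
#endif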
2112 /* EXP is an expression for the size of an object. If this size contains
2113 discriminant references, replace them with the maximum (if MAX_P) or
2114 minimum (if !MAX_P) possible value of the discriminant. */
2116 tree
2117 max_size (tree exp, bool max_p)
2119 enum tree_code code = TREE_CODE (exp);
2120 tree type = TREE_TYPE (exp);
2122 switch (TREE_CODE_CLASS (code))
2124 case tcc_declaration:
2125 case tcc_constant:
2126 return exp;
2128 case tcc_vl_exp:
2129 if (code == CALL_EXPR)
2131 tree t, *argarray;
2132 int n, i;
2134 t = maybe_inline_call_in_expr (exp);
2135 if (t)
2136 return max_size (t, max_p);
2138 n = call_expr_nargs (exp);
2139 gcc_assert (n > 0);
2140 argarray = (tree *) alloca (n * sizeof (tree));
2141 for (i = 0; i < n; i++)
2142 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
2143 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
2145 break;
2147 case tcc_reference:
2148 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2149 modify. Otherwise, we treat it like a variable. */
2150 if (!CONTAINS_PLACEHOLDER_P (exp))
2151 return exp;
2153 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2154 return
2155 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
2157 case tcc_comparison:
2158 return max_p ? size_one_node : size_zero_node;
2160 case tcc_unary:
2161 case tcc_binary:
2162 case tcc_expression:
2163 switch (TREE_CODE_LENGTH (code))
2165 case 1:
2166 if (code == NON_LVALUE_EXPR)
2167 return max_size (TREE_OPERAND (exp, 0), max_p);
2168 else
2169 return
2170 fold_build1 (code, type,
2171 max_size (TREE_OPERAND (exp, 0),
2172 code == NEGATE_EXPR ? !max_p : max_p));
2174 case 2:
2175 if (code == COMPOUND_EXPR)
2176 return max_size (TREE_OPERAND (exp, 1), max_p);
2178 /* Calculate "(A ? B : C) - D" as "A ? B - D : C - D" which
2179 may provide a tighter bound on max_size. */
2180 if (code == MINUS_EXPR
2181 && TREE_CODE (TREE_OPERAND (exp, 0)) == COND_EXPR)
2183 tree lhs = fold_build2 (MINUS_EXPR, type,
2184 TREE_OPERAND (TREE_OPERAND (exp, 0), 1),
2185 TREE_OPERAND (exp, 1));
2186 tree rhs = fold_build2 (MINUS_EXPR, type,
2187 TREE_OPERAND (TREE_OPERAND (exp, 0), 2),
2188 TREE_OPERAND (exp, 1));
2189 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2190 max_size (lhs, max_p),
2191 max_size (rhs, max_p));
2195 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2196 tree rhs = max_size (TREE_OPERAND (exp, 1),
2197 code == MINUS_EXPR ? !max_p : max_p);
2199 /* Special-case wanting the maximum value of a MIN_EXPR.
2200 In that case, if one side overflows, return the other.
2201 sizetype is signed, but we know sizes are non-negative.
2202 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2203 overflowing and the RHS a variable. */
2204 if (max_p
2205 && code == MIN_EXPR
2206 && TREE_CODE (rhs) == INTEGER_CST
2207 && TREE_OVERFLOW (rhs))
2208 return lhs;
2209 else if (max_p
2210 && code == MIN_EXPR
2211 && TREE_CODE (lhs) == INTEGER_CST
2212 && TREE_OVERFLOW (lhs))
2213 return rhs;
2214 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2215 && TREE_CODE (lhs) == INTEGER_CST
2216 && TREE_OVERFLOW (lhs)
2217 && !TREE_CONSTANT (rhs))
2218 return lhs;
2219 else
2220 return fold_build2 (code, type, lhs, rhs);
2223 case 3:
2224 if (code == SAVE_EXPR)
2225 return exp;
2226 else if (code == COND_EXPR)
2227 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2228 max_size (TREE_OPERAND (exp, 1), max_p),
2229 max_size (TREE_OPERAND (exp, 2), max_p));
2232 /* Other tree classes cannot happen. */
2233 default:
2234 break;
2237 gcc_unreachable ();
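/* A small worked illustration of the "(A ? B : C) - D" rewrite handled
   above (A and X are hypothetical): with a discriminant X in 0 .. 10 and
   the size expression "(A ? X + 4 : 8) - X", distributing the subtraction
   gives "A ? 4 : 8 - X", whose maximum is 8, whereas bounding the two
   operands independently would give max (A ? X + 4 : 8) - min (X)
   = 14 - 0 = 14, a looser bound.  */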
2240 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2241 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2242 Return a constructor for the template. */
2244 tree
2245 build_template (tree template_type, tree array_type, tree expr)
2247 tree template_elts = NULL_TREE;
2248 tree bound_list = NULL_TREE;
2249 tree field;
2251 while (TREE_CODE (array_type) == RECORD_TYPE
2252 && (TYPE_PADDING_P (array_type)
2253 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
2254 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2256 if (TREE_CODE (array_type) == ARRAY_TYPE
2257 || (TREE_CODE (array_type) == INTEGER_TYPE
2258 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2259 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2261 /* First make the list for a CONSTRUCTOR for the template. Go down the
2262 field list of the template instead of the type chain because this
2263 array might be an Ada array of arrays and we can't tell where the
2264 nested arrays stop being the underlying object. */
2266 for (field = TYPE_FIELDS (template_type); field;
2267 (bound_list
2268 ? (bound_list = TREE_CHAIN (bound_list))
2269 : (array_type = TREE_TYPE (array_type))),
2270 field = TREE_CHAIN (TREE_CHAIN (field)))
2272 tree bounds, min, max;
2274 /* If we have a bound list, get the bounds from there. Likewise
2275 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2276 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2277 This will give us a maximum range. */
2278 if (bound_list)
2279 bounds = TREE_VALUE (bound_list);
2280 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2281 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2282 else if (expr && TREE_CODE (expr) == PARM_DECL
2283 && DECL_BY_COMPONENT_PTR_P (expr))
2284 bounds = TREE_TYPE (field);
2285 else
2286 gcc_unreachable ();
2288 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
2289 max = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MAX_VALUE (bounds));
2291 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2292 substitute it from OBJECT. */
2293 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
2294 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
2296 template_elts = tree_cons (TREE_CHAIN (field), max,
2297 tree_cons (field, min, template_elts));
2300 return gnat_build_constructor (template_type, nreverse (template_elts));
2303 /* Build a 32-bit VMS descriptor from a Mechanism_Type, which must specify a
2304 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
2305 type contains in its DECL_INITIAL the expression to use when a constructor
2306 is made for the type. GNAT_ENTITY is an entity used to print out an error
2307 message if the mechanism cannot be applied to an object of that type and
2308 also for the name. */
2310 tree
2311 build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2313 tree record_type = make_node (RECORD_TYPE);
2314 tree pointer32_type;
2315 tree field_list = 0;
2316 int klass;
2317 int dtype = 0;
2318 tree inner_type;
2319 int ndim;
2320 int i;
2321 tree *idx_arr;
2322 tree tem;
2324 /* If TYPE is an unconstrained array, use the underlying array type. */
2325 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2326 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2328 /* If this is an array, compute the number of dimensions in the array,
2329 get the index types, and point to the inner type. */
2330 if (TREE_CODE (type) != ARRAY_TYPE)
2331 ndim = 0;
2332 else
2333 for (ndim = 1, inner_type = type;
2334 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2335 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2336 ndim++, inner_type = TREE_TYPE (inner_type))
2339 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2341 if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
2342 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2343 for (i = ndim - 1, inner_type = type;
2344 i >= 0;
2345 i--, inner_type = TREE_TYPE (inner_type))
2346 idx_arr[i] = TYPE_DOMAIN (inner_type);
2347 else
2348 for (i = 0, inner_type = type;
2349 i < ndim;
2350 i++, inner_type = TREE_TYPE (inner_type))
2351 idx_arr[i] = TYPE_DOMAIN (inner_type);
2353 /* Now get the DTYPE value. */
2354 switch (TREE_CODE (type))
2356 case INTEGER_TYPE:
2357 case ENUMERAL_TYPE:
2358 case BOOLEAN_TYPE:
2359 if (TYPE_VAX_FLOATING_POINT_P (type))
2360 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2362 case 6:
2363 dtype = 10;
2364 break;
2365 case 9:
2366 dtype = 11;
2367 break;
2368 case 15:
2369 dtype = 27;
2370 break;
2372 else
2373 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2375 case 8:
2376 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2377 break;
2378 case 16:
2379 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2380 break;
2381 case 32:
2382 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2383 break;
2384 case 64:
2385 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2386 break;
2387 case 128:
2388 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2389 break;
2391 break;
2393 case REAL_TYPE:
2394 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2395 break;
2397 case COMPLEX_TYPE:
2398 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2399 && TYPE_VAX_FLOATING_POINT_P (type))
2400 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2402 case 6:
2403 dtype = 12;
2404 break;
2405 case 9:
2406 dtype = 13;
2407 break;
2408 case 15:
2409 dtype = 29;
2411 else
2412 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54 : 55;
2413 break;
2415 case ARRAY_TYPE:
2416 dtype = 14;
2417 break;
2419 default:
2420 break;
2423 /* Get the CLASS value. */
2424 switch (mech)
2426 case By_Descriptor_A:
2427 case By_Short_Descriptor_A:
2428 klass = 4;
2429 break;
2430 case By_Descriptor_NCA:
2431 case By_Short_Descriptor_NCA:
2432 klass = 10;
2433 break;
2434 case By_Descriptor_SB:
2435 case By_Short_Descriptor_SB:
2436 klass = 15;
2437 break;
2438 case By_Descriptor:
2439 case By_Short_Descriptor:
2440 case By_Descriptor_S:
2441 case By_Short_Descriptor_S:
2442 default:
2443 klass = 1;
2444 break;
2447 /* Make the type for a descriptor for VMS. The first four fields are the
2448 same for all types. */
2449 field_list
2450 = chainon (field_list,
2451 make_descriptor_field ("LENGTH", gnat_type_for_size (16, 1),
2452 record_type,
2453 size_in_bytes
2454 ((mech == By_Descriptor_A
2455 || mech == By_Short_Descriptor_A)
2456 ? inner_type : type)));
2457 field_list
2458 = chainon (field_list,
2459 make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1),
2460 record_type, size_int (dtype)));
2461 field_list
2462 = chainon (field_list,
2463 make_descriptor_field ("CLASS", gnat_type_for_size (8, 1),
2464 record_type, size_int (klass)));
2466 /* Of course this will crash at run-time if the address space is not
2467 within the low 32 bits, but there is nothing else we can do. */
2468 pointer32_type = build_pointer_type_for_mode (type, SImode, false);
2470 field_list
2471 = chainon (field_list,
2472 make_descriptor_field ("POINTER", pointer32_type, record_type,
2473 build_unary_op (ADDR_EXPR,
2474 pointer32_type,
2475 build0 (PLACEHOLDER_EXPR,
2476 type))));
2478 switch (mech)
2480 case By_Descriptor:
2481 case By_Short_Descriptor:
2482 case By_Descriptor_S:
2483 case By_Short_Descriptor_S:
2484 break;
2486 case By_Descriptor_SB:
2487 case By_Short_Descriptor_SB:
2488 field_list
2489 = chainon (field_list,
2490 make_descriptor_field
2491 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2492 TREE_CODE (type) == ARRAY_TYPE
2493 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2494 field_list
2495 = chainon (field_list,
2496 make_descriptor_field
2497 ("SB_U1", gnat_type_for_size (32, 1), record_type,
2498 TREE_CODE (type) == ARRAY_TYPE
2499 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2500 break;
2502 case By_Descriptor_A:
2503 case By_Short_Descriptor_A:
2504 case By_Descriptor_NCA:
2505 case By_Short_Descriptor_NCA:
2506 field_list = chainon (field_list,
2507 make_descriptor_field ("SCALE",
2508 gnat_type_for_size (8, 1),
2509 record_type,
2510 size_zero_node));
2512 field_list = chainon (field_list,
2513 make_descriptor_field ("DIGITS",
2514 gnat_type_for_size (8, 1),
2515 record_type,
2516 size_zero_node));
2518 field_list
2519 = chainon (field_list,
2520 make_descriptor_field
2521 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2522 size_int ((mech == By_Descriptor_NCA ||
2523 mech == By_Short_Descriptor_NCA)
2524 ? 0
2525 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2526 : (TREE_CODE (type) == ARRAY_TYPE
2527 && TYPE_CONVENTION_FORTRAN_P (type)
2528 ? 224 : 192))));
2530 field_list = chainon (field_list,
2531 make_descriptor_field ("DIMCT",
2532 gnat_type_for_size (8, 1),
2533 record_type,
2534 size_int (ndim)));
2536 field_list = chainon (field_list,
2537 make_descriptor_field ("ARSIZE",
2538 gnat_type_for_size (32, 1),
2539 record_type,
2540 size_in_bytes (type)));
2542 /* Now build a pointer to the 0,0,0... element. */
2543 tem = build0 (PLACEHOLDER_EXPR, type);
2544 for (i = 0, inner_type = type; i < ndim;
2545 i++, inner_type = TREE_TYPE (inner_type))
2546 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2547 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2548 NULL_TREE, NULL_TREE);
2550 field_list
2551 = chainon (field_list,
2552 make_descriptor_field
2553 ("A0",
2554 build_pointer_type_for_mode (inner_type, SImode, false),
2555 record_type,
2556 build1 (ADDR_EXPR,
2557 build_pointer_type_for_mode (inner_type, SImode,
2558 false),
2559 tem)));
2561 /* Next come the addressing coefficients. */
2562 tem = size_one_node;
2563 for (i = 0; i < ndim; i++)
2565 char fname[3];
2566 tree idx_length
2567 = size_binop (MULT_EXPR, tem,
2568 size_binop (PLUS_EXPR,
2569 size_binop (MINUS_EXPR,
2570 TYPE_MAX_VALUE (idx_arr[i]),
2571 TYPE_MIN_VALUE (idx_arr[i])),
2572 size_int (1)));
2574 fname[0] = ((mech == By_Descriptor_NCA ||
2575 mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
2576 fname[1] = '0' + i, fname[2] = 0;
2577 field_list
2578 = chainon (field_list,
2579 make_descriptor_field (fname,
2580 gnat_type_for_size (32, 1),
2581 record_type, idx_length));
2583 if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
2584 tem = idx_length;
2587 /* Finally here are the bounds. */
2588 for (i = 0; i < ndim; i++)
2590 char fname[3];
2592 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2593 field_list
2594 = chainon (field_list,
2595 make_descriptor_field
2596 (fname, gnat_type_for_size (32, 1), record_type,
2597 TYPE_MIN_VALUE (idx_arr[i])));
2599 fname[0] = 'U';
2600 field_list
2601 = chainon (field_list,
2602 make_descriptor_field
2603 (fname, gnat_type_for_size (32, 1), record_type,
2604 TYPE_MAX_VALUE (idx_arr[i])));
2606 break;
2608 default:
2609 post_error ("unsupported descriptor type for &", gnat_entity);
2612 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC");
2613 finish_record_type (record_type, field_list, 0, false);
2614 return record_type;
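/* A minimal sketch (not built; the helper is illustrative) extracting from
   the switch above the DTYPE codes used for non-VAX discrete types of the
   usual sizes; floating-point, complex and array types use other codes
   (52/53, 54/55 and 14 respectively).  */
#if 0
static int
example_discrete_dtype (int bitsize, bool unsigned_p)
{
  switch (bitsize)
    {
    case 8:   return unsigned_p ? 2 : 6;
    case 16:  return unsigned_p ? 3 : 7;
    case 32:  return unsigned_p ? 4 : 8;
    case 64:  return unsigned_p ? 5 : 9;
    case 128: return unsigned_p ? 25 : 26;
    default:  return 0;
    }
}
#endif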
2617 /* Build a 64-bit VMS descriptor from a Mechanism_Type, which must specify a
2618 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
2619 type contains in its DECL_INITIAL the expression to use when a constructor
2620 is made for the type. GNAT_ENTITY is an entity used to print out an error
2621 message if the mechanism cannot be applied to an object of that type and
2622 also for the name. */
2624 tree
2625 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2627 tree record64_type = make_node (RECORD_TYPE);
2628 tree pointer64_type;
2629 tree field_list64 = 0;
2630 int klass;
2631 int dtype = 0;
2632 tree inner_type;
2633 int ndim;
2634 int i;
2635 tree *idx_arr;
2636 tree tem;
2638 /* If TYPE is an unconstrained array, use the underlying array type. */
2639 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2640 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2642 /* If this is an array, compute the number of dimensions in the array,
2643 get the index types, and point to the inner type. */
2644 if (TREE_CODE (type) != ARRAY_TYPE)
2645 ndim = 0;
2646 else
2647 for (ndim = 1, inner_type = type;
2648 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2649 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2650 ndim++, inner_type = TREE_TYPE (inner_type))
2653 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2655 if (mech != By_Descriptor_NCA
2656 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2657 for (i = ndim - 1, inner_type = type;
2658 i >= 0;
2659 i--, inner_type = TREE_TYPE (inner_type))
2660 idx_arr[i] = TYPE_DOMAIN (inner_type);
2661 else
2662 for (i = 0, inner_type = type;
2663 i < ndim;
2664 i++, inner_type = TREE_TYPE (inner_type))
2665 idx_arr[i] = TYPE_DOMAIN (inner_type);
2667 /* Now get the DTYPE value. */
2668 switch (TREE_CODE (type))
2670 case INTEGER_TYPE:
2671 case ENUMERAL_TYPE:
2672 case BOOLEAN_TYPE:
2673 if (TYPE_VAX_FLOATING_POINT_P (type))
2674 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2676 case 6:
2677 dtype = 10;
2678 break;
2679 case 9:
2680 dtype = 11;
2681 break;
2682 case 15:
2683 dtype = 27;
2684 break;
2686 else
2687 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2689 case 8:
2690 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2691 break;
2692 case 16:
2693 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2694 break;
2695 case 32:
2696 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2697 break;
2698 case 64:
2699 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2700 break;
2701 case 128:
2702 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2703 break;
2705 break;
2707 case REAL_TYPE:
2708 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2709 break;
2711 case COMPLEX_TYPE:
2712 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2713 && TYPE_VAX_FLOATING_POINT_P (type))
2714 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2716 case 6:
2717 dtype = 12;
2718 break;
2719 case 9:
2720 dtype = 13;
2721 break;
2722 case 15:
2723 dtype = 29;
2725 else
2726 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54 : 55;
2727 break;
2729 case ARRAY_TYPE:
2730 dtype = 14;
2731 break;
2733 default:
2734 break;
2737 /* Get the CLASS value. */
2738 switch (mech)
2740 case By_Descriptor_A:
2741 klass = 4;
2742 break;
2743 case By_Descriptor_NCA:
2744 klass = 10;
2745 break;
2746 case By_Descriptor_SB:
2747 klass = 15;
2748 break;
2749 case By_Descriptor:
2750 case By_Descriptor_S:
2751 default:
2752 klass = 1;
2753 break;
2756 /* Make the type for a 64-bit descriptor for VMS. The first six fields
2757 are the same for all types. */
2758 field_list64
2759 = chainon (field_list64,
2760 make_descriptor_field ("MBO", gnat_type_for_size (16, 1),
2761 record64_type, size_int (1)));
2762 field_list64
2763 = chainon (field_list64,
2764 make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1),
2765 record64_type, size_int (dtype)));
2766 field_list64
2767 = chainon (field_list64,
2768 make_descriptor_field ("CLASS", gnat_type_for_size (8, 1),
2769 record64_type, size_int (klass)));
2770 field_list64
2771 = chainon (field_list64,
2772 make_descriptor_field ("MBMO", gnat_type_for_size (32, 1),
2773 record64_type, ssize_int (-1)));
2774 field_list64
2775 = chainon (field_list64,
2776 make_descriptor_field ("LENGTH", gnat_type_for_size (64, 1),
2777 record64_type,
2778 size_in_bytes (mech == By_Descriptor_A
2779 ? inner_type : type)));
2781 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
2783 field_list64
2784 = chainon (field_list64,
2785 make_descriptor_field ("POINTER", pointer64_type,
2786 record64_type,
2787 build_unary_op (ADDR_EXPR,
2788 pointer64_type,
2789 build0 (PLACEHOLDER_EXPR,
2790 type))));
2792 switch (mech)
2794 case By_Descriptor:
2795 case By_Descriptor_S:
2796 break;
2798 case By_Descriptor_SB:
2799 field_list64
2800 = chainon (field_list64,
2801 make_descriptor_field
2802 ("SB_L1", gnat_type_for_size (64, 1), record64_type,
2803 TREE_CODE (type) == ARRAY_TYPE
2804 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2805 field_list64
2806 = chainon (field_list64,
2807 make_descriptor_field
2808 ("SB_U1", gnat_type_for_size (64, 1), record64_type,
2809 TREE_CODE (type) == ARRAY_TYPE
2810 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2811 break;
2813 case By_Descriptor_A:
2814 case By_Descriptor_NCA:
2815 field_list64 = chainon (field_list64,
2816 make_descriptor_field ("SCALE",
2817 gnat_type_for_size (8, 1),
2818 record64_type,
2819 size_zero_node));
2821 field_list64 = chainon (field_list64,
2822 make_descriptor_field ("DIGITS",
2823 gnat_type_for_size (8, 1),
2824 record64_type,
2825 size_zero_node));
2827 field_list64
2828 = chainon (field_list64,
2829 make_descriptor_field
2830 ("AFLAGS", gnat_type_for_size (8, 1), record64_type,
2831 size_int (mech == By_Descriptor_NCA
2832 ? 0
2833 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2834 : (TREE_CODE (type) == ARRAY_TYPE
2835 && TYPE_CONVENTION_FORTRAN_P (type)
2836 ? 224 : 192))));
2838 field_list64 = chainon (field_list64,
2839 make_descriptor_field ("DIMCT",
2840 gnat_type_for_size (8, 1),
2841 record64_type,
2842 size_int (ndim)));
2844 field_list64 = chainon (field_list64,
2845 make_descriptor_field ("MBZ",
2846 gnat_type_for_size (32, 1),
2847 record64_type,
2848 size_int (0)));
2849 field_list64 = chainon (field_list64,
2850 make_descriptor_field ("ARSIZE",
2851 gnat_type_for_size (64, 1),
2852 record64_type,
2853 size_in_bytes (type)));
2855 /* Now build a pointer to the 0,0,0... element. */
2856 tem = build0 (PLACEHOLDER_EXPR, type);
2857 for (i = 0, inner_type = type; i < ndim;
2858 i++, inner_type = TREE_TYPE (inner_type))
2859 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2860 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2861 NULL_TREE, NULL_TREE);
2863 field_list64
2864 = chainon (field_list64,
2865 make_descriptor_field
2866 ("A0",
2867 build_pointer_type_for_mode (inner_type, DImode, false),
2868 record64_type,
2869 build1 (ADDR_EXPR,
2870 build_pointer_type_for_mode (inner_type, DImode,
2871 false),
2872 tem)));
2874 /* Next come the addressing coefficients. */
2875 tem = size_one_node;
2876 for (i = 0; i < ndim; i++)
2878 char fname[3];
2879 tree idx_length
2880 = size_binop (MULT_EXPR, tem,
2881 size_binop (PLUS_EXPR,
2882 size_binop (MINUS_EXPR,
2883 TYPE_MAX_VALUE (idx_arr[i]),
2884 TYPE_MIN_VALUE (idx_arr[i])),
2885 size_int (1)));
2887 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
2888 fname[1] = '0' + i, fname[2] = 0;
2889 field_list64
2890 = chainon (field_list64,
2891 make_descriptor_field (fname,
2892 gnat_type_for_size (64, 1),
2893 record64_type, idx_length));
2895 if (mech == By_Descriptor_NCA)
2896 tem = idx_length;
2899 /* Finally here are the bounds. */
2900 for (i = 0; i < ndim; i++)
2902 char fname[3];
2904 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2905 field_list64
2906 = chainon (field_list64,
2907 make_descriptor_field
2908 (fname, gnat_type_for_size (64, 1), record64_type,
2909 TYPE_MIN_VALUE (idx_arr[i])));
2911 fname[0] = 'U';
2912 field_list64
2913 = chainon (field_list64,
2914 make_descriptor_field
2915 (fname, gnat_type_for_size (64, 1), record64_type,
2916 TYPE_MAX_VALUE (idx_arr[i])));
2918 break;
2920 default:
2921 post_error ("unsupported descriptor type for &", gnat_entity);
2924 TYPE_NAME (record64_type) = create_concat_name (gnat_entity, "DESC64");
2925 finish_record_type (record64_type, field_list64, 0, false);
2926 return record64_type;
2929 /* Utility routine for above code to make a field. */
2931 static tree
2932 make_descriptor_field (const char *name, tree type,
2933 tree rec_type, tree initial)
2935 tree field
2936 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2938 DECL_INITIAL (field) = initial;
2939 return field;
2942 /* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
2943 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
2944 which the VMS descriptor is passed. */
2946 static tree
2947 convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
2949 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
2950 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
2951 /* The CLASS field is the 3rd field in the descriptor. */
2952 tree klass = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
2953 /* The POINTER field is the 6th field in the descriptor. */
2954 tree pointer = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (klass)));
2956 /* Retrieve the value of the POINTER field. */
2957 tree gnu_expr64
2958 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
2960 if (POINTER_TYPE_P (gnu_type))
2961 return convert (gnu_type, gnu_expr64);
2963 else if (TYPE_IS_FAT_POINTER_P (gnu_type))
2965 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
2966 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
2967 tree template_type = TREE_TYPE (p_bounds_type);
2968 tree min_field = TYPE_FIELDS (template_type);
2969 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
2970 tree template_tree, template_addr, aflags, dimct, t, u;
2971 /* See the head comment of build_vms_descriptor. */
2972 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
2973 tree lfield, ufield;
2975 /* Convert POINTER to the pointer-to-array type. */
2976 gnu_expr64 = convert (p_array_type, gnu_expr64);
2978 switch (iklass)
2980 case 1: /* Class S */
2981 case 15: /* Class SB */
2982 /* Build {1, LENGTH} template; LENGTH64 is the 5th field. */
2983 t = TREE_CHAIN (TREE_CHAIN (klass));
2984 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2985 t = tree_cons (min_field,
2986 convert (TREE_TYPE (min_field), integer_one_node),
2987 tree_cons (max_field,
2988 convert (TREE_TYPE (max_field), t),
2989 NULL_TREE));
2990 template_tree = gnat_build_constructor (template_type, t);
2991 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
2993 /* For class S, we are done. */
2994 if (iklass == 1)
2995 break;
2997 /* Test that we really have a SB descriptor, like DEC Ada. */
2998 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
2999 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
3000 u = build_binary_op (EQ_EXPR, boolean_type_node, t, u);
3001 /* If so, there is already a template in the descriptor and
3002 it is located right after the POINTER field. The fields are
3003 64 bits, so they must be repacked. */
3004 t = TREE_CHAIN (pointer);
3005 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3006 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3008 t = TREE_CHAIN (t);
3009 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3010 ufield = convert
3011 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3013 /* Build the template in the form of a constructor. */
3014 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3015 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3016 ufield, NULL_TREE));
3017 template_tree = gnat_build_constructor (template_type, t);
3019 /* Otherwise use the {1, LENGTH} template we build above. */
3020 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3021 build_unary_op (ADDR_EXPR, p_bounds_type,
3022 template_tree),
3023 template_addr);
3024 break;
3026 case 4: /* Class A */
3027 /* The AFLAGS field is the 3rd field after the pointer in the
3028 descriptor. */
3029 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer)));
3030 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3031 /* The DIMCT field is the next field in the descriptor after
3032 aflags. */
3033 t = TREE_CHAIN (t);
3034 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3035 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3036 or FL_COEFF or FL_BOUNDS not set. */
3037 u = build_int_cst (TREE_TYPE (aflags), 192);
3038 u = build_binary_op (TRUTH_OR_EXPR, boolean_type_node,
3039 build_binary_op (NE_EXPR, boolean_type_node,
3040 dimct,
3041 convert (TREE_TYPE (dimct),
3042 size_one_node)),
3043 build_binary_op (NE_EXPR, boolean_type_node,
3044 build2 (BIT_AND_EXPR,
3045 TREE_TYPE (aflags),
3046 aflags, u),
3047 u));
3048 /* There is already a template in the descriptor and it is located
3049 in block 3. The fields are 64 bits, so they must be repacked. */
3050 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN
3051 (t)))));
3052 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3053 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3055 t = TREE_CHAIN (t);
3056 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3057 ufield = convert
3058 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3060 /* Build the template in the form of a constructor. */
3061 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3062 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3063 ufield, NULL_TREE));
3064 template_tree = gnat_build_constructor (template_type, t);
3065 template_tree = build3 (COND_EXPR, template_type, u,
3066 build_call_raise (CE_Length_Check_Failed, Empty,
3067 N_Raise_Constraint_Error),
3068 template_tree);
3069 template_addr
3070 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
3071 break;
3073 case 10: /* Class NCA */
3074 default:
3075 post_error ("unsupported descriptor type for &", gnat_subprog);
3076 template_addr = integer_zero_node;
3077 break;
3080 /* Build the fat pointer in the form of a constructor. */
3081 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr64,
3082 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3083 template_addr, NULL_TREE));
3084 return gnat_build_constructor (gnu_type, t);
3087 else
3088 gcc_unreachable ();
3091 /* Convert GNU_EXPR, a pointer to a 32bit VMS descriptor, to GNU_TYPE, a
3092 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3093 which the VMS descriptor is passed. */
3095 static tree
3096 convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3098 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3099 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3100 /* The CLASS field is the 3rd field in the descriptor. */
3101 tree klass = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
3102 /* The POINTER field is the 4th field in the descriptor. */
3103 tree pointer = TREE_CHAIN (klass);
3105 /* Retrieve the value of the POINTER field. */
3106 tree gnu_expr32
3107 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
3109 if (POINTER_TYPE_P (gnu_type))
3110 return convert (gnu_type, gnu_expr32);
3112 else if (TYPE_IS_FAT_POINTER_P (gnu_type))
3114 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3115 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3116 tree template_type = TREE_TYPE (p_bounds_type);
3117 tree min_field = TYPE_FIELDS (template_type);
3118 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3119 tree template_tree, template_addr, aflags, dimct, t, u;
3120 /* See the head comment of build_vms_descriptor. */
3121 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
3123 /* Convert POINTER to the pointer-to-array type. */
3124 gnu_expr32 = convert (p_array_type, gnu_expr32);
3126 switch (iklass)
3128 case 1: /* Class S */
3129 case 15: /* Class SB */
3130 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
3131 t = TYPE_FIELDS (desc_type);
3132 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3133 t = tree_cons (min_field,
3134 convert (TREE_TYPE (min_field), integer_one_node),
3135 tree_cons (max_field,
3136 convert (TREE_TYPE (max_field), t),
3137 NULL_TREE));
3138 template_tree = gnat_build_constructor (template_type, t);
3139 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
3141 /* For class S, we are done. */
3142 if (iklass == 1)
3143 break;
3145 /* Test that we really have a SB descriptor, like DEC Ada. */
3146 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
3147 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
3148 u = build_binary_op (EQ_EXPR, boolean_type_node, t, u);
3149 /* If so, there is already a template in the descriptor and
3150 it is located right after the POINTER field. */
3151 t = TREE_CHAIN (pointer);
3152 template_tree
3153 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3154 /* Otherwise use the {1, LENGTH} template we build above. */
3155 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3156 build_unary_op (ADDR_EXPR, p_bounds_type,
3157 template_tree),
3158 template_addr);
3159 break;
3161 case 4: /* Class A */
3162 /* The AFLAGS field is the 7th field in the descriptor. */
3163 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer)));
3164 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3165 /* The DIMCT field is the 8th field in the descriptor. */
3166 t = TREE_CHAIN (t);
3167 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3168 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3169 or FL_COEFF or FL_BOUNDS not set. */
3170 u = build_int_cst (TREE_TYPE (aflags), 192);
3171 u = build_binary_op (TRUTH_OR_EXPR, boolean_type_node,
3172 build_binary_op (NE_EXPR, boolean_type_node,
3173 dimct,
3174 convert (TREE_TYPE (dimct),
3175 size_one_node)),
3176 build_binary_op (NE_EXPR, boolean_type_node,
3177 build2 (BIT_AND_EXPR,
3178 TREE_TYPE (aflags),
3179 aflags, u),
3180 u));
3181 /* There is already a template in the descriptor and it is
3182 located at the start of block 3 (12th field). */
3183 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (t))));
3184 template_tree
3185 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3186 template_tree = build3 (COND_EXPR, TREE_TYPE (t), u,
3187 build_call_raise (CE_Length_Check_Failed, Empty,
3188 N_Raise_Constraint_Error),
3189 template_tree);
3190 template_addr
3191 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
3192 break;
3194 case 10: /* Class NCA */
3195 default:
3196 post_error ("unsupported descriptor type for &", gnat_subprog);
3197 template_addr = integer_zero_node;
3198 break;
3201 /* Build the fat pointer in the form of a constructor. */
3202 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr32,
3203 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3204 template_addr, NULL_TREE));
3206 return gnat_build_constructor (gnu_type, t);
3209 else
3210 gcc_unreachable ();
3213 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
3214 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
3215 pointer type of GNU_EXPR. GNAT_SUBPROG is the subprogram to which the
3216 VMS descriptor is passed. */
3218 static tree
3219 convert_vms_descriptor (tree gnu_type, tree gnu_expr, tree gnu_expr_alt_type,
3220 Entity_Id gnat_subprog)
3222 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3223 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3224 tree mbo = TYPE_FIELDS (desc_type);
3225 const char *mbostr = IDENTIFIER_POINTER (DECL_NAME (mbo));
3226 tree mbmo = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (mbo)));
3227 tree is64bit, gnu_expr32, gnu_expr64;
3229 /* If the field name is not MBO, the descriptor must be 32-bit and there is
3230 no alternate. Otherwise the primary is 64-bit and the alternate is 32-bit. */
3231 if (strcmp (mbostr, "MBO") != 0)
3232 return convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
3234 /* Build the test for a 64-bit descriptor. */
3235 mbo = build3 (COMPONENT_REF, TREE_TYPE (mbo), desc, mbo, NULL_TREE);
3236 mbmo = build3 (COMPONENT_REF, TREE_TYPE (mbmo), desc, mbmo, NULL_TREE);
3237 is64bit
3238 = build_binary_op (TRUTH_ANDIF_EXPR, boolean_type_node,
3239 build_binary_op (EQ_EXPR, boolean_type_node,
3240 convert (integer_type_node, mbo),
3241 integer_one_node),
3242 build_binary_op (EQ_EXPR, boolean_type_node,
3243 convert (integer_type_node, mbmo),
3244 integer_minus_one_node));
3246 /* Build the 2 possible end results. */
3247 gnu_expr64 = convert_vms_descriptor64 (gnu_type, gnu_expr, gnat_subprog);
3248 gnu_expr = fold_convert (gnu_expr_alt_type, gnu_expr);
3249 gnu_expr32 = convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
3251 return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
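/* A minimal sketch (not built; a purely illustrative host-side view whose
   field layout is an assumption) of the dispatch above: a descriptor is
   treated as 64-bit when its first field is named MBO and, at run time,
   MBO == 1 and MBMO == -1, which is how build_vms_descriptor fills in
   those fields; otherwise the 32-bit layout is assumed.  */
#if 0
struct example_desc64_prefix
{
  unsigned short mbo;    /* "MBO", 1 in a 64-bit descriptor.  */
  unsigned char dtype;
  unsigned char klass;
  int mbmo;              /* "MBMO", -1 in a 64-bit descriptor.  */
};

static bool
example_is_64bit_descriptor (const struct example_desc64_prefix *d)
{
  return d->mbo == 1 && d->mbmo == -1;
}
#endif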
3254 /* Build a stub for the subprogram specified by the GCC tree GNU_SUBPROG
3255 and the GNAT node GNAT_SUBPROG. */
3257 void
3258 build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
3260 tree gnu_subprog_type, gnu_subprog_addr, gnu_subprog_call;
3261 tree gnu_stub_param, gnu_param_list, gnu_arg_types, gnu_param;
3262 tree gnu_stub_decl = DECL_FUNCTION_STUB (gnu_subprog);
3263 tree gnu_body;
3265 gnu_subprog_type = TREE_TYPE (gnu_subprog);
3266 gnu_param_list = NULL_TREE;
3268 begin_subprog_body (gnu_stub_decl);
3269 gnat_pushlevel ();
3271 start_stmt_group ();
3273 /* Loop over the parameters of the stub and translate any of them
3274 passed by descriptor into a by-reference one. */
3275 for (gnu_stub_param = DECL_ARGUMENTS (gnu_stub_decl),
3276 gnu_arg_types = TYPE_ARG_TYPES (gnu_subprog_type);
3277 gnu_stub_param;
3278 gnu_stub_param = TREE_CHAIN (gnu_stub_param),
3279 gnu_arg_types = TREE_CHAIN (gnu_arg_types))
3281 if (DECL_BY_DESCRIPTOR_P (gnu_stub_param))
3282 gnu_param
3283 = convert_vms_descriptor (TREE_VALUE (gnu_arg_types),
3284 gnu_stub_param,
3285 DECL_PARM_ALT_TYPE (gnu_stub_param),
3286 gnat_subprog);
3287 else
3288 gnu_param = gnu_stub_param;
3290 gnu_param_list = tree_cons (NULL_TREE, gnu_param, gnu_param_list);
3293 gnu_body = end_stmt_group ();
3295 /* Invoke the internal subprogram. */
3296 gnu_subprog_addr = build1 (ADDR_EXPR, build_pointer_type (gnu_subprog_type),
3297 gnu_subprog);
3298 gnu_subprog_call = build_call_list (TREE_TYPE (gnu_subprog_type),
3299 gnu_subprog_addr,
3300 nreverse (gnu_param_list));
3302 /* Propagate the return value, if any. */
3303 if (VOID_TYPE_P (TREE_TYPE (gnu_subprog_type)))
3304 append_to_statement_list (gnu_subprog_call, &gnu_body);
3305 else
3306 append_to_statement_list (build_return_expr (DECL_RESULT (gnu_stub_decl),
3307 gnu_subprog_call),
3308 &gnu_body);
3310 gnat_poplevel ();
3312 allocate_struct_function (gnu_stub_decl, false);
3313 end_subprog_body (gnu_body);
3316 /* Build a type to be used to represent an aliased object whose nominal
3317 type is an unconstrained array. This consists of a RECORD_TYPE containing
3318 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
3319 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
3320 is used to represent an arbitrary unconstrained object. Use NAME
3321 as the name of the record. */
3323 tree
3324 build_unc_object_type (tree template_type, tree object_type, tree name)
3326 tree type = make_node (RECORD_TYPE);
3327 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
3328 template_type, type, 0, 0, 0, 1);
3329 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
3330 type, 0, 0, 0, 1);
3332 TYPE_NAME (type) = name;
3333 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3334 finish_record_type (type,
3335 chainon (chainon (NULL_TREE, template_field),
3336 array_field),
3337 0, true);
3339 return type;
3342 /* Same, taking a thin or fat pointer type instead of a template type. */
3344 tree
3345 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3346 tree name)
3348 tree template_type;
3350 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3352 template_type
3353 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
3354 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3355 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3356 return build_unc_object_type (template_type, object_type, name);
3359 /* Shift the component offsets within an unconstrained object TYPE to make it
3360 suitable for use as a designated type for thin pointers. */
3362 void
3363 shift_unc_components_for_thin_pointers (tree type)
3365 /* Thin pointer values designate the ARRAY data of an unconstrained object,
3366 allocated past the BOUNDS template. The designated type is adjusted to
3367 have ARRAY at position zero and the template at a negative offset, so
3368 that COMPONENT_REFs on (*thin_ptr) designate the proper location. */
3370 tree bounds_field = TYPE_FIELDS (type);
3371 tree array_field = TREE_CHAIN (TYPE_FIELDS (type));
3373 DECL_FIELD_OFFSET (bounds_field)
3374 = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));
3376 DECL_FIELD_OFFSET (array_field) = size_zero_node;
3377 DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
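/* After the shift, the layout seen through a thin pointer is roughly

     offset < 0 : BOUNDS template
     offset   0 : ARRAY data

   so dereferencing the thin pointer lands directly on the array data
   while the bounds stay reachable at a negative offset.  */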
3380 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
3381 In the normal case this is just two adjustments, but we have more to
3382 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
3384 void
3385 update_pointer_to (tree old_type, tree new_type)
3387 tree ptr = TYPE_POINTER_TO (old_type);
3388 tree ref = TYPE_REFERENCE_TO (old_type);
3389 tree ptr1, ref1;
3390 tree type;
3392 /* If this is the main variant, process all the other variants first. */
3393 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3394 for (type = TYPE_NEXT_VARIANT (old_type); type;
3395 type = TYPE_NEXT_VARIANT (type))
3396 update_pointer_to (type, new_type);
3398 /* If no pointers and no references, we are done. */
3399 if (!ptr && !ref)
3400 return;
3402 /* Merge the old type qualifiers in the new type.
3404 Each old variant has qualifiers for specific reasons, and the new
3405 designated type as well. Each set of qualifiers represents useful
3406 information grabbed at some point, and merging the two simply unifies
3407 these inputs into the final type description.
3409 Consider for instance a volatile type frozen after an access to constant
3410 type designating it; after the designated type's freeze, we get here with
3411 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
3412 when the access type was processed. We will make a volatile and readonly
3413 designated type, because that's what it really is.
3415 We might also get here for a non-dummy OLD_TYPE variant with different
3416 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
3417 to private record type elaboration (see the comments around the call to
3418 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
3419 the qualifiers in those cases too, to avoid accidentally discarding the
3420 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
3421 new_type
3422 = build_qualified_type (new_type,
3423 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
3425 /* If old type and new type are identical, there is nothing to do. */
3426 if (old_type == new_type)
3427 return;
3429 /* Otherwise, first handle the simple case. */
3430 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
3432 TYPE_POINTER_TO (new_type) = ptr;
3433 TYPE_REFERENCE_TO (new_type) = ref;
3435 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3436 for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
3437 ptr1 = TYPE_NEXT_VARIANT (ptr1))
3438 TREE_TYPE (ptr1) = new_type;
3440 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3441 for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
3442 ref1 = TYPE_NEXT_VARIANT (ref1))
3443 TREE_TYPE (ref1) = new_type;
3446 /* Now deal with the unconstrained array case. In this case the "pointer"
3447 is actually a RECORD_TYPE where both fields are pointers to dummy nodes.
3448 Turn them into pointers to the correct types using update_pointer_to. */
3449 else if (!TYPE_IS_FAT_POINTER_P (ptr))
3450 gcc_unreachable ();
3452 else
3454 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
3455 tree array_field = TYPE_FIELDS (ptr);
3456 tree bounds_field = TREE_CHAIN (TYPE_FIELDS (ptr));
3457 tree new_ptr = TYPE_POINTER_TO (new_type);
3458 tree new_ref;
3459 tree var;
3461 /* Make pointers to the dummy template point to the real template. */
3462 update_pointer_to
3463 (TREE_TYPE (TREE_TYPE (bounds_field)),
3464 TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_ptr)))));
3466 /* The references to the template bounds present in the array type
3467 are made through a PLACEHOLDER_EXPR of type NEW_PTR. Since we
3468 are updating PTR to make it a full replacement for NEW_PTR as
3469 pointer to NEW_TYPE, we must rework the PLACEHOLDER_EXPR so as
3470 to make it of type PTR. */
3471 new_ref = build3 (COMPONENT_REF, TREE_TYPE (bounds_field),
3472 build0 (PLACEHOLDER_EXPR, ptr),
3473 bounds_field, NULL_TREE);
3475 /* Create the new array for the new PLACEHOLDER_EXPR and make pointers
3476 to the dummy array point to it. */
3477 update_pointer_to
3478 (TREE_TYPE (TREE_TYPE (array_field)),
3479 substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))),
3480 TREE_CHAIN (TYPE_FIELDS (new_ptr)), new_ref));
3482 /* Make PTR the pointer to NEW_TYPE. */
3483 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
3484 = TREE_TYPE (new_type) = ptr;
3486 /* And show the original pointer NEW_PTR to the debugger. This is the
3487 counterpart of the equivalent processing in gnat_pushdecl when the
3488 unconstrained array type is frozen after access types to it. Note
3489 that update_pointer_to can be invoked multiple times on the same
3490 couple of types because of the type variants. */
3491 if (TYPE_NAME (ptr)
3492 && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL
3493 && !DECL_ORIGINAL_TYPE (TYPE_NAME (ptr)))
3495 DECL_ORIGINAL_TYPE (TYPE_NAME (ptr)) = new_ptr;
3496 DECL_ARTIFICIAL (TYPE_NAME (ptr)) = 0;
3498 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
3499 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
3501 /* Now handle updating the allocation record, what the thin pointer
3502 points to. Update all pointers from the old record into the new
3503 one, update the type of the array field, and recompute the size. */
3504 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
3506 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
3507 = TREE_TYPE (TREE_TYPE (array_field));
3509 /* The size recomputation needs to account for alignment constraints, so
3510 we let layout_type work it out. This will reset the field offsets to
3511 what they would be in a regular record, so we shift them back to what
3512 we want them to be for a thin pointer designated type afterwards. */
3513 DECL_SIZE (TYPE_FIELDS (new_obj_rec)) = 0;
3514 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))) = 0;
3515 TYPE_SIZE (new_obj_rec) = 0;
3516 layout_type (new_obj_rec);
3518 shift_unc_components_for_thin_pointers (new_obj_rec);
3520 /* We are done, at last. */
3521 rest_of_record_type_compilation (ptr);
3525 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
3526 unconstrained one. This involves making or finding a template. */
3528 static tree
3529 convert_to_fat_pointer (tree type, tree expr)
3531 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
3532 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
3533 tree etype = TREE_TYPE (expr);
3534 tree template_tree;
3536 /* If EXPR is null, make a fat pointer that contains null pointers to the
3537 template and array. */
3538 if (integer_zerop (expr))
3539 return
3540 gnat_build_constructor
3541 (type,
3542 tree_cons (TYPE_FIELDS (type),
3543 convert (p_array_type, expr),
3544 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3545 convert (build_pointer_type (template_type),
3546 expr),
3547 NULL_TREE)));
3549 /* If EXPR is a thin pointer, make template and data from the record. */
3550 else if (TYPE_IS_THIN_POINTER_P (etype))
3552 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
3554 expr = gnat_protect_expr (expr);
3555 if (TREE_CODE (expr) == ADDR_EXPR)
3556 expr = TREE_OPERAND (expr, 0);
3557 else
3558 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
3560 template_tree = build_component_ref (expr, NULL_TREE, fields, false);
3561 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3562 build_component_ref (expr, NULL_TREE,
3563 TREE_CHAIN (fields), false));
3566 /* Otherwise, build the constructor for the template. */
3567 else
3568 template_tree = build_template (template_type, TREE_TYPE (etype), expr);
3570 /* The final result is a constructor for the fat pointer.
3572 If EXPR is an argument of a foreign convention subprogram, the type it
3573 points to is directly the component type. In this case, the expression
3574 type may not match the corresponding FIELD_DECL type at this point, so we
3575 call "convert" here to fix that up if necessary. This type consistency is
3576 required, for instance because it ensures that possible later folding of
3577 COMPONENT_REFs against this constructor always yields something of the
3578 same type as the initial reference.
3580 Note that the call to "build_template" above is still fine because it
3581 will only refer to the provided TEMPLATE_TYPE in this case. */
3582 return
3583 gnat_build_constructor
3584 (type,
3585 tree_cons (TYPE_FIELDS (type),
3586 convert (p_array_type, expr),
3587 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3588 build_unary_op (ADDR_EXPR, NULL_TREE,
3589 template_tree),
3590 NULL_TREE)));
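/* The fat pointer returned above is thus a two-field constructor: the
   first field is the (possibly converted) pointer to the array data and
   the second field is a pointer to the template, both of them null when
   the input expression is a null pointer.  */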
3593 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
3594 is something that is a fat pointer, so convert to it first if EXPR
3595 is not already a fat pointer. */
3597 static tree
3598 convert_to_thin_pointer (tree type, tree expr)
3600 if (!TYPE_IS_FAT_POINTER_P (TREE_TYPE (expr)))
3601 expr
3602 = convert_to_fat_pointer
3603 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
3605 /* We get the pointer to the data and use a NOP_EXPR to make it the
3606 proper GCC type. */
3607 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
3608 false);
3609 expr = build1 (NOP_EXPR, type, expr);
3611 return expr;
3614 /* Create an expression whose value is that of EXPR,
3615 converted to type TYPE. The TREE_TYPE of the value
3616 is always TYPE. This function implements all reasonable
3617 conversions; callers should filter out those that are
3618 not permitted by the language being compiled. */
3620 tree
3621 convert (tree type, tree expr)
3623 tree etype = TREE_TYPE (expr);
3624 enum tree_code ecode = TREE_CODE (etype);
3625 enum tree_code code = TREE_CODE (type);
3627 /* If the expression is already of the right type, we are done. */
3628 if (etype == type)
3629 return expr;
3631 /* If both input and output have padding and are of variable size, do this
3632 as an unchecked conversion. Likewise if one is a mere variant of the
3633 other, so we avoid a pointless unpad/repad sequence. */
3634 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
3635 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
3636 && (!TREE_CONSTANT (TYPE_SIZE (type))
3637 || !TREE_CONSTANT (TYPE_SIZE (etype))
3638 || gnat_types_compatible_p (type, etype)
3639 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
3640 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
3643 /* If the output type has padding, convert to the inner type and make a
3644 constructor to build the record, unless a variable size is involved. */
3645 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
3647 /* If we previously converted from another type and our type is
3648 of variable size, remove the conversion to avoid the need for
3649 variable-sized temporaries. Likewise for a conversion between
3650 original and packable version. */
3651 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3652 && (!TREE_CONSTANT (TYPE_SIZE (type))
3653 || (ecode == RECORD_TYPE
3654 && TYPE_NAME (etype)
3655 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
3656 expr = TREE_OPERAND (expr, 0);
3658 /* If we are just removing the padding from expr, convert the original
3659 object if we have variable size in order to avoid the need for some
3660 variable-sized temporaries. Likewise if the padding is a variant
3661 of the other, so we avoid a pointless unpad/repad sequence. */
3662 if (TREE_CODE (expr) == COMPONENT_REF
3663 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
3664 && (!TREE_CONSTANT (TYPE_SIZE (type))
3665 || gnat_types_compatible_p (type,
3666 TREE_TYPE (TREE_OPERAND (expr, 0)))
3667 || (ecode == RECORD_TYPE
3668 && TYPE_NAME (etype)
3669 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
3670 return convert (type, TREE_OPERAND (expr, 0));
3672 /* If the inner type is of self-referential size and the expression type
3673 is a record, do this as an unchecked conversion. But first pad the
3674 expression if possible to have the same size on both sides. */
3675 if (ecode == RECORD_TYPE
3676 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
3678 if (TREE_CONSTANT (TYPE_SIZE (etype)))
3679 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
3680 false, false, false, true), expr);
3681 return unchecked_convert (type, expr, false);
3684 /* If we are converting between array types with variable size, do the
3685 final conversion as an unchecked conversion, again to avoid the need
3686 for some variable-sized temporaries. If valid, this conversion is
3687 very likely purely technical and without real effects. */
3688 if (ecode == ARRAY_TYPE
3689 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
3690 && !TREE_CONSTANT (TYPE_SIZE (etype))
3691 && !TREE_CONSTANT (TYPE_SIZE (type)))
3692 return unchecked_convert (type,
3693 convert (TREE_TYPE (TYPE_FIELDS (type)),
3694 expr),
3695 false);
3697 return
3698 gnat_build_constructor (type,
3699 tree_cons (TYPE_FIELDS (type),
3700 convert (TREE_TYPE
3701 (TYPE_FIELDS (type)),
3702 expr),
3703 NULL_TREE));
3706 /* If the input type has padding, remove it and convert to the output type.
3707 The conditions ordering is arranged to ensure that the output type is not
3708 a padding type here, as it is not clear whether the conversion would
3709 always be correct if this was to happen. */
3710 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
3712 tree unpadded;
3714 /* If we have just converted to this padded type, just get the
3715 inner expression. */
3716 if (TREE_CODE (expr) == CONSTRUCTOR
3717 && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
3718 && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
3719 == TYPE_FIELDS (etype))
3720 unpadded
3721 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;
3723 /* Otherwise, build an explicit component reference. */
3724 else
3725 unpadded
3726 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
3728 return convert (type, unpadded);
3731 /* If the input is a biased type, adjust first. */
3732 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
3733 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
3734 fold_convert (TREE_TYPE (etype),
3735 expr),
3736 TYPE_MIN_VALUE (etype)));
3738 /* If the input is a justified modular type, we need to extract the actual
3739 object before converting it to any other type with the exceptions of an
3740 unconstrained array or of a mere type variant. It is useful to avoid the
3741 extraction and conversion in the type variant case because it could end
3742 up replacing a VAR_DECL expr by a constructor and we might be about to
3743 take the address of the result. */
3744 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
3745 && code != UNCONSTRAINED_ARRAY_TYPE
3746 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
3747 return convert (type, build_component_ref (expr, NULL_TREE,
3748 TYPE_FIELDS (etype), false));
3750 /* If converting to a type that contains a template, convert to the data
3751 type and then build the template. */
3752 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
3754 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
3756 /* If the source already has a template, get a reference to the
3757 associated array only, as we are going to rebuild a template
3758 for the target type anyway. */
3759 expr = maybe_unconstrained_array (expr);
3761 return
3762 gnat_build_constructor
3763 (type,
3764 tree_cons (TYPE_FIELDS (type),
3765 build_template (TREE_TYPE (TYPE_FIELDS (type)),
3766 obj_type, NULL_TREE),
3767 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3768 convert (obj_type, expr), NULL_TREE)));
3771 /* There are some special cases of expressions that we process
3772 specially. */
3773 switch (TREE_CODE (expr))
3775 case ERROR_MARK:
3776 return expr;
3778 case NULL_EXPR:
3779 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
3780 conversion in gnat_expand_expr. NULL_EXPR does not represent
3781 an actual value, so no conversion is needed. */
3782 expr = copy_node (expr);
3783 TREE_TYPE (expr) = type;
3784 return expr;
3786 case STRING_CST:
3787 /* If we are converting a STRING_CST to another constrained array type,
3788 just make a new one in the proper type. */
3789 if (code == ecode && AGGREGATE_TYPE_P (etype)
3790 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
3791 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
3793 expr = copy_node (expr);
3794 TREE_TYPE (expr) = type;
3795 return expr;
3797 break;
3799 case VECTOR_CST:
3800 /* If we are converting a VECTOR_CST to a mere variant type, just make
3801 a new one in the proper type. */
3802 if (code == ecode && gnat_types_compatible_p (type, etype))
3804 expr = copy_node (expr);
3805 TREE_TYPE (expr) = type;
3806 return expr;
3809 case CONSTRUCTOR:
3810 /* If we are converting a CONSTRUCTOR to a mere variant type, just make
3811 a new one in the proper type. */
3812 if (code == ecode && gnat_types_compatible_p (type, etype))
3814 expr = copy_node (expr);
3815 TREE_TYPE (expr) = type;
3816 return expr;
3819 /* Likewise for a conversion between original and packable version, or
3820 conversion between types of the same size and with the same list of
3821 fields, but we have to work harder to preserve type consistency. */
3822 if (code == ecode
3823 && code == RECORD_TYPE
3824 && (TYPE_NAME (type) == TYPE_NAME (etype)
3825 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
3828 VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
3829 unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
3830 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, len);
3831 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
3832 unsigned HOST_WIDE_INT idx;
3833 tree index, value;
3835 /* Whether we need to clear TREE_CONSTANT et al. on the output
3836 constructor when we convert in place. */
3837 bool clear_constant = false;
3839 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
3841 constructor_elt *elt;
3842 /* We expect only simple constructors. */
3843 if (!SAME_FIELD_P (index, efield))
3844 break;
3845 /* The field must be the same. */
3846 if (!SAME_FIELD_P (efield, field))
3847 break;
3848 elt = VEC_quick_push (constructor_elt, v, NULL);
3849 elt->index = field;
3850 elt->value = convert (TREE_TYPE (field), value);
3852 /* If packing has made this field a bitfield and the input
3853 value couldn't be emitted statically any more, we need to
3854 clear TREE_CONSTANT on our output. */
3855 if (!clear_constant
3856 && TREE_CONSTANT (expr)
3857 && !CONSTRUCTOR_BITFIELD_P (efield)
3858 && CONSTRUCTOR_BITFIELD_P (field)
3859 && !initializer_constant_valid_for_bitfield_p (value))
3860 clear_constant = true;
3862 efield = TREE_CHAIN (efield);
3863 field = TREE_CHAIN (field);
3866 /* If we have been able to match and convert all the input fields
3867 to their output type, convert in place now. We'll fall back to a
3868 view conversion downstream otherwise. */
3869 if (idx == len)
3871 expr = copy_node (expr);
3872 TREE_TYPE (expr) = type;
3873 CONSTRUCTOR_ELTS (expr) = v;
3874 if (clear_constant)
3875 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
3876 return expr;
3880 /* Likewise for a conversion between array type and vector type with a
3881 compatible representative array. */
3882 else if (code == VECTOR_TYPE
3883 && ecode == ARRAY_TYPE
3884 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
3885 etype))
3887 VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
3888 unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
3889 VEC(constructor_elt,gc) *v;
3890 unsigned HOST_WIDE_INT ix;
3891 tree value;
3893 /* Build a VECTOR_CST from a *constant* array constructor. */
3894 if (TREE_CONSTANT (expr))
3896 bool constant_p = true;
3898 /* Iterate through elements and check if all constructor
3899 elements are *_CSTs. */
3900 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
3901 if (!CONSTANT_CLASS_P (value))
3903 constant_p = false;
3904 break;
3907 if (constant_p)
3908 return build_vector_from_ctor (type,
3909 CONSTRUCTOR_ELTS (expr));
3912 /* Otherwise, build a regular vector constructor. */
3913 v = VEC_alloc (constructor_elt, gc, len);
3914 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
3916 constructor_elt *elt = VEC_quick_push (constructor_elt, v, NULL);
3917 elt->index = NULL_TREE;
3918 elt->value = value;
3920 expr = copy_node (expr);
3921 TREE_TYPE (expr) = type;
3922 CONSTRUCTOR_ELTS (expr) = v;
3923 return expr;
3925 break;
3927 case UNCONSTRAINED_ARRAY_REF:
3928 /* Convert this to the type of the inner array by getting the address of
3929 the array from the template. */
3930 expr = TREE_OPERAND (expr, 0);
3931 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3932 build_component_ref (expr, NULL_TREE,
3933 TYPE_FIELDS
3934 (TREE_TYPE (expr)),
3935 false));
3936 etype = TREE_TYPE (expr);
3937 ecode = TREE_CODE (etype);
3938 break;
3940 case VIEW_CONVERT_EXPR:
3942 /* GCC 4.x is very sensitive to type consistency overall, and view
3943 conversions thus are very frequent. Even though just "convert"ing
3944 the inner operand to the output type is fine in most cases, it
3945 might expose unexpected input/output type mismatches in special
3946 circumstances so we avoid such recursive calls when we can. */
3947 tree op0 = TREE_OPERAND (expr, 0);
3949 /* If we are converting back to the original type, we can just
3950 lift the input conversion. This is a common occurrence with
3951 switches back-and-forth amongst type variants. */
3952 if (type == TREE_TYPE (op0))
3953 return op0;
3955 /* Otherwise, if we're converting between two aggregate or vector
3956 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
3957 target type in place or to just convert the inner expression. */
3958 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
3959 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
3961 /* If we are converting between mere variants, we can just
3962 substitute the VIEW_CONVERT_EXPR in place. */
3963 if (gnat_types_compatible_p (type, etype))
3964 return build1 (VIEW_CONVERT_EXPR, type, op0);
3966 /* Otherwise, we may just bypass the input view conversion unless
3967 one of the types is a fat pointer, which is handled by
3968 specialized code below which relies on exact type matching. */
3969 else if (!TYPE_IS_FAT_POINTER_P (type)
3970 && !TYPE_IS_FAT_POINTER_P (etype))
3971 return convert (type, op0);
3974 break;
3976 default:
3977 break;
3980 /* Check for converting to a pointer to an unconstrained array. */
3981 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
3982 return convert_to_fat_pointer (type, expr);
3984 /* If we are converting between two aggregate or vector types that are mere
3985 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
3986 to a vector type from its representative array type. */
3987 else if ((code == ecode
3988 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
3989 && gnat_types_compatible_p (type, etype))
3990 || (code == VECTOR_TYPE
3991 && ecode == ARRAY_TYPE
3992 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
3993 etype)))
3994 return build1 (VIEW_CONVERT_EXPR, type, expr);
3996 /* If we are converting between tagged types, try to upcast properly. */
3997 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
3998 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
4000 tree child_etype = etype;
4001 do {
4002 tree field = TYPE_FIELDS (child_etype);
4003 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
4004 return build_component_ref (expr, NULL_TREE, field, false);
4005 child_etype = TREE_TYPE (field);
4006 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4009 /* In all other cases of related types, make a NOP_EXPR. */
4010 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
4011 return fold_convert (type, expr);
4013 switch (code)
4015 case VOID_TYPE:
4016 return fold_build1 (CONVERT_EXPR, type, expr);
4018 case INTEGER_TYPE:
4019 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4020 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4021 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4022 return unchecked_convert (type, expr, false);
4023 else if (TYPE_BIASED_REPRESENTATION_P (type))
4024 return fold_convert (type,
4025 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4026 convert (TREE_TYPE (type), expr),
4027 TYPE_MIN_VALUE (type)));
4029 /* ... fall through ... */
4031 case ENUMERAL_TYPE:
4032 case BOOLEAN_TYPE:
4033 /* If we are converting an additive expression to an integer type
4034 with lower precision, be wary of the optimization that can be
4035 applied by convert_to_integer. There are 2 problematic cases:
4036 - if the first operand was originally of a biased type,
4037 because we could be recursively called to convert it
4038 to an intermediate type and thus rematerialize the
4039 additive operator endlessly,
4040 - if the expression contains a placeholder, because an
4041 intermediate conversion that changes the sign could
4042 be inserted and thus introduce an artificial overflow
4043 at compile time when the placeholder is substituted. */
4044 if (code == INTEGER_TYPE
4045 && ecode == INTEGER_TYPE
4046 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4047 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4049 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4051 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4052 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4053 || CONTAINS_PLACEHOLDER_P (expr))
4054 return build1 (NOP_EXPR, type, expr);
4057 return fold (convert_to_integer (type, expr));
4059 case POINTER_TYPE:
4060 case REFERENCE_TYPE:
4061 /* If converting between two pointers to records denoting
4062 both a template and type, adjust if needed to account
4063 for any differing offsets, since one might be negative. */
4064 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
4066 tree bit_diff
4067 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
4068 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
4069 tree byte_diff
4070 = size_binop (CEIL_DIV_EXPR, bit_diff, sbitsize_unit_node);
4071 expr = build1 (NOP_EXPR, type, expr);
4072 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
4073 if (integer_zerop (byte_diff))
4074 return expr;
4076 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4077 fold (convert (sizetype, byte_diff)));
4080 /* If converting to a thin pointer, handle specially. */
4081 if (TYPE_IS_THIN_POINTER_P (type)
4082 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
4083 return convert_to_thin_pointer (type, expr);
4085 /* If converting fat pointer to normal pointer, get the pointer to the
4086 array and then convert it. */
4087 else if (TYPE_IS_FAT_POINTER_P (etype))
4088 expr
4089 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
4091 return fold (convert_to_pointer (type, expr));
4093 case REAL_TYPE:
4094 return fold (convert_to_real (type, expr));
4096 case RECORD_TYPE:
4097 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4098 return
4099 gnat_build_constructor
4100 (type, tree_cons (TYPE_FIELDS (type),
4101 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
4102 NULL_TREE));
4104 /* ... fall through ... */
4106 case ARRAY_TYPE:
4107 /* In these cases, assume the front-end has validated the conversion.
4108 If the conversion is valid, it will be a bit-wise conversion, so
4109 it can be viewed as an unchecked conversion. */
4110 return unchecked_convert (type, expr, false);
4112 case UNION_TYPE:
4113 /* This is either a conversion between a tagged type and some
4114 subtype, which we have to mark as a UNION_TYPE because of
4115 overlapping fields or a conversion of an Unchecked_Union. */
4116 return unchecked_convert (type, expr, false);
4118 case UNCONSTRAINED_ARRAY_TYPE:
4119 /* If the input is a VECTOR_TYPE, convert to the representative
4120 array type first. */
4121 if (ecode == VECTOR_TYPE)
4123 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4124 etype = TREE_TYPE (expr);
4125 ecode = TREE_CODE (etype);
4128 /* If EXPR is a constrained array, take its address, convert it to a
4129 fat pointer, and then dereference it. Likewise if EXPR is a
4130 record containing both a template and a constrained array.
4131 Note that a record representing a justified modular type
4132 always represents a packed constrained array. */
4133 if (ecode == ARRAY_TYPE
4134 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4135 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4136 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4137 return
4138 build_unary_op
4139 (INDIRECT_REF, NULL_TREE,
4140 convert_to_fat_pointer (TREE_TYPE (type),
4141 build_unary_op (ADDR_EXPR,
4142 NULL_TREE, expr)));
4144 /* Do something very similar for converting one unconstrained
4145 array to another. */
4146 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4147 return
4148 build_unary_op (INDIRECT_REF, NULL_TREE,
4149 convert (TREE_TYPE (type),
4150 build_unary_op (ADDR_EXPR,
4151 NULL_TREE, expr)));
4152 else
4153 gcc_unreachable ();
4155 case COMPLEX_TYPE:
4156 return fold (convert_to_complex (type, expr));
4158 default:
4159 gcc_unreachable ();
4163 /* Remove all conversions that are done in EXP. This includes converting
4164 from a padded type or to a justified modular type. If TRUE_ADDRESS
4165 is true, always return the address of the containing object even if
4166 the address is not bit-aligned. */
4168 tree
4169 remove_conversions (tree exp, bool true_address)
4171 switch (TREE_CODE (exp))
4173 case CONSTRUCTOR:
4174 if (true_address
4175 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4176 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4177 return
4178 remove_conversions (VEC_index (constructor_elt,
4179 CONSTRUCTOR_ELTS (exp), 0)->value,
4180 true);
4181 break;
4183 case COMPONENT_REF:
4184 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4185 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4186 break;
4188 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
4189 CASE_CONVERT:
4190 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4192 default:
4193 break;
4196 return exp;
4199 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4200 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
4201 likewise return an expression pointing to the underlying array. */
4203 tree
4204 maybe_unconstrained_array (tree exp)
4206 enum tree_code code = TREE_CODE (exp);
4207 tree new_exp;
4209 switch (TREE_CODE (TREE_TYPE (exp)))
4211 case UNCONSTRAINED_ARRAY_TYPE:
4212 if (code == UNCONSTRAINED_ARRAY_REF)
4214 new_exp = TREE_OPERAND (exp, 0);
4215 new_exp
4216 = build_unary_op (INDIRECT_REF, NULL_TREE,
4217 build_component_ref (new_exp, NULL_TREE,
4218 TYPE_FIELDS
4219 (TREE_TYPE (new_exp)),
4220 false));
4221 TREE_READONLY (new_exp) = TREE_READONLY (exp);
4222 return new_exp;
4225 else if (code == NULL_EXPR)
4226 return build1 (NULL_EXPR,
4227 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
4228 (TREE_TYPE (TREE_TYPE (exp))))),
4229 TREE_OPERAND (exp, 0));
4231 case RECORD_TYPE:
4232 /* If this is a padded type, convert to the unpadded type and see if
4233 it contains a template. */
4234 if (TYPE_PADDING_P (TREE_TYPE (exp)))
4236 new_exp = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
4237 if (TREE_CODE (TREE_TYPE (new_exp)) == RECORD_TYPE
4238 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new_exp)))
4239 return
4240 build_component_ref (new_exp, NULL_TREE,
4241 TREE_CHAIN
4242 (TYPE_FIELDS (TREE_TYPE (new_exp))),
4243 false);
4245 else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
4246 return
4247 build_component_ref (exp, NULL_TREE,
4248 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))),
4249 false);
4250 break;
4252 default:
4253 break;
4256 return exp;
4259 /* If EXP's type is a VECTOR_TYPE, return EXP converted to the associated
4260 TYPE_REPRESENTATIVE_ARRAY. */
4262 tree
4263 maybe_vector_array (tree exp)
4265 tree etype = TREE_TYPE (exp);
4267 if (VECTOR_TYPE_P (etype))
4268 exp = convert (TYPE_REPRESENTATIVE_ARRAY (etype), exp);
4270 return exp;
4273 /* Return true if EXPR is an expression that can be folded as an operand
4274 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
4276 static bool
4277 can_fold_for_view_convert_p (tree expr)
4279 tree t1, t2;
4281 /* The folder will fold NOP_EXPRs between integral types with the same
4282 precision (in the middle-end's sense). We cannot allow it if the
4283 types don't have the same precision in the Ada sense as well. */
4284 if (TREE_CODE (expr) != NOP_EXPR)
4285 return true;
4287 t1 = TREE_TYPE (expr);
4288 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4290 /* Defer to the folder for non-integral conversions. */
4291 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4292 return true;
4294 /* Only fold conversions that preserve both precisions. */
4295 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4296 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4297 return true;
4299 return false;
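/* As a purely illustrative case: a NOP_EXPR between two 8-bit integral
   types where one of them has an RM size of only 5 bits has matching
   GCC precisions but differing Ada precisions, so the function returns
   false and unchecked_convert builds the VIEW_CONVERT_EXPR without
   folding its operand.  */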
4302 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
4303 If NOTRUNC_P is true, truncation operations should be suppressed.
4305 Special care is required with (source or target) integral types whose
4306 precision is not equal to their size, to make sure we fetch or assign
4307 the value bits whose location might depend on the endianness, e.g.
4309 Rmsize : constant := 8;
4310 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4312 type Bit_Array is array (1 .. Rmsize) of Boolean;
4313 pragma Pack (Bit_Array);
4315 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4317 Value : Int := 2#1000_0001#;
4318 Vbits : Bit_Array := To_Bit_Array (Value);
4320 we expect the 8 bits at Vbits'Address to always contain Value, while
4321 their original location depends on the endianness, at Value'Address
4322 on a little-endian architecture but not on a big-endian one. */
4324 tree
4325 unchecked_convert (tree type, tree expr, bool notrunc_p)
4327 tree etype = TREE_TYPE (expr);
4328 enum tree_code ecode = TREE_CODE (etype);
4329 enum tree_code code = TREE_CODE (type);
4331 /* If the expression is already of the right type, we are done. */
4332 if (etype == type)
4333 return expr;
4335 /* If both types are integral just do a normal conversion.
4336 Likewise for a conversion to an unconstrained array. */
4337 if ((((INTEGRAL_TYPE_P (type)
4338 && !(code == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (type)))
4339 || (POINTER_TYPE_P (type) && ! TYPE_IS_THIN_POINTER_P (type))
4340 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
4341 && ((INTEGRAL_TYPE_P (etype)
4342 && !(ecode == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (etype)))
4343 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
4344 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
4345 || code == UNCONSTRAINED_ARRAY_TYPE)
4347 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4349 tree ntype = copy_type (etype);
4350 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
4351 TYPE_MAIN_VARIANT (ntype) = ntype;
4352 expr = build1 (NOP_EXPR, ntype, expr);
4355 if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
4357 tree rtype = copy_type (type);
4358 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
4359 TYPE_MAIN_VARIANT (rtype) = rtype;
4360 expr = convert (rtype, expr);
4361 expr = build1 (NOP_EXPR, type, expr);
4363 else
4364 expr = convert (type, expr);
4367 /* If we are converting to an integral type whose precision is not equal
4368 to its size, first unchecked convert to a record that contains an
4369 object of the output type. Then extract the field. */
4370 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4371 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4372 GET_MODE_BITSIZE (TYPE_MODE (type))))
4374 tree rec_type = make_node (RECORD_TYPE);
4375 tree field = create_field_decl (get_identifier ("OBJ"), type,
4376 rec_type, 1, 0, 0, 0);
4378 TYPE_FIELDS (rec_type) = field;
4379 layout_type (rec_type);
4381 expr = unchecked_convert (rec_type, expr, notrunc_p);
4382 expr = build_component_ref (expr, NULL_TREE, field, false);
4385 /* Similarly if we are converting from an integral type whose precision
4386 is not equal to its size. */
4387 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype)
4388 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
4389 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4391 tree rec_type = make_node (RECORD_TYPE);
4392 tree field
4393 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
4394 1, 0, 0, 0);
4396 TYPE_FIELDS (rec_type) = field;
4397 layout_type (rec_type);
4399 expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
4400 expr = unchecked_convert (type, expr, notrunc_p);
4403 /* We have a special case when we are converting between two unconstrained
4404 array types. In that case, take the address, convert the fat pointer
4405 types, and dereference. */
4406 else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
4407 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4408 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
4409 build_unary_op (ADDR_EXPR, NULL_TREE,
4410 expr)));
4412 /* Another special case is when we are converting to a vector type from its
4413 representative array type; this is a regular conversion. */
4414 else if (code == VECTOR_TYPE
4415 && ecode == ARRAY_TYPE
4416 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4417 etype))
4418 expr = convert (type, expr);
4420 else
4422 expr = maybe_unconstrained_array (expr);
4423 etype = TREE_TYPE (expr);
4424 ecode = TREE_CODE (etype);
4425 if (can_fold_for_view_convert_p (expr))
4426 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
4427 else
4428 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
4431 /* If the result is an integral type whose precision is not equal to its
4432 size, sign- or zero-extend the result. We need not do this if the input
4433 is an integral type of the same precision and signedness or if the output
4434 is a biased type or if both the input and output are unsigned. */
4435 if (!notrunc_p
4436 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4437 && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
4438 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4439 GET_MODE_BITSIZE (TYPE_MODE (type)))
4440 && !(INTEGRAL_TYPE_P (etype)
4441 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
4442 && operand_equal_p (TYPE_RM_SIZE (type),
4443 (TYPE_RM_SIZE (etype) != 0
4444 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
4446 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
4448 tree base_type
4449 = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
4450 tree shift_expr
4451 = convert (base_type,
4452 size_binop (MINUS_EXPR,
4453 bitsize_int
4454 (GET_MODE_BITSIZE (TYPE_MODE (type))),
4455 TYPE_RM_SIZE (type)));
4456 expr
4457 = convert (type,
4458 build_binary_op (RSHIFT_EXPR, base_type,
4459 build_binary_op (LSHIFT_EXPR, base_type,
4460 convert (base_type, expr),
4461 shift_expr),
4462 shift_expr));
4465 /* An unchecked conversion should never raise Constraint_Error. The code
4466 below assumes that GCC's conversion routines overflow the same way that
4467 the underlying hardware does. This is probably true. In the rare case
4468 when it is false, we can rely on the fact that such conversions are
4469 erroneous anyway. */
4470 if (TREE_CODE (expr) == INTEGER_CST)
4471 TREE_OVERFLOW (expr) = 0;
4473 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
4474 mark the result as no longer constant. */
4475 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4476 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
4477 OEP_ONLY_CONST))
4478 TREE_CONSTANT (expr) = 0;
4480 return expr;
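/* As a concrete illustration of the extension step above: for a signed
   type with an RM size of 5 bits carried in an 8-bit mode, the shift
   count is 8 - 5 = 3, so the value is shifted left by 3 bits and then
   shifted right arithmetically by 3 bits, which replicates bit 4 into
   the upper bits and yields a properly sign-extended result.  */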
4483 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
4484 the latter being a record type as predicated by Is_Record_Type. */
4486 enum tree_code
4487 tree_code_for_record_type (Entity_Id gnat_type)
4489 Node_Id component_list
4490 = Component_List (Type_Definition
4491 (Declaration_Node
4492 (Implementation_Base_Type (gnat_type))));
4493 Node_Id component;
4495 /* Make this a UNION_TYPE unless it's either not an Unchecked_Union or
4496 we have a non-discriminant field outside a variant. In either case,
4497 it's a RECORD_TYPE. */
4499 if (!Is_Unchecked_Union (gnat_type))
4500 return RECORD_TYPE;
4502 for (component = First_Non_Pragma (Component_Items (component_list));
4503 Present (component);
4504 component = Next_Non_Pragma (component))
4505 if (Ekind (Defining_Entity (component)) == E_Component)
4506 return RECORD_TYPE;
4508 return UNION_TYPE;
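/* For illustration only, an Unchecked_Union whose components all live
   in the variant part, along the lines of

     type Rec (D : Boolean := False) is record
        case D is
           when False => I : Integer;
           when True  => F : Float;
        end case;
     end record;
     pragma Unchecked_Union (Rec);

   is translated as a UNION_TYPE, whereas any component declared before
   the variant part forces a RECORD_TYPE.  */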
4511 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
4512 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
4513 according to the presence of an alignment clause on the type or, if it
4514 is an array, on the component type. */
4516 bool
4517 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
4519 gnat_type = Underlying_Type (gnat_type);
4521 *align_clause = Present (Alignment_Clause (gnat_type));
4523 if (Is_Array_Type (gnat_type))
4525 gnat_type = Underlying_Type (Component_Type (gnat_type));
4526 if (Present (Alignment_Clause (gnat_type)))
4527 *align_clause = true;
4530 if (!Is_Floating_Point_Type (gnat_type))
4531 return false;
4533 if (UI_To_Int (Esize (gnat_type)) != 64)
4534 return false;
4536 return true;
4539 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
4540 size is greater or equal to 64 bits, or an array of such a type. Set
4541 ALIGN_CLAUSE according to the presence of an alignment clause on the
4542 type or, if it is an array, on the component type. */
4544 bool
4545 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
4547 gnat_type = Underlying_Type (gnat_type);
4549 *align_clause = Present (Alignment_Clause (gnat_type));
4551 if (Is_Array_Type (gnat_type))
4553 gnat_type = Underlying_Type (Component_Type (gnat_type));
4554 if (Present (Alignment_Clause (gnat_type)))
4555 *align_clause = true;
4558 if (!Is_Scalar_Type (gnat_type))
4559 return false;
4561 if (UI_To_Int (Esize (gnat_type)) < 64)
4562 return false;
4564 return true;
4567 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
4568 component of an aggregate type. */
4570 bool
4571 type_for_nonaliased_component_p (tree gnu_type)
4573 /* If the type is passed by reference, we may have pointers to the
4574 component so it cannot be made non-aliased. */
4575 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
4576 return false;
4578 /* We used to say that any component of aggregate type is aliased
4579 because the front-end may take 'Reference of it. The front-end
4580 has been enhanced in the meantime so as to use a renaming instead
4581 in most cases, but the back-end can probably take the address of
4582 such a component too so we go for the conservative stance.
4584 For instance, we might need the address of any array type, even
4585 if normally passed by copy, to construct a fat pointer if the
4586 component is used as an actual for an unconstrained formal.
4588 Likewise for record types: even if a specific record subtype is
4589 passed by copy, the parent type might be passed by ref (e.g. if
4590 it's of variable size) and we might take the address of a child
4591 component to pass to a parent formal. We have no way to check
4592 for such conditions here. */
4593 if (AGGREGATE_TYPE_P (gnu_type))
4594 return false;
4596 return true;
4599 /* Perform final processing on global variables. */
4601 void
4602 gnat_write_global_declarations (void)
4604 /* Proceed to optimize and emit assembly.
4605 FIXME: shouldn't be the front end's responsibility to call this. */
4606 cgraph_finalize_compilation_unit ();
4608 /* Emit debug info for all global declarations. */
4609 emit_debug_global_declarations (VEC_address (tree, global_decls),
4610 VEC_length (tree, global_decls));
4613 /* ************************************************************************
4614 * * GCC builtins support *
4615 * ************************************************************************ */
4617 /* The general scheme is fairly simple:
4619 For each builtin function/type to be declared, gnat_install_builtins calls
4620 internal facilities which eventually get to gnat_pushdecl, which in turn
4621 records the so-declared builtin function decls in the 'builtin_decls' global
4622 data structure. When an Intrinsic subprogram declaration is processed, we
4623 search this global data structure to retrieve the associated BUILT_IN DECL
4624 node. */
4626 /* Search the chain of currently available builtin declarations for a node
4627 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
4628 found, if any, or NULL_TREE otherwise. */
4629 tree
4630 builtin_decl_for (tree name)
4632 unsigned i;
4633 tree decl;
4635 for (i = 0; VEC_iterate(tree, builtin_decls, i, decl); i++)
4636 if (DECL_NAME (decl) == name)
4637 return decl;
4639 return NULL_TREE;
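/* For instance, binding an intrinsic to a GCC builtin boils down to a
   lookup along the lines of

     tree decl = builtin_decl_for (get_identifier ("__builtin_sin"));

   which yields the corresponding BUILT_IN decl if it has been installed
   and NULL_TREE otherwise; the particular builtin name here is only an
   example.  */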
4642 /* The code below eventually exposes gnat_install_builtins, which declares
4643 the builtin types and functions we might need, either internally or as
4644 user accessible facilities.
4646 ??? This is a first implementation shot, still in rough shape. It is
4647 heavily inspired from the "C" family implementation, with chunks copied
4648 verbatim from there.
4650 Two obvious TODO candidates are
4651 o Use a more efficient name/decl mapping scheme
4652 o Devise a middle-end infrastructure to avoid having to copy
4653 pieces between front-ends. */
4655 /* ----------------------------------------------------------------------- *
4656 * BUILTIN ELEMENTARY TYPES *
4657 * ----------------------------------------------------------------------- */
4659 /* Standard data types to be used in builtin argument declarations. */
4661 enum c_tree_index
4663 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
4664 CTI_STRING_TYPE,
4665 CTI_CONST_STRING_TYPE,
4667 CTI_MAX
4670 static tree c_global_trees[CTI_MAX];
4672 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
4673 #define string_type_node c_global_trees[CTI_STRING_TYPE]
4674 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
4676 /* ??? In addition to some attribute handlers, we currently don't support a
4677 (small) number of builtin-types, which in turn inhibits support for a
4678 number of builtin functions. */
4679 #define wint_type_node void_type_node
4680 #define intmax_type_node void_type_node
4681 #define uintmax_type_node void_type_node
4683 /* Build the void_list_node (void_type_node having been created). */
4685 static tree
4686 build_void_list_node (void)
4688 tree t = build_tree_list (NULL_TREE, void_type_node);
4689 return t;
4692 /* Used to help initialize the builtin-types.def table. When a type of
4693 the correct size doesn't exist, use error_mark_node instead of NULL.
4694 The latter results in segfaults even when a decl using the type doesn't
4695 get invoked. */
4697 static tree
4698 builtin_type_for_size (int size, bool unsignedp)
4700 tree type = gnat_type_for_size (size, unsignedp);
4701 return type ? type : error_mark_node;
4704 /* Build/push the elementary type decls that builtin functions/types
4705 will need. */
4707 static void
4708 install_builtin_elementary_types (void)
4710 signed_size_type_node = size_type_node;
4711 pid_type_node = integer_type_node;
4712 void_list_node = build_void_list_node ();
4714 string_type_node = build_pointer_type (char_type_node);
4715 const_string_type_node
4716 = build_pointer_type (build_qualified_type
4717 (char_type_node, TYPE_QUAL_CONST));
4720 /* ----------------------------------------------------------------------- *
4721 * BUILTIN FUNCTION TYPES *
4722 * ----------------------------------------------------------------------- */
4724 /* Now, builtin function types per se. */
4726 enum c_builtin_type
4728 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
4729 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
4730 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
4731 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
4732 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4733 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4734 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
4735 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
4736 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
4737 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
4738 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
4739 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
4740 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4741 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4742 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG6) \
4743 NAME,
4744 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
4745 #include "builtin-types.def"
4746 #undef DEF_PRIMITIVE_TYPE
4747 #undef DEF_FUNCTION_TYPE_0
4748 #undef DEF_FUNCTION_TYPE_1
4749 #undef DEF_FUNCTION_TYPE_2
4750 #undef DEF_FUNCTION_TYPE_3
4751 #undef DEF_FUNCTION_TYPE_4
4752 #undef DEF_FUNCTION_TYPE_5
4753 #undef DEF_FUNCTION_TYPE_6
4754 #undef DEF_FUNCTION_TYPE_7
4755 #undef DEF_FUNCTION_TYPE_VAR_0
4756 #undef DEF_FUNCTION_TYPE_VAR_1
4757 #undef DEF_FUNCTION_TYPE_VAR_2
4758 #undef DEF_FUNCTION_TYPE_VAR_3
4759 #undef DEF_FUNCTION_TYPE_VAR_4
4760 #undef DEF_FUNCTION_TYPE_VAR_5
4761 #undef DEF_POINTER_TYPE
4762 BT_LAST
4765 typedef enum c_builtin_type builtin_type;
4767 /* A temporary array used in communication with def_fn_type. */
4768 static GTY(()) tree builtin_types[(int) BT_LAST + 1];
4770 /* A helper function for install_builtin_function_types. Build function type
4771 for DEF with return type RET and N arguments. If VAR is true, then the
4772 function should be variadic after those N arguments.
4774 Takes special care not to ICE if any of the types involved are
4775 error_mark_node, which indicates that said type is not in fact available
4776 (see builtin_type_for_size), in which case the function type as a whole
4777 should be error_mark_node. */
4779 static void
4780 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
4782 tree args = NULL, t;
4783 va_list list;
4784 int i;
4786 va_start (list, n);
4787 for (i = 0; i < n; ++i)
4789 builtin_type a = (builtin_type) va_arg (list, int);
4790 t = builtin_types[a];
4791 if (t == error_mark_node)
4792 goto egress;
4793 args = tree_cons (NULL_TREE, t, args);
4795 va_end (list);
4797 args = nreverse (args);
4798 if (!var)
4799 args = chainon (args, void_list_node);
4801 t = builtin_types[ret];
4802 if (t == error_mark_node)
4803 goto egress;
4804 t = build_function_type (t, args);
4806 egress:
4807 builtin_types[def] = t;
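/* For instance, a two-argument entry in builtin-types.def declared with
   DEF_FUNCTION_TYPE_2 (ENUM, RETURN, ARG1, ARG2) is expanded below into
   def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2), while the variadic
   DEF_FUNCTION_TYPE_VAR_* variants pass 1 for VAR so that no trailing
   void_list_node is appended to the argument list.  */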
4810 /* Build the builtin function types and install them in the builtin_types
4811 array for later use in builtin function decls. */
4813 static void
4814 install_builtin_function_types (void)
4816 tree va_list_ref_type_node;
4817 tree va_list_arg_type_node;
4819 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4821 va_list_arg_type_node = va_list_ref_type_node =
4822 build_pointer_type (TREE_TYPE (va_list_type_node));
4824 else
4826 va_list_arg_type_node = va_list_type_node;
4827 va_list_ref_type_node = build_reference_type (va_list_type_node);
4830 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
4831 builtin_types[ENUM] = VALUE;
4832 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
4833 def_fn_type (ENUM, RETURN, 0, 0);
4834 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
4835 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
4836 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
4837 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
4838 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
4839 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
4840 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
4841 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
4842 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
4843 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
4844 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4845 ARG6) \
4846 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
4847 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4848 ARG6, ARG7) \
4849 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
4850 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
4851 def_fn_type (ENUM, RETURN, 1, 0);
4852 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
4853 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
4854 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
4855 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
4856 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
4857 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
4858 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
4859 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
4860 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
4861 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
4862 #define DEF_POINTER_TYPE(ENUM, TYPE) \
4863 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
4865 #include "builtin-types.def"
4867 #undef DEF_PRIMITIVE_TYPE
4868 #undef DEF_FUNCTION_TYPE_1
4869 #undef DEF_FUNCTION_TYPE_2
4870 #undef DEF_FUNCTION_TYPE_3
4871 #undef DEF_FUNCTION_TYPE_4
4872 #undef DEF_FUNCTION_TYPE_5
4873 #undef DEF_FUNCTION_TYPE_6
4874 #undef DEF_FUNCTION_TYPE_VAR_0
4875 #undef DEF_FUNCTION_TYPE_VAR_1
4876 #undef DEF_FUNCTION_TYPE_VAR_2
4877 #undef DEF_FUNCTION_TYPE_VAR_3
4878 #undef DEF_FUNCTION_TYPE_VAR_4
4879 #undef DEF_FUNCTION_TYPE_VAR_5
4880 #undef DEF_POINTER_TYPE
4881 builtin_types[(int) BT_LAST] = NULL_TREE;
4884 /* ----------------------------------------------------------------------- *
4885 * BUILTIN ATTRIBUTES *
4886 * ----------------------------------------------------------------------- */
4888 enum built_in_attribute
4890 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
4891 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
4892 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
4893 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
4894 #include "builtin-attrs.def"
4895 #undef DEF_ATTR_NULL_TREE
4896 #undef DEF_ATTR_INT
4897 #undef DEF_ATTR_IDENT
4898 #undef DEF_ATTR_TREE_LIST
4899 ATTR_LAST
4902 static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
4904 static void
4905 install_builtin_attributes (void)
4907 /* Fill in the built_in_attributes array. */
4908 #define DEF_ATTR_NULL_TREE(ENUM) \
4909 built_in_attributes[(int) ENUM] = NULL_TREE;
4910 #define DEF_ATTR_INT(ENUM, VALUE) \
4911 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
4912 #define DEF_ATTR_IDENT(ENUM, STRING) \
4913 built_in_attributes[(int) ENUM] = get_identifier (STRING);
4914 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
4915 built_in_attributes[(int) ENUM] \
4916 = tree_cons (built_in_attributes[(int) PURPOSE], \
4917 built_in_attributes[(int) VALUE], \
4918 built_in_attributes[(int) CHAIN]);
4919 #include "builtin-attrs.def"
4920 #undef DEF_ATTR_NULL_TREE
4921 #undef DEF_ATTR_INT
4922 #undef DEF_ATTR_IDENT
4923 #undef DEF_ATTR_TREE_LIST
4926 /* Handle a "const" attribute; arguments as in
4927 struct attribute_spec.handler. */
4929 static tree
4930 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
4931 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
4932 bool *no_add_attrs)
4934 if (TREE_CODE (*node) == FUNCTION_DECL)
4935 TREE_READONLY (*node) = 1;
4936 else
4937 *no_add_attrs = true;
4939 return NULL_TREE;
4942 /* Handle a "nothrow" attribute; arguments as in
4943 struct attribute_spec.handler. */
4945 static tree
4946 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
4947 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
4948 bool *no_add_attrs)
4950 if (TREE_CODE (*node) == FUNCTION_DECL)
4951 TREE_NOTHROW (*node) = 1;
4952 else
4953 *no_add_attrs = true;
4955 return NULL_TREE;
4958 /* Handle a "pure" attribute; arguments as in
4959 struct attribute_spec.handler. */
4961 static tree
4962 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
4963 int ARG_UNUSED (flags), bool *no_add_attrs)
4965 if (TREE_CODE (*node) == FUNCTION_DECL)
4966 DECL_PURE_P (*node) = 1;
4967 /* ??? TODO: Support types. */
4968 else
4970 warning (OPT_Wattributes, "%qs attribute ignored",
4971 IDENTIFIER_POINTER (name));
4972 *no_add_attrs = true;
4975 return NULL_TREE;
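/* Usage sketch (C syntax, for illustration only):

     extern int count_chars (const char *) __attribute__ ((pure));

   On a FUNCTION_DECL the handler sets DECL_PURE_P; on anything else it
   warns and drops the attribute, as the TODO above notes for types.  */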
4978 /* Handle a "no vops" attribute; arguments as in
4979 struct attribute_spec.handler. */
4981 static tree
4982 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
4983 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
4984 bool *ARG_UNUSED (no_add_attrs))
4986 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
4987 DECL_IS_NOVOPS (*node) = 1;
4988 return NULL_TREE;
4991 /* Helper for nonnull attribute handling; fetch the operand number
4992 from the attribute argument list. */
4994 static bool
4995 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
4997 /* Verify the arg number is a constant. */
4998 if (TREE_CODE (arg_num_expr) != INTEGER_CST
4999 || TREE_INT_CST_HIGH (arg_num_expr) != 0)
5000 return false;
5002 *valp = TREE_INT_CST_LOW (arg_num_expr);
5003 return true;
5006 /* Handle the "nonnull" attribute. */
5007 static tree
5008 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
5009 tree args, int ARG_UNUSED (flags),
5010 bool *no_add_attrs)
5012 tree type = *node;
5013 unsigned HOST_WIDE_INT attr_arg_num;
5015 /* If no arguments are specified, all pointer arguments should be
5016 non-null. Verify a full prototype is given so that the arguments
5017 will have the correct types when we actually check them later. */
5018 if (!args)
5020 if (!TYPE_ARG_TYPES (type))
5022 error ("nonnull attribute without arguments on a non-prototype");
5023 *no_add_attrs = true;
5025 return NULL_TREE;
5028 /* Argument list specified. Verify that each argument number references
5029 a pointer argument. */
5030 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
5032 tree argument;
5033 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
5035 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
5037 error ("nonnull argument has invalid operand number (argument %lu)",
5038 (unsigned long) attr_arg_num);
5039 *no_add_attrs = true;
5040 return NULL_TREE;
5043 argument = TYPE_ARG_TYPES (type);
5044 if (argument)
5046 for (ck_num = 1; ; ck_num++)
5048 if (!argument || ck_num == arg_num)
5049 break;
5050 argument = TREE_CHAIN (argument);
5053 if (!argument
5054 || TREE_CODE (TREE_VALUE (argument)) == VOID_TYPE)
5056 error ("nonnull argument with out-of-range operand number "
5057 "(argument %lu, operand %lu)",
5058 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5059 *no_add_attrs = true;
5060 return NULL_TREE;
5063 if (TREE_CODE (TREE_VALUE (argument)) != POINTER_TYPE)
5065 error ("nonnull argument references non-pointer operand "
5066 "(argument %lu, operand %lu)",
5067 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5068 *no_add_attrs = true;
5069 return NULL_TREE;
5074 return NULL_TREE;
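/* Usage sketch (C syntax, for illustration only):

     extern void copy_buf (void *, const void *) __attribute__ ((nonnull));
     extern void fill_buf (void *, int) __attribute__ ((nonnull (1)));

   Without arguments, every pointer parameter of the prototype is flagged;
   with explicit operand numbers, the loop above checks that each number
   designates an existing, pointer-typed parameter.  */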
5077 /* Handle a "sentinel" attribute. */
5079 static tree
5080 handle_sentinel_attribute (tree *node, tree name, tree args,
5081 int ARG_UNUSED (flags), bool *no_add_attrs)
5083 tree params = TYPE_ARG_TYPES (*node);
5085 if (!params)
5087 warning (OPT_Wattributes,
5088 "%qs attribute requires prototypes with named arguments",
5089 IDENTIFIER_POINTER (name));
5090 *no_add_attrs = true;
5092 else
5094 while (TREE_CHAIN (params))
5095 params = TREE_CHAIN (params);
5097 if (VOID_TYPE_P (TREE_VALUE (params)))
5099 warning (OPT_Wattributes,
5100 "%qs attribute only applies to variadic functions",
5101 IDENTIFIER_POINTER (name));
5102 *no_add_attrs = true;
5106 if (args)
5108 tree position = TREE_VALUE (args);
5110 if (TREE_CODE (position) != INTEGER_CST)
5112 warning (0, "requested position is not an integer constant");
5113 *no_add_attrs = true;
5115 else
5117 if (tree_int_cst_lt (position, integer_zero_node))
5119 warning (0, "requested position is less than zero");
5120 *no_add_attrs = true;
5125 return NULL_TREE;
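/* Usage sketch (C syntax, for illustration only):

     extern void build_list (const char *, ...) __attribute__ ((sentinel));

   The attribute requires a variadic prototype with at least one named
   argument; the optional integer argument gives the position of the
   expected null sentinel, counted from the end of the argument list.  */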
5128 /* Handle a "noreturn" attribute; arguments as in
5129 struct attribute_spec.handler. */
5131 static tree
5132 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5133 int ARG_UNUSED (flags), bool *no_add_attrs)
5135 tree type = TREE_TYPE (*node);
5137 /* See FIXME comment in c_common_attribute_table. */
5138 if (TREE_CODE (*node) == FUNCTION_DECL)
5139 TREE_THIS_VOLATILE (*node) = 1;
5140 else if (TREE_CODE (type) == POINTER_TYPE
5141 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
5142 TREE_TYPE (*node)
5143 = build_pointer_type
5144 (build_type_variant (TREE_TYPE (type),
5145 TYPE_READONLY (TREE_TYPE (type)), 1));
5146 else
5148 warning (OPT_Wattributes, "%qs attribute ignored",
5149 IDENTIFIER_POINTER (name));
5150 *no_add_attrs = true;
5153 return NULL_TREE;
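/* Usage sketch (C syntax, for illustration only):

     extern void die (const char *) __attribute__ ((noreturn));

   On a FUNCTION_DECL the property is recorded via TREE_THIS_VOLATILE; on a
   pointer-to-function type the handler rebuilds a "volatile" function
   variant, the historical C encoding referred to by the FIXME above.  */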
5156 /* Handle a "malloc" attribute; arguments as in
5157 struct attribute_spec.handler. */
5159 static tree
5160 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5161 int ARG_UNUSED (flags), bool *no_add_attrs)
5163 if (TREE_CODE (*node) == FUNCTION_DECL
5164 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
5165 DECL_IS_MALLOC (*node) = 1;
5166 else
5168 warning (OPT_Wattributes, "%qs attribute ignored",
5169 IDENTIFIER_POINTER (name));
5170 *no_add_attrs = true;
5173 return NULL_TREE;
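/* Usage sketch (C syntax, for illustration only):

     extern void *alloc_bytes (size_t) __attribute__ ((malloc));

   The attribute is only accepted on function declarations returning a
   pointer; DECL_IS_MALLOC then tells alias analysis that the returned
   storage is not aliased by anything else.  */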
5176 /* Fake handler for attributes we don't properly support. */
5178 tree
5179 fake_attribute_handler (tree * ARG_UNUSED (node),
5180 tree ARG_UNUSED (name),
5181 tree ARG_UNUSED (args),
5182 int ARG_UNUSED (flags),
5183 bool * ARG_UNUSED (no_add_attrs))
5185 return NULL_TREE;
5188 /* Handle a "type_generic" attribute. */
5190 static tree
5191 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
5192 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5193 bool * ARG_UNUSED (no_add_attrs))
5195 tree params;
5197 /* Ensure we have a function type. */
5198 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
5200 params = TYPE_ARG_TYPES (*node);
5201 while (params && ! VOID_TYPE_P (TREE_VALUE (params)))
5202 params = TREE_CHAIN (params);
5204 /* Ensure we have a variadic function. */
5205 gcc_assert (!params);
5207 return NULL_TREE;
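/* This attribute comes from builtin-attrs.def rather than user code,
   presumably for type-generic builtins such as the __builtin_isgreater
   family.  The asserts encode its contract: it may only appear on a
   FUNCTION_TYPE whose parameter list does not end in void, i.e. a
   variadic declaration.  */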
5210 /* Handle a "vector_size" attribute; arguments as in
5211 struct attribute_spec.handler. */
5213 static tree
5214 handle_vector_size_attribute (tree *node, tree name, tree args,
5215 int ARG_UNUSED (flags),
5216 bool *no_add_attrs)
5218 unsigned HOST_WIDE_INT vecsize, nunits;
5219 enum machine_mode orig_mode;
5220 tree type = *node, new_type, size;
5222 *no_add_attrs = true;
5224 size = TREE_VALUE (args);
5226 if (!host_integerp (size, 1))
5228 warning (OPT_Wattributes, "%qs attribute ignored",
5229 IDENTIFIER_POINTER (name));
5230 return NULL_TREE;
5233 /* Get the vector size (in bytes). */
5234 vecsize = tree_low_cst (size, 1);
5236 /* We need to provide for vector pointers, vector arrays, and
5237 functions returning vectors. For example:
5239 __attribute__((vector_size(16))) short *foo;
5241 In this case, the mode is SI, but the type being modified is
5242 HI, so we need to look further. */
5244 while (POINTER_TYPE_P (type)
5245 || TREE_CODE (type) == FUNCTION_TYPE
5246 || TREE_CODE (type) == METHOD_TYPE
5247 || TREE_CODE (type) == ARRAY_TYPE
5248 || TREE_CODE (type) == OFFSET_TYPE)
5249 type = TREE_TYPE (type);
5251 /* Get the mode of the type being modified. */
5252 orig_mode = TYPE_MODE (type);
5254 if ((!INTEGRAL_TYPE_P (type)
5255 && !SCALAR_FLOAT_TYPE_P (type)
5256 && !FIXED_POINT_TYPE_P (type))
5257 || (!SCALAR_FLOAT_MODE_P (orig_mode)
5258 && GET_MODE_CLASS (orig_mode) != MODE_INT
5259 && !ALL_SCALAR_FIXED_POINT_MODE_P (orig_mode))
5260 || !host_integerp (TYPE_SIZE_UNIT (type), 1)
5261 || TREE_CODE (type) == BOOLEAN_TYPE)
5263 error ("invalid vector type for attribute %qs",
5264 IDENTIFIER_POINTER (name));
5265 return NULL_TREE;
5268 if (vecsize % tree_low_cst (TYPE_SIZE_UNIT (type), 1))
5270 error ("vector size not an integral multiple of component size");
5271 return NULL_TREE;
5274 if (vecsize == 0)
5276 error ("zero vector size");
5277 return NULL_TREE;
5280 /* Calculate how many units fit in the vector. */
5281 nunits = vecsize / tree_low_cst (TYPE_SIZE_UNIT (type), 1);
5282 if (nunits & (nunits - 1))
5284 error ("number of components of the vector not a power of two");
5285 return NULL_TREE;
5288 new_type = build_vector_type (type, nunits);
5290 /* Build back pointers if needed. */
5291 *node = lang_hooks.types.reconstruct_complex_type (*node, new_type);
5293 return NULL_TREE;
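/* Worked example (illustrative): for __attribute__ ((vector_size (16)))
   applied to a 4-byte "int", vecsize is 16, nunits computes to 4, the
   power-of-two check passes, and build_vector_type produces a 4-element
   integer vector type that replaces the original type in *node.  */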
5296 /* Handle a "vector_type" attribute; arguments as in
5297 struct attribute_spec.handler. */
5299 static tree
5300 handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5301 int ARG_UNUSED (flags),
5302 bool *no_add_attrs)
5304 /* Vector representative type and size. */
5305 tree rep_type = *node;
5306 tree rep_size = TYPE_SIZE_UNIT (rep_type);
5307 tree rep_name;
5309 /* Vector size in bytes and number of units. */
5310 unsigned HOST_WIDE_INT vec_bytes, vec_units;
5312 /* Vector element type and mode. */
5313 tree elem_type;
5314 enum machine_mode elem_mode;
5316 *no_add_attrs = true;
5318 /* Get the representative array type, possibly nested within a
5319 padding record e.g. for alignment purposes. */
5321 if (TYPE_IS_PADDING_P (rep_type))
5322 rep_type = TREE_TYPE (TYPE_FIELDS (rep_type));
5324 if (TREE_CODE (rep_type) != ARRAY_TYPE)
5326 error ("attribute %qs applies to array types only",
5327 IDENTIFIER_POINTER (name));
5328 return NULL_TREE;
5331 /* Silently punt on variable sizes. We can't make vector types for them,
5332 we need to ignore them on front-end generated subtypes of unconstrained
5333 bases, and this attribute is for binding implementors, not end-users, so
5334 we should never get here from legitimate explicit uses. */
5336 if (!host_integerp (rep_size, 1))
5337 return NULL_TREE;
5339 /* Get the element type/mode and check this is something we know
5340 how to make vectors of. */
5342 elem_type = TREE_TYPE (rep_type);
5343 elem_mode = TYPE_MODE (elem_type);
5345 if ((!INTEGRAL_TYPE_P (elem_type)
5346 && !SCALAR_FLOAT_TYPE_P (elem_type)
5347 && !FIXED_POINT_TYPE_P (elem_type))
5348 || (!SCALAR_FLOAT_MODE_P (elem_mode)
5349 && GET_MODE_CLASS (elem_mode) != MODE_INT
5350 && !ALL_SCALAR_FIXED_POINT_MODE_P (elem_mode))
5351 || !host_integerp (TYPE_SIZE_UNIT (elem_type), 1))
5353 error ("invalid element type for attribute %qs",
5354 IDENTIFIER_POINTER (name));
5355 return NULL_TREE;
5358 /* Sanity check the vector size and element type consistency. */
5360 vec_bytes = tree_low_cst (rep_size, 1);
5362 if (vec_bytes % tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1))
5364 error ("vector size not an integral multiple of component size");
5365 return NULL_TREE;
5368 if (vec_bytes == 0)
5370 error ("zero vector size");
5371 return NULL_TREE;
5374 vec_units = vec_bytes / tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1);
5375 if (vec_units & (vec_units - 1))
5377 error ("number of components of the vector not a power of two");
5378 return NULL_TREE;
5381 /* Build the vector type and replace. */
5383 *node = build_vector_type (elem_type, vec_units);
5384 rep_name = TYPE_NAME (rep_type);
5385 if (TREE_CODE (rep_name) == TYPE_DECL)
5386 rep_name = DECL_NAME (rep_name);
5387 TYPE_NAME (*node) = rep_name;
5388 TYPE_REPRESENTATIVE_ARRAY (*node) = rep_type;
5390 return NULL_TREE;
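/* Worked example (illustrative): if the representative type is a
   constrained array of 4 single-precision floating-point components,
   rep_size is 16 bytes, vec_units computes to 4, and the node is rewritten
   as a 4-element vector whose TYPE_REPRESENTATIVE_ARRAY points back at the
   original array type, for use by the vector bindings mentioned above.  */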
5393 /* ----------------------------------------------------------------------- *
5394 * BUILTIN FUNCTIONS *
5395 * ----------------------------------------------------------------------- */
5397 /* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
5398 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
5399 if nonansi_p and flag_no_nonansi_builtin. */
5401 static void
5402 def_builtin_1 (enum built_in_function fncode,
5403 const char *name,
5404 enum built_in_class fnclass,
5405 tree fntype, tree libtype,
5406 bool both_p, bool fallback_p,
5407 bool nonansi_p ATTRIBUTE_UNUSED,
5408 tree fnattrs, bool implicit_p)
5410 tree decl;
5411 const char *libname;
5413 /* Preserve an already installed decl. It was most likely set up in advance
5414 (e.g. as part of the internal builtins) for specific reasons. */
5415 if (built_in_decls[(int) fncode] != NULL_TREE)
5416 return;
5418 gcc_assert ((!both_p && !fallback_p)
5419 || !strncmp (name, "__builtin_",
5420 strlen ("__builtin_")));
5422 libname = name + strlen ("__builtin_");
5423 decl = add_builtin_function (name, fntype, fncode, fnclass,
5424 (fallback_p ? libname : NULL),
5425 fnattrs);
5426 if (both_p)
5427 /* ??? This is normally further controlled by command-line options
5428 like -fno-builtin, but we don't have them for Ada. */
5429 add_builtin_function (libname, libtype, fncode, fnclass,
5430 NULL, fnattrs);
5432 built_in_decls[(int) fncode] = decl;
5433 if (implicit_p)
5434 implicit_built_in_decls[(int) fncode] = decl;
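/* For example (illustrative), registering BUILT_IN_MEMCPY with both_p set
   declares both "__builtin_memcpy" and plain "memcpy", records the
   __builtin_ decl in built_in_decls so the middle-end can expand calls to
   it, and, when implicit_p, also allows the optimizers to introduce such
   calls implicitly.  */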
5437 static int flag_isoc94 = 0;
5438 static int flag_isoc99 = 0;
5440 /* Install what the common builtins.def offers. */
5442 static void
5443 install_builtin_functions (void)
5445 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
5446 NONANSI_P, ATTRS, IMPLICIT, COND) \
5447 if (NAME && COND) \
5448 def_builtin_1 (ENUM, NAME, CLASS, \
5449 builtin_types[(int) TYPE], \
5450 builtin_types[(int) LIBTYPE], \
5451 BOTH_P, FALLBACK_P, NONANSI_P, \
5452 built_in_attributes[(int) ATTRS], IMPLICIT);
5453 #include "builtins.def"
5454 #undef DEF_BUILTIN
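/* A builtins.def row normally reaches DEF_BUILTIN through a wrapper macro
   such as DEF_LIB_BUILTIN or DEF_GCC_BUILTIN (assumed here for
   illustration); for memcpy, e.g., TYPE would be BT_FN_PTR_PTR_CONST_PTR_SIZE
   and ATTRS an ATTR_NOTHROW_NONNULL-style list, both built by the machinery
   installed earlier in this file.  */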
5457 /* ----------------------------------------------------------------------- *
5458 * BUILTIN FUNCTIONS *
5459 * ----------------------------------------------------------------------- */
5461 /* Install the builtin functions we might need. */
5463 void
5464 gnat_install_builtins (void)
5466 install_builtin_elementary_types ();
5467 install_builtin_function_types ();
5468 install_builtin_attributes ();
5470 /* Install builtins used by generic middle-end pieces first. Some of these
5471 know about internal specifics and set their attributes accordingly, for
5472 instance __builtin_alloca vs. no-throw and -fstack-check. The generic
5473 definitions from builtins.def will then be ignored for these. */
5474 build_common_builtin_nodes ();
5476 /* Now, install the target specific builtins, such as the AltiVec family on
5477 ppc, and the common set as exposed by builtins.def. */
5478 targetm.init_builtins ();
5479 install_builtin_functions ();
5482 #include "gt-ada-utils.h"
5483 #include "gtype-ada.h"