/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                                U T I L S                                 *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2010, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software; you can redistribute it and/or modify it under    *
 * terms of the GNU General Public License as published by the Free Soft-   *
 * ware Foundation; either version 3, or (at your option) any later ver-    *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY   *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for more details.  You should have received a copy of the GNU General    *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed by the GNAT team at New York University.   *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "toplev.h"
#include "rtl.h"
#include "output.h"
#include "ggc.h"
#include "debug.h"
#include "convert.h"
#include "target.h"
#include "function.h"
#include "langhooks.h"
#include "pointer-set.h"
#include "cgraph.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "gimple.h"

#include "ada.h"
#include "types.h"
#include "atree.h"
#include "elists.h"
#include "namet.h"
#include "nlists.h"
#include "stringt.h"
#include "uintp.h"
#include "fe.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"

#ifndef MAX_BITS_PER_WORD
#define MAX_BITS_PER_WORD  BITS_PER_WORD
#endif

/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* The default alignment of "double" floating-point types, i.e. floating
   point types whose size is equal to 64 bits, or 0 if this alignment is
   not specifically capped.  */
int double_float_alignment;

/* The default alignment of "double" or larger scalar types, i.e. scalar
   types whose size is greater or equal to 64 bits, or 0 if this alignment
   is not specifically capped.  */
int double_scalar_alignment;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Forward declarations for handlers of attributes.  */
static tree handle_const_attribute (tree *, tree, tree, int, bool *);
static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);

/* Fake handler for attributes we don't properly support, typically because
   they'd require dragging in a lot of the common C front-end circuitry.  */
static tree fake_attribute_handler (tree *, tree, tree, int, bool *);

/* Table of machine-independent internal attributes for Ada.  We support
   this minimal set of attributes to accommodate the needs of builtins.  */
const struct attribute_spec gnat_internal_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "const",        0, 0,  true,  false, false, handle_const_attribute },
  { "nothrow",      0, 0,  true,  false, false, handle_nothrow_attribute },
  { "pure",         0, 0,  true,  false, false, handle_pure_attribute },
  { "no vops",      0, 0,  true,  false, false, handle_novops_attribute },
  { "nonnull",      0, -1, false, true,  true,  handle_nonnull_attribute },
  { "sentinel",     0, 1,  false, true,  true,  handle_sentinel_attribute },
  { "noreturn",     0, 0,  true,  false, false, handle_noreturn_attribute },
  { "malloc",       0, 0,  true,  false, false, handle_malloc_attribute },
  { "type generic", 0, 0,  false, true,  true,  handle_type_generic_attribute },

  { "vector_size",  1, 1,  false, true,  false, handle_vector_size_attribute },
  { "vector_type",  0, 0,  false, true,  false, handle_vector_type_attribute },
  { "may_alias",    0, 0,  false, true,  false, NULL },

  /* ??? format and format_arg are heavy and not supported, which actually
     prevents support for stdio builtins, which we however declare as part
     of the common builtins.def contents.  */
  { "format",       3, 3,  false, true,  true,  fake_attribute_handler },
  { "format_arg",   1, 1,  false, true,  true,  fake_attribute_handler },

  { NULL,           0, 0,  false, false, false, NULL }
};

/* Associates a GNAT tree node to a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See documentation
   of `save_gnu_tree' for more info.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

#define GET_GNU_TREE(GNAT_ENTITY) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]

#define SET_GNU_TREE(GNAT_ENTITY,VAL) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_GNU_TREE(GNAT_ENTITY) \
  (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* Associates a GNAT entity to a GCC tree node used as a dummy, if any.  */
static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;

#define GET_DUMMY_NODE(GNAT_ENTITY) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id]

#define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
  (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* This variable keeps a table for types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];

/* For each binding contour we allocate a binding_level structure to indicate
   the binding depth.  */

struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
  /* The binding level containing this one (the enclosing binding level).  */
  struct gnat_binding_level *chain;
  /* The BLOCK node for this level.  */
  tree block;
  /* If nonzero, the setjmp buffer that needs to be updated for any
     variable-sized definition within this context.  */
  tree jmpbuf_decl;
};

/* The binding level currently in effect.  */
static GTY(()) struct gnat_binding_level *current_binding_level;

/* A chain of gnat_binding_level structures awaiting reuse.  */
static GTY((deletable)) struct gnat_binding_level *free_binding_level;

/* An array of global declarations.  */
static GTY(()) VEC(tree,gc) *global_decls;

/* An array of builtin function declarations.  */
static GTY(()) VEC(tree,gc) *builtin_decls;

/* An array of global renaming pointers.  */
static GTY(()) VEC(tree,gc) *global_renaming_pointers;

/* A chain of unused BLOCK nodes.  */
static GTY((deletable)) tree free_block_chain;

static tree merge_sizes (tree, tree, tree, bool, bool);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static tree float_type_for_precision (int, enum machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static tree convert_to_thin_pointer (tree, tree);
static tree make_descriptor_field (const char *, tree, tree, tree);
static bool potential_alignment_gap (tree, tree, tree);
static void process_attributes (tree, struct attrib *);

/* Initialize the association of GNAT nodes to GCC trees.  */

void
init_gnat_to_gnu (void)
{
  associate_gnat_to_gnu
    = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
}

/* GNAT_ENTITY is a GNAT tree node for an entity.  GNU_DECL is the GCC tree
   which is to be associated with GNAT_ENTITY.  Such GCC tree node is always
   a ..._DECL node.  If NO_CHECK is true, the latter check is suppressed.

   If GNU_DECL is zero, a previous association is to be reset.  */

void
save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
{
  /* Check that GNAT_ENTITY is not already defined and that it is being set
     to something which is a decl.  Raise gigi 401 if not.  Usually, this
     means GNAT_ENTITY is defined twice, but occasionally is due to some
     Gigi problem.  */
  gcc_assert (!(gnu_decl
                && (PRESENT_GNU_TREE (gnat_entity)
                    || (!no_check && !DECL_P (gnu_decl)))));

  SET_GNU_TREE (gnat_entity, gnu_decl);
}

/* GNAT_ENTITY is a GNAT tree node for a defining identifier.
   Return the ..._DECL node that was associated with it.  If there is no tree
   node associated with GNAT_ENTITY, abort.

   In some cases, such as delayed elaboration or expressions that need to
   be elaborated only once, GNAT_ENTITY is really not an entity.  */

tree
get_gnu_tree (Entity_Id gnat_entity)
{
  gcc_assert (PRESENT_GNU_TREE (gnat_entity));
  return GET_GNU_TREE (gnat_entity);
}

/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY.  */

bool
present_gnu_tree (Entity_Id gnat_entity)
{
  return PRESENT_GNU_TREE (gnat_entity);
}

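/* For instance, gigi typically saves the ..._DECL built for a defining
   identifier right after creating it and retrieves it on later references;
   for a hypothetical entity gnat_entity and decl gnu_decl this amounts to
   something like:

     save_gnu_tree (gnat_entity, gnu_decl, false);
     ...
     if (present_gnu_tree (gnat_entity))
       gnu_decl = get_gnu_tree (gnat_entity);  */
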
/* Initialize the association of GNAT nodes to GCC trees as dummies.  */

void
init_dummy_type (void)
{
  dummy_node_table
    = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
}

/* Make a dummy type corresponding to GNAT_TYPE.  */

tree
make_dummy_type (Entity_Id gnat_type)
{
  Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
  tree gnu_type;

  /* If there is an equivalent type, get its underlying type.  */
  if (Present (gnat_underlying))
    gnat_underlying = Underlying_Type (gnat_underlying);

  /* If there was no equivalent type (can only happen when just annotating
     types) or underlying type, go back to the original type.  */
  if (No (gnat_underlying))
    gnat_underlying = gnat_type;

  /* If there is already a dummy type, use it.  Otherwise, make one.  */
  if (PRESENT_DUMMY_NODE (gnat_underlying))
    return GET_DUMMY_NODE (gnat_underlying);

  /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
     an ENUMERAL_TYPE.  */
  gnu_type = make_node (Is_Record_Type (gnat_underlying)
                        ? tree_code_for_record_type (gnat_underlying)
                        : ENUMERAL_TYPE);
  TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
  TYPE_DUMMY_P (gnu_type) = 1;
  TYPE_STUB_DECL (gnu_type)
    = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
  if (Is_By_Reference_Type (gnat_type))
    TREE_ADDRESSABLE (gnu_type) = 1;

  SET_DUMMY_NODE (gnat_underlying, gnu_type);

  return gnu_type;
}

/* Return nonzero if we are currently in the global binding level.  */

int
global_bindings_p (void)
{
  return ((force_global || !current_function_decl) ? -1 : 0);
}

/* Enter a new binding level.  */

void
gnat_pushlevel (void)
{
  struct gnat_binding_level *newlevel = NULL;

  /* Reuse a struct for this binding level, if there is one.  */
  if (free_binding_level)
    {
      newlevel = free_binding_level;
      free_binding_level = free_binding_level->chain;
    }
  else
    newlevel
      = (struct gnat_binding_level *)
        ggc_alloc (sizeof (struct gnat_binding_level));

  /* Use a free BLOCK, if any; otherwise, allocate one.  */
  if (free_block_chain)
    {
      newlevel->block = free_block_chain;
      free_block_chain = BLOCK_CHAIN (free_block_chain);
      BLOCK_CHAIN (newlevel->block) = NULL_TREE;
    }
  else
    newlevel->block = make_node (BLOCK);

  /* Point the BLOCK we just made to its parent.  */
  if (current_binding_level)
    BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;

  BLOCK_VARS (newlevel->block) = NULL_TREE;
  BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
  TREE_USED (newlevel->block) = 1;

  /* Add this level to the front of the chain (stack) of active levels.  */
  newlevel->chain = current_binding_level;
  newlevel->jmpbuf_decl = NULL_TREE;
  current_binding_level = newlevel;
}

/* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
   and point FNDECL to this BLOCK.  */

void
set_current_block_context (tree fndecl)
{
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  set_block_for_group (current_binding_level->block);
}

/* Set the jmpbuf_decl for the current binding level to DECL.  */

void
set_block_jmpbuf_decl (tree decl)
{
  current_binding_level->jmpbuf_decl = decl;
}

/* Get the jmpbuf_decl, if any, for the current binding level.  */

tree
get_block_jmpbuf_decl (void)
{
  return current_binding_level->jmpbuf_decl;
}

/* Exit a binding level.  Set any BLOCK into the current code group.  */

void
gnat_poplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
  BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));

  /* If this is a function-level BLOCK, don't do anything.  Otherwise, if
     there are no variables, free the block and merge its subblocks into
     those of its parent block; otherwise, add it to the list of its
     parent's subblocks.  */
  if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
    ;
  else if (BLOCK_VARS (block) == NULL_TREE)
    {
      BLOCK_SUBBLOCKS (level->chain->block)
        = chainon (BLOCK_SUBBLOCKS (block),
                   BLOCK_SUBBLOCKS (level->chain->block));
      BLOCK_CHAIN (block) = free_block_chain;
      free_block_chain = block;
    }
  else
    {
      BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
      BLOCK_SUBBLOCKS (level->chain->block) = block;
      TREE_USED (block) = 1;
      set_block_for_group (block);
    }

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}

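/* Binding levels are used in matched push/pop pairs.  For a hypothetical
   subprogram decl gnu_subprog_decl, a caller would typically do something
   like:

     gnat_pushlevel ();
     set_current_block_context (gnu_subprog_decl);
     ... translate the declarations and statements of the body ...
     gnat_poplevel ();

   so that the resulting BLOCK tree mirrors the source scope structure.  */
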
/* Records a ..._DECL node DECL as belonging to the current lexical scope
   and uses GNAT_NODE for location information and propagating flags.  */

void
gnat_pushdecl (tree decl, Node_Id gnat_node)
{
  /* If this decl is a public external or at the top level, there is no
     context.  But PARM_DECLs always go in the level of their function.  */
  if (TREE_CODE (decl) != PARM_DECL
      && ((DECL_EXTERNAL (decl) && TREE_PUBLIC (decl))
          || global_bindings_p ()))
    DECL_CONTEXT (decl) = 0;
  else
    {
      DECL_CONTEXT (decl) = current_function_decl;

      /* Functions imported in another function are not really nested.
         For really nested functions mark them initially as needing
         a static chain for uses of that flag before unnesting;
         lower_nested_functions will then recompute it.  */
      if (TREE_CODE (decl) == FUNCTION_DECL && !TREE_PUBLIC (decl))
        DECL_STATIC_CHAIN (decl) = 1;
    }

  TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));

  /* Set the location of DECL and emit a declaration for it.  */
  if (Present (gnat_node))
    Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
  add_decl_expr (decl, gnat_node);

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later.  Put global variables in the
     globals list and builtin functions in a dedicated list to speed up
     further lookups.  Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
     the list, as they will cause trouble with the debugger and aren't needed
     anyway.  */
  if (TREE_CODE (decl) != TYPE_DECL
      || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
    {
      if (global_bindings_p ())
        {
          VEC_safe_push (tree, gc, global_decls, decl);

          if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
            VEC_safe_push (tree, gc, builtin_decls, decl);
        }
      else
        {
          TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
          BLOCK_VARS (current_binding_level->block) = decl;
        }
    }

  /* For the declaration of a type, set its name if it either is not already
     set or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same POINTER_TYPE node.  Code in the
     equivalent function of c-decl.c makes a copy of the type node here, but
     that may cause us trouble with incomplete types.  We make an exception
     for fat pointer types because the compiler automatically builds them
     for unconstrained array types and the debugger uses them to represent
     both these and pointers to these.  */
  if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
    {
      tree t = TREE_TYPE (decl);

      if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
        ;
      else if (TYPE_IS_FAT_POINTER_P (t))
        {
          tree tt = build_variant_type_copy (t);
          TYPE_NAME (tt) = decl;
          TREE_USED (tt) = TREE_USED (t);
          TREE_TYPE (decl) = tt;
          if (DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
            DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
          else
            DECL_ORIGINAL_TYPE (decl) = t;
          t = NULL_TREE;
          DECL_ARTIFICIAL (decl) = 0;
        }
      else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
        ;
      else
        t = NULL_TREE;

      /* Propagate the name to all the variants.  This is needed for
         the type qualifiers machinery to work properly.  */
      if (t)
        for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
          TYPE_NAME (t) = decl;
    }
}

/* Record TYPE as a builtin type for Ada.  NAME is the name of the type.  */

void
record_builtin_type (const char *name, tree type)
{
  tree type_decl = build_decl (input_location,
                               TYPE_DECL, get_identifier (name), type);

  gnat_pushdecl (type_decl, Empty);

  if (debug_hooks->type_decl)
    debug_hooks->type_decl (type_decl, false);
}

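/* For example, a hypothetical standard type held in gnu_integer_type could
   be registered with something like:

     record_builtin_type ("integer", gnu_integer_type);

   which pushes a named TYPE_DECL at the global level and, if the debug
   back-end asks for it, emits the corresponding type declaration.  */
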
/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
   finish constructing the record or union type.  If REP_LEVEL is zero, this
   record has no representation clause and so will be entirely laid out here.
   If REP_LEVEL is one, this record has a representation clause and has been
   laid out already; only set the sizes and alignment.  If REP_LEVEL is two,
   this record is derived from a parent record and thus inherits its layout;
   only make a pass on the fields to finalize them.  DEBUG_INFO_P is true if
   we need to write debug information about this type.  */

void
finish_record_type (tree record_type, tree field_list, int rep_level,
                    bool debug_info_p)
{
  enum tree_code code = TREE_CODE (record_type);
  tree name = TYPE_NAME (record_type);
  tree ada_size = bitsize_zero_node;
  tree size = bitsize_zero_node;
  bool had_size = TYPE_SIZE (record_type) != 0;
  bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
  bool had_align = TYPE_ALIGN (record_type) != 0;
  tree field;

  TYPE_FIELDS (record_type) = field_list;

  /* Always attach the TYPE_STUB_DECL for a record type.  It is required to
     generate debug info and have a parallel type.  */
  if (name && TREE_CODE (name) == TYPE_DECL)
    name = DECL_NAME (name);
  TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);

  /* Globally initialize the record first.  If this is a rep'ed record,
     that just means some initializations; otherwise, layout the record.  */
  if (rep_level > 0)
    {
      TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));

      if (!had_size_unit)
        TYPE_SIZE_UNIT (record_type) = size_zero_node;

      if (!had_size)
        TYPE_SIZE (record_type) = bitsize_zero_node;

      /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
         out just like a UNION_TYPE, since the size will be fixed.  */
      else if (code == QUAL_UNION_TYPE)
        code = UNION_TYPE;
    }
  else
    {
      /* Ensure there isn't a size already set.  There can be in an error
         case where there is a rep clause but all fields have errors and
         no longer have a position.  */
      TYPE_SIZE (record_type) = 0;
      layout_type (record_type);
    }

  /* At this point, the position and size of each field is known.  It was
     either set before entry by a rep clause, or by laying out the type above.

     We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
     to compute the Ada size; the GCC size and alignment (for rep'ed records
     that are not padding types); and the mode (for rep'ed records).  We also
     clear the DECL_BIT_FIELD indication for the cases we know have not been
     handled yet, and adjust DECL_NONADDRESSABLE_P accordingly.  */

  if (code == QUAL_UNION_TYPE)
    field_list = nreverse (field_list);

  for (field = field_list; field; field = TREE_CHAIN (field))
    {
      tree type = TREE_TYPE (field);
      tree pos = bit_position (field);
      tree this_size = DECL_SIZE (field);
      tree this_ada_size;

      if ((TREE_CODE (type) == RECORD_TYPE
           || TREE_CODE (type) == UNION_TYPE
           || TREE_CODE (type) == QUAL_UNION_TYPE)
          && !TYPE_FAT_POINTER_P (type)
          && !TYPE_CONTAINS_TEMPLATE_P (type)
          && TYPE_ADA_SIZE (type))
        this_ada_size = TYPE_ADA_SIZE (type);
      else
        this_ada_size = this_size;

      /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle.  */
      if (DECL_BIT_FIELD (field)
          && operand_equal_p (this_size, TYPE_SIZE (type), 0))
        {
          unsigned int align = TYPE_ALIGN (type);

          /* In the general case, type alignment is required.  */
          if (value_factor_p (pos, align))
            {
              /* The enclosing record type must be sufficiently aligned.
                 Otherwise, if no alignment was specified for it and it
                 has been laid out already, bump its alignment to the
                 desired one if this is compatible with its size.  */
              if (TYPE_ALIGN (record_type) >= align)
                {
                  DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
                  DECL_BIT_FIELD (field) = 0;
                }
              else if (!had_align
                       && rep_level == 0
                       && value_factor_p (TYPE_SIZE (record_type), align))
                {
                  TYPE_ALIGN (record_type) = align;
                  DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
                  DECL_BIT_FIELD (field) = 0;
                }
            }

          /* In the non-strict alignment case, only byte alignment is
             required.  */
          if (!STRICT_ALIGNMENT
              && DECL_BIT_FIELD (field)
              && value_factor_p (pos, BITS_PER_UNIT))
            DECL_BIT_FIELD (field) = 0;
        }

      /* If we still have DECL_BIT_FIELD set at this point, we know that the
         field is technically not addressable.  Except that it can actually
         be addressed if it is BLKmode and happens to be properly aligned.  */
      if (DECL_BIT_FIELD (field)
          && !(DECL_MODE (field) == BLKmode
               && value_factor_p (pos, BITS_PER_UNIT)))
        DECL_NONADDRESSABLE_P (field) = 1;

      /* A type must be as aligned as its most aligned field that is not
         a bit-field.  But this is already enforced by layout_type.  */
      if (rep_level > 0 && !DECL_BIT_FIELD (field))
        TYPE_ALIGN (record_type)
          = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));

      switch (code)
        {
        case UNION_TYPE:
          ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
          size = size_binop (MAX_EXPR, size, this_size);
          break;

        case QUAL_UNION_TYPE:
          ada_size
            = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
                           this_ada_size, ada_size);
          size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
                              this_size, size);
          break;

        case RECORD_TYPE:
          /* Since we know here that all fields are sorted in order of
             increasing bit position, the size of the record is one
             higher than the ending bit of the last field processed
             unless we have a rep clause, since in that case we might
             have a field outside a QUAL_UNION_TYPE that has a higher ending
             position.  So use a MAX in that case.  Also, if this field is a
             QUAL_UNION_TYPE, we need to take into account the previous size in
             the case of empty variants.  */
          ada_size
            = merge_sizes (ada_size, pos, this_ada_size,
                           TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
          size
            = merge_sizes (size, pos, this_size,
                           TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
          break;

        default:
          gcc_unreachable ();
        }
    }

  if (code == QUAL_UNION_TYPE)
    nreverse (field_list);

  if (rep_level < 2)
    {
      /* If this is a padding record, we never want to make the size smaller
         than what was specified in it, if any.  */
      if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
        size = TYPE_SIZE (record_type);

      /* Now set any of the values we've just computed that apply.  */
      if (!TYPE_FAT_POINTER_P (record_type)
          && !TYPE_CONTAINS_TEMPLATE_P (record_type))
        SET_TYPE_ADA_SIZE (record_type, ada_size);

      if (rep_level > 0)
        {
          tree size_unit = had_size_unit
                           ? TYPE_SIZE_UNIT (record_type)
                           : convert (sizetype,
                                      size_binop (CEIL_DIV_EXPR, size,
                                                  bitsize_unit_node));
          unsigned int align = TYPE_ALIGN (record_type);

          TYPE_SIZE (record_type) = variable_size (round_up (size, align));
          TYPE_SIZE_UNIT (record_type)
            = variable_size (round_up (size_unit, align / BITS_PER_UNIT));

          compute_record_mode (record_type);
        }
    }

  if (debug_info_p)
    rest_of_record_type_compilation (record_type);
}

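/* For example, a record with no representation clause and two fields could
   be assembled along these lines, where gnu_field1 and gnu_field2 stand for
   FIELD_DECLs created with create_field_decl below, using gnu_record as
   their enclosing type:

     tree gnu_record = make_node (RECORD_TYPE);
     TYPE_NAME (gnu_record) = get_identifier ("some_record");
     TREE_CHAIN (gnu_field1) = gnu_field2;
     finish_record_type (gnu_record, gnu_field1, 0, true);

   with REP_LEVEL 0 requesting a full layout and DEBUG_INFO_P true asking
   for debug information to be written.  */
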
/* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
   associated with it.  It need not be invoked directly in most cases since
   finish_record_type takes care of doing so, but this can be necessary if
   a parallel type is to be attached to the record type.  */

void
rest_of_record_type_compilation (tree record_type)
{
  tree field_list = TYPE_FIELDS (record_type);
  tree field;
  enum tree_code code = TREE_CODE (record_type);
  bool var_size = false;

  for (field = field_list; field; field = TREE_CHAIN (field))
    {
      /* We need to make an XVE/XVU record if any field has variable size,
         whether or not the record does.  For example, if we have a union,
         it may be that all fields, rounded up to the alignment, have the
         same size, in which case we'll use that size.  But the debug
         output routines (except Dwarf2) won't be able to output the fields,
         so we need to make the special record.  */
      if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
          /* If a field has a non-constant qualifier, the record will have
             variable size too.  */
          || (code == QUAL_UNION_TYPE
              && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
        {
          var_size = true;
          break;
        }
    }

  /* If this record is of variable size, rename it so that the
     debugger knows it is and make a new, parallel, record
     that tells the debugger how the record is laid out.  See
     exp_dbug.ads.  But don't do this for records that are padding
     since they confuse GDB.  */
  if (var_size && !TYPE_IS_PADDING_P (record_type))
    {
      tree new_record_type
        = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
                     ? UNION_TYPE : TREE_CODE (record_type));
      tree orig_name = TYPE_NAME (record_type), new_name;
      tree last_pos = bitsize_zero_node;
      tree old_field, prev_old_field = NULL_TREE;

      if (TREE_CODE (orig_name) == TYPE_DECL)
        orig_name = DECL_NAME (orig_name);

      new_name
        = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
                       ? "XVU" : "XVE");
      TYPE_NAME (new_record_type) = new_name;
      TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
      TYPE_STUB_DECL (new_record_type)
        = create_type_stub_decl (new_name, new_record_type);
      DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
        = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
      TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
      TYPE_SIZE_UNIT (new_record_type)
        = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);

      add_parallel_type (TYPE_STUB_DECL (record_type), new_record_type);

      /* Now scan all the fields, replacing each field with a new
         field corresponding to the new encoding.  */
      for (old_field = TYPE_FIELDS (record_type); old_field;
           old_field = TREE_CHAIN (old_field))
        {
          tree field_type = TREE_TYPE (old_field);
          tree field_name = DECL_NAME (old_field);
          tree new_field;
          tree curpos = bit_position (old_field);
          bool var = false;
          unsigned int align = 0;
          tree pos;

          /* See how the position was modified from the last position.

             There are two basic cases we support: a value was added
             to the last position or the last position was rounded to
             a boundary and then something was added.  Check for the
             first case first.  If not, see if there is any evidence
             of rounding.  If so, round the last position and try
             again.

             If this is a union, the position can be taken as zero.  */

          /* Some computations depend on the shape of the position expression,
             so strip conversions to make sure it's exposed.  */
          curpos = remove_conversions (curpos, true);

          if (TREE_CODE (new_record_type) == UNION_TYPE)
            pos = bitsize_zero_node, align = 0;
          else
            pos = compute_related_constant (curpos, last_pos);

          if (!pos && TREE_CODE (curpos) == MULT_EXPR
              && host_integerp (TREE_OPERAND (curpos, 1), 1))
            {
              tree offset = TREE_OPERAND (curpos, 0);
              align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);

              /* An offset which is a bitwise AND with a negative power of 2
                 means an alignment corresponding to this power of 2.  Note
                 that, as sizetype is sign-extended but nonetheless unsigned,
                 we don't directly use tree_int_cst_sgn.  */
              offset = remove_conversions (offset, true);
              if (TREE_CODE (offset) == BIT_AND_EXPR
                  && host_integerp (TREE_OPERAND (offset, 1), 0)
                  && TREE_INT_CST_HIGH (TREE_OPERAND (offset, 1)) < 0)
                {
                  unsigned int pow
                    = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
                  if (exact_log2 (pow) > 0)
                    align *= pow;
                }

              pos = compute_related_constant (curpos,
                                              round_up (last_pos, align));
            }
          else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
                   && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
                   && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
                   && host_integerp (TREE_OPERAND
                                     (TREE_OPERAND (curpos, 0), 1),
                                     1))
            {
              align
                = tree_low_cst
                  (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
              pos = compute_related_constant (curpos,
                                              round_up (last_pos, align));
            }
          else if (potential_alignment_gap (prev_old_field, old_field,
                                            pos))
            {
              align = TYPE_ALIGN (field_type);
              pos = compute_related_constant (curpos,
                                              round_up (last_pos, align));
            }

          /* If we can't compute a position, set it to zero.

             ??? We really should abort here, but it's too much work
             to get this correct for all cases.  */

          if (!pos)
            pos = bitsize_zero_node;

          /* See if this type is variable-sized and make a pointer type
             and indicate the indirection if so.  Beware that the debug
             back-end may adjust the position computed above according
             to the alignment of the field type, i.e. the pointer type
             in this case, if we don't preventively counter that.  */
          if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
            {
              field_type = build_pointer_type (field_type);
              if (align != 0 && TYPE_ALIGN (field_type) > align)
                {
                  field_type = copy_node (field_type);
                  TYPE_ALIGN (field_type) = align;
                }
              var = true;
            }

          /* Make a new field name, if necessary.  */
          if (var || align != 0)
            {
              char suffix[16];

              if (align != 0)
                sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
                         align / BITS_PER_UNIT);
              else
                strcpy (suffix, "XVL");

              field_name = concat_name (field_name, suffix);
            }

          new_field = create_field_decl (field_name, field_type,
                                         new_record_type, 0,
                                         DECL_SIZE (old_field), pos, 0);
          TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
          TYPE_FIELDS (new_record_type) = new_field;

          /* If old_field is a QUAL_UNION_TYPE, take its size as being
             zero.  The only time it's not the last field of the record
             is when there are other components at fixed positions after
             it (meaning there was a rep clause for every field) and we
             want to be able to encode them.  */
          last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
                                 (TREE_CODE (TREE_TYPE (old_field))
                                  == QUAL_UNION_TYPE)
                                 ? bitsize_zero_node
                                 : DECL_SIZE (old_field));
          prev_old_field = old_field;
        }

      TYPE_FIELDS (new_record_type)
        = nreverse (TYPE_FIELDS (new_record_type));

      rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type));
    }

  rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
}

/* Append PARALLEL_TYPE to the chain of parallel types of DECL.  */

void
add_parallel_type (tree decl, tree parallel_type)
{
  tree d = decl;

  while (DECL_PARALLEL_TYPE (d))
    d = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (d));

  SET_DECL_PARALLEL_TYPE (d, parallel_type);
}

/* Return the parallel type associated with a type, if any.  */

tree
get_parallel_type (tree type)
{
  if (TYPE_STUB_DECL (type))
    return DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type));
  else
    return NULL_TREE;
}

/* Utility function of above to merge LAST_SIZE, the previous size of a
   record, with FIRST_BIT and SIZE that describe a field.  SPECIAL is true
   if this represents a QUAL_UNION_TYPE, in which case we must look for
   COND_EXPRs and replace a value of zero with the old size.  If HAS_REP is
   true, we take the MAX of the end position of this field with LAST_SIZE.
   In all other cases, we use FIRST_BIT plus SIZE.  Return an expression
   for the size.  */

static tree
merge_sizes (tree last_size, tree first_bit, tree size, bool special,
             bool has_rep)
{
  tree type = TREE_TYPE (last_size);
  tree new_size;

  if (!special || TREE_CODE (size) != COND_EXPR)
    {
      new_size = size_binop (PLUS_EXPR, first_bit, size);
      if (has_rep)
        new_size = size_binop (MAX_EXPR, last_size, new_size);
    }
  else
    new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
                            integer_zerop (TREE_OPERAND (size, 1))
                            ? last_size : merge_sizes (last_size, first_bit,
                                                       TREE_OPERAND (size, 1),
                                                       1, has_rep),
                            integer_zerop (TREE_OPERAND (size, 2))
                            ? last_size : merge_sizes (last_size, first_bit,
                                                       TREE_OPERAND (size, 2),
                                                       1, has_rep));

  /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
     when fed through substitute_in_expr) into thinking that a constant
     size is not constant.  */
  while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
    new_size = TREE_OPERAND (new_size, 0);

  return new_size;
}

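/* As a concrete illustration, for a rep'ed record (HAS_REP true) whose
   running size LAST_SIZE is 64 bits and whose next field starts at
   FIRST_BIT 32 with SIZE 16, the end position is 32 + 16 = 48 and the
   result is MAX (64, 48) = 64 bits; without a rep clause the result
   would simply be 48 bits.  */
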
/* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
   related by the addition of a constant.  Return that constant if so.  */

static tree
compute_related_constant (tree op0, tree op1)
{
  tree op0_var, op1_var;
  tree op0_con = split_plus (op0, &op0_var);
  tree op1_con = split_plus (op1, &op1_var);
  tree result = size_binop (MINUS_EXPR, op0_con, op1_con);

  if (operand_equal_p (op0_var, op1_var, 0))
    return result;
  else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
    return result;
  else
    return 0;
}

/* Utility function of above to split a tree IN which may be a sum, into a
   constant part, which is returned, and a variable part, which is stored
   in *PVAR.  *PVAR may be bitsize_zero_node.  All operations must be of
   bitsizetype.  */

static tree
split_plus (tree in, tree *pvar)
{
  /* Strip NOPS in order to ease the tree traversal and maximize the
     potential for constant or plus/minus discovery.  We need to be careful
     to always return and set *pvar to bitsizetype trees, but it's worth
     the effort.  */
  STRIP_NOPS (in);

  *pvar = convert (bitsizetype, in);

  if (TREE_CODE (in) == INTEGER_CST)
    {
      *pvar = bitsize_zero_node;
      return convert (bitsizetype, in);
    }
  else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
    {
      tree lhs_var, rhs_var;
      tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
      tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);

      if (lhs_var == TREE_OPERAND (in, 0)
          && rhs_var == TREE_OPERAND (in, 1))
        return bitsize_zero_node;

      *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
      return size_binop (TREE_CODE (in), lhs_con, rhs_con);
    }
  else
    return bitsize_zero_node;
}

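/* For instance, if OP0 is VAR + 48 and OP1 is VAR + 16 in bitsizetype,
   split_plus yields the pairs (48, VAR) and (16, VAR), so
   compute_related_constant returns 32; if the variable parts differ,
   it returns zero to signal that no constant offset relates the two.  */
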
/* Return a FUNCTION_TYPE node.  RETURN_TYPE is the type returned by the
   subprogram.  If it is VOID_TYPE, then we are dealing with a procedure,
   otherwise we are dealing with a function.  PARAM_DECL_LIST is a list of
   PARM_DECL nodes that are the subprogram parameters.  CICO_LIST is the
   copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
   RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
   object.  RETURN_BY_DIRECT_REF_P is true if the function returns by direct
   reference.  RETURN_BY_INVISI_REF_P is true if the function returns by
   invisible reference.  */

tree
create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
                     bool return_unconstrained_p, bool return_by_direct_ref_p,
                     bool return_by_invisi_ref_p)
{
  /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
     the subprogram formal parameters.  This list is generated by traversing
     the input list of PARM_DECL nodes.  */
  tree param_type_list = NULL_TREE;
  tree t, type;

  for (t = param_decl_list; t; t = TREE_CHAIN (t))
    param_type_list = tree_cons (NULL_TREE, TREE_TYPE (t), param_type_list);

  /* The list of the function parameter types has to be terminated by the void
     type to signal to the back-end that we are not dealing with a variable
     parameter subprogram, but that it has a fixed number of parameters.  */
  param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);

  /* The list of argument types has been created in reverse so reverse it.  */
  param_type_list = nreverse (param_type_list);

  type = build_function_type (return_type, param_type_list);

  /* TYPE may have been shared since GCC hashes types.  If it has a different
     CICO_LIST, make a copy.  Likewise for the various flags.  */
  if (TYPE_CI_CO_LIST (type) != cico_list
      || TYPE_RETURN_UNCONSTRAINED_P (type) != return_unconstrained_p
      || TYPE_RETURN_BY_DIRECT_REF_P (type) != return_by_direct_ref_p
      || TREE_ADDRESSABLE (type) != return_by_invisi_ref_p)
    {
      type = copy_type (type);
      TYPE_CI_CO_LIST (type) = cico_list;
      TYPE_RETURN_UNCONSTRAINED_P (type) = return_unconstrained_p;
      TYPE_RETURN_BY_DIRECT_REF_P (type) = return_by_direct_ref_p;
      TREE_ADDRESSABLE (type) = return_by_invisi_ref_p;
    }

  return type;
}

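/* For instance, the FUNCTION_TYPE of a hypothetical procedure taking two
   parameters gnu_param1 and gnu_param2 (PARM_DECLs) and using no special
   return mechanism could be obtained with something like:

     TREE_CHAIN (gnu_param1) = gnu_param2;
     gnu_type = create_subprog_type (void_type_node, gnu_param1,
                                     NULL_TREE, false, false, false);  */
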
/* Return a copy of TYPE but safe to modify in any way.  */

tree
copy_type (tree type)
{
  tree new_type = copy_node (type);

  /* Unshare the language-specific data.  */
  if (TYPE_LANG_SPECIFIC (type))
    {
      TYPE_LANG_SPECIFIC (new_type) = NULL;
      SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
    }

  /* And the contents of the language-specific slot if needed.  */
  if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
      && TYPE_RM_VALUES (type))
    {
      TYPE_RM_VALUES (new_type) = NULL_TREE;
      SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
      SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
      SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
    }

  /* copy_node clears this field instead of copying it, because it is
     aliased with TREE_CHAIN.  */
  TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);

  TYPE_POINTER_TO (new_type) = 0;
  TYPE_REFERENCE_TO (new_type) = 0;
  TYPE_MAIN_VARIANT (new_type) = new_type;
  TYPE_NEXT_VARIANT (new_type) = 0;

  return new_type;
}

/* Return a subtype of sizetype with range MIN to MAX and whose
   TYPE_INDEX_TYPE is INDEX.  GNAT_NODE is used for the position
   of the associated TYPE_DECL.  */

tree
create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
{
  /* First build a type for the desired range.  */
  tree type = build_index_2_type (min, max);

  /* If this type has the TYPE_INDEX_TYPE we want, return it.  */
  if (TYPE_INDEX_TYPE (type) == index)
    return type;

  /* Otherwise, if TYPE_INDEX_TYPE is set, make a copy.  Note that we have
     no way of sharing these types, but that's only a small hole.  */
  if (TYPE_INDEX_TYPE (type))
    type = copy_type (type);

  SET_TYPE_INDEX_TYPE (type, index);
  create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);

  return type;
}

/* Return a subtype of TYPE with range MIN to MAX.  If TYPE is NULL,
   sizetype is used.  */

tree
create_range_type (tree type, tree min, tree max)
{
  tree range_type;

  if (type == NULL_TREE)
    type = sizetype;

  /* First build a type with the base range.  */
  range_type
    = build_range_type (type, TYPE_MIN_VALUE (type), TYPE_MAX_VALUE (type));

  min = convert (type, min);
  max = convert (type, max);

  /* If this type has the TYPE_RM_{MIN,MAX}_VALUE we want, return it.  */
  if (TYPE_RM_MIN_VALUE (range_type)
      && TYPE_RM_MAX_VALUE (range_type)
      && operand_equal_p (TYPE_RM_MIN_VALUE (range_type), min, 0)
      && operand_equal_p (TYPE_RM_MAX_VALUE (range_type), max, 0))
    return range_type;

  /* Otherwise, if TYPE_RM_{MIN,MAX}_VALUE is set, make a copy.  */
  if (TYPE_RM_MIN_VALUE (range_type) || TYPE_RM_MAX_VALUE (range_type))
    range_type = copy_type (range_type);

  /* Then set the actual range.  */
  SET_TYPE_RM_MIN_VALUE (range_type, min);
  SET_TYPE_RM_MAX_VALUE (range_type, max);

  return range_type;
}

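/* For example, a hypothetical subtype with bounds 1 .. 10 over sizetype
   can be built with:

     tree gnu_range = create_range_type (NULL_TREE, size_int (1),
                                         size_int (10));

   which records the bounds in TYPE_RM_MIN/MAX_VALUE rather than in the
   base range of the type.  */
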
/* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
   TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
   its data type.  */

tree
create_type_stub_decl (tree type_name, tree type)
{
  /* Using a named TYPE_DECL ensures that a type name marker is emitted in
     STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
     emitted in DWARF.  */
  tree type_decl = build_decl (input_location,
                               TYPE_DECL, type_name, type);
  DECL_ARTIFICIAL (type_decl) = 1;
  return type_decl;
}

/* Return a TYPE_DECL node.  TYPE_NAME gives the name of the type and TYPE
   is a ..._TYPE node giving its data type.  ARTIFICIAL_P is true if this
   is a declaration that was generated by the compiler.  DEBUG_INFO_P is
   true if we need to write debug information about this type.  GNAT_NODE
   is used for the position of the decl.  */

tree
create_type_decl (tree type_name, tree type, struct attrib *attr_list,
                  bool artificial_p, bool debug_info_p, Node_Id gnat_node)
{
  enum tree_code code = TREE_CODE (type);
  bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
  tree type_decl;

  /* Only the builtin TYPE_STUB_DECL should be used for dummy types.  */
  gcc_assert (!TYPE_IS_DUMMY_P (type));

  /* If the type hasn't been named yet, we're naming it; preserve an existing
     TYPE_STUB_DECL that has been attached to it for some purpose.  */
  if (!named && TYPE_STUB_DECL (type))
    {
      type_decl = TYPE_STUB_DECL (type);
      DECL_NAME (type_decl) = type_name;
    }
  else
    type_decl = build_decl (input_location,
                            TYPE_DECL, type_name, type);

  DECL_ARTIFICIAL (type_decl) = artificial_p;

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (type_decl, gnat_node);

  process_attributes (type_decl, attr_list);

  /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
     This causes the name to be also viewed as a "tag" by the debug
     back-end, with the advantage that no DW_TAG_typedef is emitted
     for artificial "tagged" types in DWARF.  */
  if (!named)
    TYPE_STUB_DECL (type) = type_decl;

  /* Pass the type declaration to the debug back-end unless this is an
     UNCONSTRAINED_ARRAY_TYPE that the back-end does not support, or a
     type for which debugging information was not requested, or else an
     ENUMERAL_TYPE or RECORD_TYPE (except for fat pointers) which are
     handled separately.  And do not pass dummy types either.  */
  if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
    DECL_IGNORED_P (type_decl) = 1;
  else if (code != ENUMERAL_TYPE
           && (code != RECORD_TYPE || TYPE_FAT_POINTER_P (type))
           && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
                && TYPE_IS_DUMMY_P (TREE_TYPE (type)))
           && !(code == RECORD_TYPE
                && TYPE_IS_DUMMY_P
                   (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))))))
    rest_of_type_decl_compilation (type_decl);

  return type_decl;
}

/* Return a VAR_DECL or CONST_DECL node.

   VAR_NAME gives the name of the variable.  ASM_NAME is its assembler name
   (if provided).  TYPE is its data type (a GCC ..._TYPE node).  VAR_INIT is
   the GCC tree for an optional initial expression; NULL_TREE if none.

   CONST_FLAG is true if this variable is constant, in which case we might
   return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.

   PUBLIC_FLAG is true if this is for a reference to a public entity or for a
   definition to be made visible outside of the current compilation unit, for
   instance variable definitions in a package specification.

   EXTERN_FLAG is true when processing an external variable declaration (as
   opposed to a definition: no storage is to be allocated for the variable).

   STATIC_FLAG is only relevant when not at top level.  In that case
   it indicates whether to always allocate storage for the variable.

   GNAT_NODE is used for the position of the decl.  */

tree
create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
                   bool const_flag, bool public_flag, bool extern_flag,
                   bool static_flag, bool const_decl_allowed_p,
                   struct attrib *attr_list, Node_Id gnat_node)
{
  bool init_const
    = (var_init != 0
       && gnat_types_compatible_p (type, TREE_TYPE (var_init))
       && (global_bindings_p () || static_flag
           ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
           : TREE_CONSTANT (var_init)));

  /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
     case the initializer may be used in lieu of the DECL node (as done in
     Identifier_to_gnu).  This is useful to prevent the need of elaboration
     code when an identifier for which such a decl is made is in turn used as
     an initializer.  We used to rely on CONST_DECL vs VAR_DECL for this
     purpose, but extra constraints apply to this choice (see below) and are
     not relevant to the distinction we wish to make.  */
  bool constant_p = const_flag && init_const;

  /* The actual DECL node.  CONST_DECL was initially intended for enumerals
     and may be used for scalars in general but not for aggregates.  */
  tree var_decl
    = build_decl (input_location,
                  (constant_p && const_decl_allowed_p
                   && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
                  var_name, type);

  /* If this is external, throw away any initializations (they will be done
     elsewhere) unless this is a constant for which we would like to remain
     able to get the initializer.  If we are defining a global here, leave a
     constant initialization and save any variable elaborations for the
     elaboration routine.  If we are just annotating types, throw away the
     initialization if it isn't a constant.  */
  if ((extern_flag && !constant_p)
      || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
    var_init = NULL_TREE;

  /* At the global level, an initializer requiring code to be generated
     produces elaboration statements.  Check that such statements are allowed,
     that is, not violating a No_Elaboration_Code restriction.  */
  if (global_bindings_p () && var_init != 0 && !init_const)
    Check_Elaboration_Code_Allowed (gnat_node);

  DECL_INITIAL (var_decl) = var_init;
  TREE_READONLY (var_decl) = const_flag;
  DECL_EXTERNAL (var_decl) = extern_flag;
  TREE_PUBLIC (var_decl) = public_flag || extern_flag;
  TREE_CONSTANT (var_decl) = constant_p;
  TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
    = TYPE_VOLATILE (type);

  /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
     try to fiddle with DECL_COMMON.  However, on platforms that don't
     support global BSS sections, uninitialized global variables would
     go in DATA instead, thus increasing the size of the executable.  */
  if (!flag_no_common
      && TREE_CODE (var_decl) == VAR_DECL
      && TREE_PUBLIC (var_decl)
      && !have_global_bss_p ())
    DECL_COMMON (var_decl) = 1;

  /* If it's public and not external, always allocate storage for it.
     At the global binding level we need to allocate static storage for the
     variable if and only if it's not external.  If we are not at the top level
     we allocate automatic storage unless requested not to.  */
  TREE_STATIC (var_decl)
    = !extern_flag && (public_flag || static_flag || global_bindings_p ());

  /* For an external constant whose initializer is not absolute, do not emit
     debug info.  In DWARF this would mean a global relocation in a read-only
     section which runs afoul of the PE-COFF runtime relocation mechanism.  */
  if (extern_flag
      && constant_p
      && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
           != null_pointer_node)
    DECL_IGNORED_P (var_decl) = 1;

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (var_decl, gnat_node);

  if (TREE_SIDE_EFFECTS (var_decl))
    TREE_ADDRESSABLE (var_decl) = 1;

  if (TREE_CODE (var_decl) == VAR_DECL)
    {
      if (asm_name)
        SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
      process_attributes (var_decl, attr_list);
      if (global_bindings_p ())
        rest_of_decl_compilation (var_decl, true, 0);
    }
  else
    expand_decl (var_decl);

  return var_decl;
}

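/* As an illustration, a hypothetical library-level Ada constant of type
   gnu_type initialized to a constant expression gnu_expr could be created
   with something like:

     gnu_decl = create_var_decl_1 (get_identifier ("some_constant"),
                                   NULL_TREE, gnu_type, gnu_expr,
                                   true, true, false, false, true,
                                   NULL, gnat_node);

   which may yield a CONST_DECL if gnu_type is not an aggregate type.  */
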
/* Return true if TYPE, an aggregate type, contains (or is) an array.  */

static bool
aggregate_type_contains_array_p (tree type)
{
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree field;
        for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
          if (AGGREGATE_TYPE_P (TREE_TYPE (field))
              && aggregate_type_contains_array_p (TREE_TYPE (field)))
            return true;
        return false;
      }

    case ARRAY_TYPE:
      return true;

    default:
      gcc_unreachable ();
    }
}

/* Return a FIELD_DECL node.  FIELD_NAME is the field's name, FIELD_TYPE is
   its type and RECORD_TYPE is the type of the enclosing record.  PACKED is
   1 if the enclosing record is packed, -1 if it has Component_Alignment of
   Storage_Unit.  If SIZE is nonzero, it is the specified size of the field.
   If POS is nonzero, it is the bit position.  If ADDRESSABLE is nonzero, it
   means we are allowed to take the address of the field; if it is negative,
   we should not make a bitfield, which is used by make_aligning_type.  */

tree
create_field_decl (tree field_name, tree field_type, tree record_type,
                   int packed, tree size, tree pos, int addressable)
{
  tree field_decl = build_decl (input_location,
                                FIELD_DECL, field_name, field_type);

  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TYPE_READONLY (field_type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
     Likewise for an aggregate without specified position that contains an
     array, because in this case slices of variable length of this array
     must be handled by GCC and variable-sized objects need to be aligned
     to at least a byte boundary.  */
  if (packed && (TYPE_MODE (field_type) == BLKmode
                 || (!pos
                     && AGGREGATE_TYPE_P (field_type)
                     && aggregate_type_contains_array_p (field_type))))
    DECL_ALIGN (field_decl) = BITS_PER_UNIT;

  /* If a size is specified, use it.  Otherwise, if the record type is packed
     compute a size to use, which may differ from the object's natural size.
     We always set a size in this case to trigger the checks for bitfield
     creation below, which is typically required when no position has been
     specified.  */
  if (size)
    size = convert (bitsizetype, size);
  else if (packed == 1)
    {
      size = rm_size (field_type);
      if (TYPE_MODE (field_type) == BLKmode)
        size = round_up (size, BITS_PER_UNIT);
    }

  /* According to ADDRESSABLE, we may make a bitfield if a size is specified,
     for two reasons: first, if the size differs from the natural size;
     second, if the alignment is insufficient.  There are a number of ways
     the latter can be true.

     We never make a bitfield if the type of the field has a nonconstant size,
     because no such entity requiring bitfield operations should reach here.

     We do *preventively* make a bitfield when there might be the need for it
     but we don't have all the necessary information to decide, as is the case
     of a field with no specified position in a packed record.

     We also don't look at STRICT_ALIGNMENT here, and rely on later processing
     in layout_decl or finish_record_type to clear the bit_field indication if
     it is in fact not needed.  */
  if (addressable >= 0
      && size
      && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
      && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
          || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
          || packed
          || (TYPE_ALIGN (record_type) != 0
              && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      if (!packed && !pos)
        {
          if (TYPE_ALIGN (record_type) != 0
              && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
            DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
          else
            DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
        }
    }

  DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;

  /* Bump the alignment if need be, either for bitfield/packing purposes or
     to satisfy the type requirements if no such consideration applies.  When
     we get the alignment from the type, indicate if this is from an explicit
     user request, which prevents stor-layout from lowering it later on.  */
  {
    unsigned int bit_align
      = (DECL_BIT_FIELD (field_decl) ? 1
         : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);

    if (bit_align > DECL_ALIGN (field_decl))
      DECL_ALIGN (field_decl) = bit_align;
    else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
      {
        DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
        DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
      }
  }

  if (pos)
    {
      /* We need to pass in the alignment the DECL is known to have.
         This is the lowest-order bit set in POS, but no more than
         the alignment of the record, if one is specified.  Note
         that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      if (host_integerp (pos, 1))
        known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
      else
        known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
          && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
        known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
                             host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
                             : BITS_PER_UNIT);
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
                    &DECL_FIELD_BIT_OFFSET (field_decl),
                    DECL_OFFSET_ALIGN (field_decl), pos);
    }

  /* In addition to what our caller says, claim the field is addressable if we
     know that its type is not suitable.

     The field may also be "technically" nonaddressable, meaning that even if
     we attempt to take the field's address we will actually get the address
     of a copy.  This is the case for true bitfields, but the DECL_BIT_FIELD
     value we have at this point is not accurate enough, so we don't account
     for this here and let finish_record_type decide.  */
  if (!addressable && !type_for_nonaliased_component_p (field_type))
    addressable = 1;

  DECL_NONADDRESSABLE_P (field_decl) = !addressable;

  return field_decl;
}

1584 /* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
1585 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
1586 (either an In parameter or an address of a pass-by-ref parameter). */
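/* As a purely illustrative example (the name "x" is made up), an In
parameter of type Standard.Integer might be obtained with

create_param_decl (get_identifier ("x"), integer_type_node, true)

and chained with the other PARM_DECLs before being passed on to
create_subprog_decl below. */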
1588 tree
1589 create_param_decl (tree param_name, tree param_type, bool readonly)
1591 tree param_decl = build_decl (input_location,
1592 PARM_DECL, param_name, param_type);
1594 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
1595 can lead to various ABI violations. */
1596 if (targetm.calls.promote_prototypes (NULL_TREE)
1597 && INTEGRAL_TYPE_P (param_type)
1598 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1600 /* We have to be careful about biased types here. Make a subtype
1601 of integer_type_node with the proper biasing. */
1602 if (TREE_CODE (param_type) == INTEGER_TYPE
1603 && TYPE_BIASED_REPRESENTATION_P (param_type))
1605 tree subtype
1606 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
1607 TREE_TYPE (subtype) = integer_type_node;
1608 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
1609 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (param_type));
1610 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (param_type));
1611 param_type = subtype;
1613 else
1614 param_type = integer_type_node;
1617 DECL_ARG_TYPE (param_decl) = param_type;
1618 TREE_READONLY (param_decl) = readonly;
1619 return param_decl;
1622 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1624 static void
1625 process_attributes (tree decl, struct attrib *attr_list)
1627 for (; attr_list; attr_list = attr_list->next)
1628 switch (attr_list->type)
1630 case ATTR_MACHINE_ATTRIBUTE:
1631 input_location = DECL_SOURCE_LOCATION (decl);
1632 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
1633 NULL_TREE),
1634 ATTR_FLAG_TYPE_IN_PLACE);
1635 break;
1637 case ATTR_LINK_ALIAS:
1638 if (! DECL_EXTERNAL (decl))
1640 TREE_STATIC (decl) = 1;
1641 assemble_alias (decl, attr_list->name);
1643 break;
1645 case ATTR_WEAK_EXTERNAL:
1646 if (SUPPORTS_WEAK)
1647 declare_weak (decl);
1648 else
1649 post_error ("?weak declarations not supported on this target",
1650 attr_list->error_point);
1651 break;
1653 case ATTR_LINK_SECTION:
1654 if (targetm.have_named_sections)
1656 DECL_SECTION_NAME (decl)
1657 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1658 IDENTIFIER_POINTER (attr_list->name));
1659 DECL_COMMON (decl) = 0;
1661 else
1662 post_error ("?section attributes are not supported for this target",
1663 attr_list->error_point);
1664 break;
1666 case ATTR_LINK_CONSTRUCTOR:
1667 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1668 TREE_USED (decl) = 1;
1669 break;
1671 case ATTR_LINK_DESTRUCTOR:
1672 DECL_STATIC_DESTRUCTOR (decl) = 1;
1673 TREE_USED (decl) = 1;
1674 break;
1676 case ATTR_THREAD_LOCAL_STORAGE:
1677 DECL_TLS_MODEL (decl) = decl_default_tls_model (decl);
1678 DECL_COMMON (decl) = 0;
1679 break;
1683 /* Record DECL as a global renaming pointer. */
1685 void
1686 record_global_renaming_pointer (tree decl)
1688 gcc_assert (DECL_RENAMED_OBJECT (decl));
1689 VEC_safe_push (tree, gc, global_renaming_pointers, decl);
1692 /* Invalidate the global renaming pointers. */
1694 void
1695 invalidate_global_renaming_pointers (void)
1697 unsigned int i;
1698 tree iter;
1700 for (i = 0; VEC_iterate(tree, global_renaming_pointers, i, iter); i++)
1701 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
1703 VEC_free (tree, gc, global_renaming_pointers);
1706 /* Return true if VALUE is known to be a multiple of FACTOR, which must be
1707 a power of 2. */
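/* For instance, this returns true for the constant 24 with FACTOR == 8, and
conservatively returns false for any value that is neither a constant nor a
MULT_EXPR with a suitable operand. */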
1709 bool
1710 value_factor_p (tree value, HOST_WIDE_INT factor)
1712 if (host_integerp (value, 1))
1713 return tree_low_cst (value, 1) % factor == 0;
1715 if (TREE_CODE (value) == MULT_EXPR)
1716 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1717 || value_factor_p (TREE_OPERAND (value, 1), factor));
1719 return false;
1722 /* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1723 unless we can prove these 2 fields are laid out in such a way that no gap
1724 exists between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
1725 is the distance in bits between the end of PREV_FIELD and the starting
1726 position of CURR_FIELD. It is ignored if null. */
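/* For example, a 24-bit PREV_FIELD at constant bit position 0 followed by a
CURR_FIELD requiring 16-bit alignment gives 0 + 24, which is not a multiple
of 16, so a potential gap is reported. */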
1728 static bool
1729 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1731 /* If this is the first field of the record, there cannot be any gap */
1732 if (!prev_field)
1733 return false;
1735 /* If the previous field is a union type, then return False: The only
1736 time when such a field is not the last field of the record is when
1737 there are other components at fixed positions after it (meaning there
1738 was a rep clause for every field), in which case we don't want the
1739 alignment constraint to override them. */
1740 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1741 return false;
1743 /* If the distance between the end of prev_field and the beginning of
1744 curr_field is constant, then there is a gap if the value of this
1745 constant is nonzero. */
1746 if (offset && host_integerp (offset, 1))
1747 return !integer_zerop (offset);
1749 /* If the size and position of the previous field are constant,
1750 then check the sum of this size and position. There will be a gap
1751 iff it is not a multiple of the current field alignment. */
1752 if (host_integerp (DECL_SIZE (prev_field), 1)
1753 && host_integerp (bit_position (prev_field), 1))
1754 return ((tree_low_cst (bit_position (prev_field), 1)
1755 + tree_low_cst (DECL_SIZE (prev_field), 1))
1756 % DECL_ALIGN (curr_field) != 0);
1758 /* If both the position and size of the previous field are multiples
1759 of the current field alignment, there cannot be any gap. */
1760 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1761 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1762 return false;
1764 /* As a fallback, return that there may be a potential gap. */
1765 return true;
1768 /* Returns a LABEL_DECL node for LABEL_NAME. */
1770 tree
1771 create_label_decl (tree label_name)
1773 tree label_decl = build_decl (input_location,
1774 LABEL_DECL, label_name, void_type_node);
1776 DECL_CONTEXT (label_decl) = current_function_decl;
1777 DECL_MODE (label_decl) = VOIDmode;
1778 DECL_SOURCE_LOCATION (label_decl) = input_location;
1780 return label_decl;
1783 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1784 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1785 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1786 PARM_DECL nodes chained through the TREE_CHAIN field).
1788 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1789 appropriate fields in the FUNCTION_DECL. GNAT_NODE gives the location. */
1791 tree
1792 create_subprog_decl (tree subprog_name, tree asm_name,
1793 tree subprog_type, tree param_decl_list, bool inline_flag,
1794 bool public_flag, bool extern_flag,
1795 struct attrib *attr_list, Node_Id gnat_node)
1797 tree subprog_decl = build_decl (input_location, FUNCTION_DECL, subprog_name,
1798 subprog_type);
1799 tree result_decl = build_decl (input_location, RESULT_DECL, NULL_TREE,
1800 TREE_TYPE (subprog_type));
1802 /* If this is a non-inline function nested inside an inlined external
1803 function, we cannot honor both requests without cloning the nested
1804 function in the current unit since it is private to the other unit.
1805 We could inline the nested function as well but it's probably better
1806 to err on the side of too little inlining. */
1807 if (!inline_flag
1808 && current_function_decl
1809 && DECL_DECLARED_INLINE_P (current_function_decl)
1810 && DECL_EXTERNAL (current_function_decl))
1811 DECL_DECLARED_INLINE_P (current_function_decl) = 0;
1813 DECL_EXTERNAL (subprog_decl) = extern_flag;
1814 TREE_PUBLIC (subprog_decl) = public_flag;
1815 TREE_STATIC (subprog_decl) = 1;
1816 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1817 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1818 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1819 DECL_DECLARED_INLINE_P (subprog_decl) = inline_flag;
1820 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1822 DECL_ARTIFICIAL (result_decl) = 1;
1823 DECL_IGNORED_P (result_decl) = 1;
1824 DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (subprog_type);
1825 DECL_RESULT (subprog_decl) = result_decl;
1827 if (asm_name)
1829 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1831 /* The expand_main_function circuitry expects "main_identifier_node" to
1832 designate the DECL_NAME of the 'main' entry point, in turn expected
1833 to be declared as the "main" function literally by default. Ada
1834 program entry points are typically declared with a different name
1835 within the binder generated file, exported as 'main' to satisfy the
1836 system expectations. Force main_identifier_node in this case. */
1837 if (asm_name == main_identifier_node)
1838 DECL_NAME (subprog_decl) = main_identifier_node;
1841 /* Add this decl to the current binding level. */
1842 gnat_pushdecl (subprog_decl, gnat_node);
1844 process_attributes (subprog_decl, attr_list);
1846 /* Output the assembler code and/or RTL for the declaration. */
1847 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
1849 return subprog_decl;
1852 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1853 body. This routine needs to be invoked before processing the declarations
1854 appearing in the subprogram. */
1856 void
1857 begin_subprog_body (tree subprog_decl)
1859 tree param_decl;
1861 announce_function (subprog_decl);
1863 current_function_decl = subprog_decl;
1865 /* Enter a new binding level and show that all the parameters belong to
1866 this function. */
1867 gnat_pushlevel ();
1869 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1870 param_decl = TREE_CHAIN (param_decl))
1871 DECL_CONTEXT (param_decl) = subprog_decl;
1873 make_decl_rtl (subprog_decl);
1875 /* We handle pending sizes via the elaboration of types, so we don't need to
1876 save them. This causes them to be marked as part of the outer function
1877 and then discarded. */
1878 get_pending_sizes ();
1881 /* Finish the definition of the current subprogram BODY and finalize it. */
1883 void
1884 end_subprog_body (tree body)
1886 tree fndecl = current_function_decl;
1888 /* Mark the BLOCK for this level as being for this function and pop the
1889 level. Since the vars in it are the parameters, clear them. */
1890 BLOCK_VARS (current_binding_level->block) = NULL_TREE;
1891 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
1892 DECL_INITIAL (fndecl) = current_binding_level->block;
1893 gnat_poplevel ();
1895 /* We handle pending sizes via the elaboration of types, so we don't
1896 need to save them. */
1897 get_pending_sizes ();
1899 /* Mark the RESULT_DECL as being in this subprogram. */
1900 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
1902 DECL_SAVED_TREE (fndecl) = body;
1904 current_function_decl = DECL_CONTEXT (fndecl);
1906 /* We cannot track the location of errors past this point. */
1907 error_gnat_node = Empty;
1909 /* If we're only annotating types, don't actually compile this function. */
1910 if (type_annotate_only)
1911 return;
1913 /* Dump functions before gimplification. */
1914 dump_function (TDI_original, fndecl);
1916 /* ??? This special handling of nested functions is probably obsolete. */
1917 if (!DECL_CONTEXT (fndecl))
1918 cgraph_finalize_function (fndecl, false);
1919 else
1920 /* Register this function with cgraph just far enough to get it
1921 added to our parent's nested function list. */
1922 (void) cgraph_node (fndecl);
1925 tree
1926 gnat_builtin_function (tree decl)
1928 gnat_pushdecl (decl, Empty);
1929 return decl;
1932 /* Return an integer type with the number of bits of precision given by
1933 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
1934 it is a signed type. */
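/* For instance, gnat_type_for_size (8, 1) yields an unsigned 8-bit integer
type, creating it under the name "UNSIGNED_8" and caching it in
signed_and_unsigned_types if no such type has been requested before. */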
1936 tree
1937 gnat_type_for_size (unsigned precision, int unsignedp)
1939 tree t;
1940 char type_name[20];
1942 if (precision <= 2 * MAX_BITS_PER_WORD
1943 && signed_and_unsigned_types[precision][unsignedp])
1944 return signed_and_unsigned_types[precision][unsignedp];
1946 if (unsignedp)
1947 t = make_unsigned_type (precision);
1948 else
1949 t = make_signed_type (precision);
1951 if (precision <= 2 * MAX_BITS_PER_WORD)
1952 signed_and_unsigned_types[precision][unsignedp] = t;
1954 if (!TYPE_NAME (t))
1956 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
1957 TYPE_NAME (t) = get_identifier (type_name);
1960 return t;
1963 /* Likewise for floating-point types. */
1965 static tree
1966 float_type_for_precision (int precision, enum machine_mode mode)
1968 tree t;
1969 char type_name[20];
1971 if (float_types[(int) mode])
1972 return float_types[(int) mode];
1974 float_types[(int) mode] = t = make_node (REAL_TYPE);
1975 TYPE_PRECISION (t) = precision;
1976 layout_type (t);
1978 gcc_assert (TYPE_MODE (t) == mode);
1979 if (!TYPE_NAME (t))
1981 sprintf (type_name, "FLOAT_%d", precision);
1982 TYPE_NAME (t) = get_identifier (type_name);
1985 return t;
1988 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
1989 an unsigned type; otherwise a signed type is returned. */
1991 tree
1992 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
1994 if (mode == BLKmode)
1995 return NULL_TREE;
1997 if (mode == VOIDmode)
1998 return void_type_node;
2000 if (COMPLEX_MODE_P (mode))
2001 return NULL_TREE;
2003 if (SCALAR_FLOAT_MODE_P (mode))
2004 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2006 if (SCALAR_INT_MODE_P (mode))
2007 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2009 if (VECTOR_MODE_P (mode))
2011 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2012 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
2013 if (inner_type)
2014 return build_vector_type_for_mode (inner_type, mode);
2017 return NULL_TREE;
2020 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2022 tree
2023 gnat_unsigned_type (tree type_node)
2025 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2027 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2029 type = copy_node (type);
2030 TREE_TYPE (type) = type_node;
2032 else if (TREE_TYPE (type_node)
2033 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2034 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2036 type = copy_node (type);
2037 TREE_TYPE (type) = TREE_TYPE (type_node);
2040 return type;
2043 /* Return the signed version of a TYPE_NODE, a scalar type. */
2045 tree
2046 gnat_signed_type (tree type_node)
2048 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2050 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2052 type = copy_node (type);
2053 TREE_TYPE (type) = type_node;
2055 else if (TREE_TYPE (type_node)
2056 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2057 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2059 type = copy_node (type);
2060 TREE_TYPE (type) = TREE_TYPE (type_node);
2063 return type;
2066 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2067 transparently converted to each other. */
2070 gnat_types_compatible_p (tree t1, tree t2)
2072 enum tree_code code;
2074 /* This is the default criterion. */
2075 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
2076 return 1;
2078 /* We only check structural equivalence here. */
2079 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
2080 return 0;
2082 /* Vector types are also compatible if they have the same number of subparts
2083 and the same form of (scalar) element type. */
2084 if (code == VECTOR_TYPE
2085 && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
2086 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
2087 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
2088 return 1;
2090 /* Array types are also compatible if they are constrained and have
2091 the same component type and the same domain. */
2092 if (code == ARRAY_TYPE
2093 && TREE_TYPE (t1) == TREE_TYPE (t2)
2094 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
2095 || (TYPE_DOMAIN (t1)
2096 && TYPE_DOMAIN (t2)
2097 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
2098 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
2099 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
2100 TYPE_MAX_VALUE (TYPE_DOMAIN (t2))))))
2101 return 1;
2103 /* Padding record types are also compatible if they pad the same
2104 type and have the same constant size. */
2105 if (code == RECORD_TYPE
2106 && TYPE_PADDING_P (t1) && TYPE_PADDING_P (t2)
2107 && TREE_TYPE (TYPE_FIELDS (t1)) == TREE_TYPE (TYPE_FIELDS (t2))
2108 && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
2109 return 1;
2111 return 0;
2114 /* EXP is an expression for the size of an object. If this size contains
2115 discriminant references, replace them with the maximum (if MAX_P) or
2116 minimum (if !MAX_P) possible value of the discriminant. */
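/* Thus, purely as an illustration, for a size expression of the form
<discriminant> * 8 where the discriminant subtype ranges over 1 .. 10, the
result is 80 bits if MAX_P and 8 bits otherwise. */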
2118 tree
2119 max_size (tree exp, bool max_p)
2121 enum tree_code code = TREE_CODE (exp);
2122 tree type = TREE_TYPE (exp);
2124 switch (TREE_CODE_CLASS (code))
2126 case tcc_declaration:
2127 case tcc_constant:
2128 return exp;
2130 case tcc_vl_exp:
2131 if (code == CALL_EXPR)
2133 tree t, *argarray;
2134 int n, i;
2136 t = maybe_inline_call_in_expr (exp);
2137 if (t)
2138 return max_size (t, max_p);
2140 n = call_expr_nargs (exp);
2141 gcc_assert (n > 0);
2142 argarray = (tree *) alloca (n * sizeof (tree));
2143 for (i = 0; i < n; i++)
2144 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
2145 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
2147 break;
2149 case tcc_reference:
2150 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2151 modify. Otherwise, we treat it like a variable. */
2152 if (!CONTAINS_PLACEHOLDER_P (exp))
2153 return exp;
2155 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2156 return
2157 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
2159 case tcc_comparison:
2160 return max_p ? size_one_node : size_zero_node;
2162 case tcc_unary:
2163 case tcc_binary:
2164 case tcc_expression:
2165 switch (TREE_CODE_LENGTH (code))
2167 case 1:
2168 if (code == NON_LVALUE_EXPR)
2169 return max_size (TREE_OPERAND (exp, 0), max_p);
2170 else
2171 return
2172 fold_build1 (code, type,
2173 max_size (TREE_OPERAND (exp, 0),
2174 code == NEGATE_EXPR ? !max_p : max_p));
2176 case 2:
2177 if (code == COMPOUND_EXPR)
2178 return max_size (TREE_OPERAND (exp, 1), max_p);
2181 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2182 tree rhs = max_size (TREE_OPERAND (exp, 1),
2183 code == MINUS_EXPR ? !max_p : max_p);
2185 /* Special-case wanting the maximum value of a MIN_EXPR.
2186 In that case, if one side overflows, return the other.
2187 sizetype is signed, but we know sizes are non-negative.
2188 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2189 overflowing and the RHS a variable. */
2190 if (max_p
2191 && code == MIN_EXPR
2192 && TREE_CODE (rhs) == INTEGER_CST
2193 && TREE_OVERFLOW (rhs))
2194 return lhs;
2195 else if (max_p
2196 && code == MIN_EXPR
2197 && TREE_CODE (lhs) == INTEGER_CST
2198 && TREE_OVERFLOW (lhs))
2199 return rhs;
2200 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2201 && TREE_CODE (lhs) == INTEGER_CST
2202 && TREE_OVERFLOW (lhs)
2203 && !TREE_CONSTANT (rhs))
2204 return lhs;
2205 else
2206 return fold_build2 (code, type, lhs, rhs);
2209 case 3:
2210 if (code == SAVE_EXPR)
2211 return exp;
2212 else if (code == COND_EXPR)
2213 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2214 max_size (TREE_OPERAND (exp, 1), max_p),
2215 max_size (TREE_OPERAND (exp, 2), max_p));
2218 /* Other tree classes cannot happen. */
2219 default:
2220 break;
2223 gcc_unreachable ();
2226 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2227 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2228 Return a constructor for the template. */
2230 tree
2231 build_template (tree template_type, tree array_type, tree expr)
2233 tree template_elts = NULL_TREE;
2234 tree bound_list = NULL_TREE;
2235 tree field;
2237 while (TREE_CODE (array_type) == RECORD_TYPE
2238 && (TYPE_PADDING_P (array_type)
2239 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
2240 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2242 if (TREE_CODE (array_type) == ARRAY_TYPE
2243 || (TREE_CODE (array_type) == INTEGER_TYPE
2244 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2245 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2247 /* First make the list for a CONSTRUCTOR for the template. Go down the
2248 field list of the template instead of the type chain because this
2249 array might be an Ada array of arrays and we can't tell where the
2250 nested arrays stop being the underlying object. */
2252 for (field = TYPE_FIELDS (template_type); field;
2253 (bound_list
2254 ? (bound_list = TREE_CHAIN (bound_list))
2255 : (array_type = TREE_TYPE (array_type))),
2256 field = TREE_CHAIN (TREE_CHAIN (field)))
2258 tree bounds, min, max;
2260 /* If we have a bound list, get the bounds from there. Likewise
2261 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2262 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2263 This will give us a maximum range. */
2264 if (bound_list)
2265 bounds = TREE_VALUE (bound_list);
2266 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2267 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2268 else if (expr && TREE_CODE (expr) == PARM_DECL
2269 && DECL_BY_COMPONENT_PTR_P (expr))
2270 bounds = TREE_TYPE (field);
2271 else
2272 gcc_unreachable ();
2274 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
2275 max = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MAX_VALUE (bounds));
2277 /* If either MIN or MAX involves a PLACEHOLDER_EXPR, we must
2278 substitute it from EXPR. */
2279 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
2280 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
2282 template_elts = tree_cons (TREE_CHAIN (field), max,
2283 tree_cons (field, min, template_elts));
2286 return gnat_build_constructor (template_type, nreverse (template_elts));
2289 /* Build a 32-bit VMS descriptor from a Mechanism_Type, which must specify a
2290 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
2291 type contains in its DECL_INITIAL the expression to use when a constructor
2292 is made for the type. GNAT_ENTITY is an entity used to print out an error
2293 message if the mechanism cannot be applied to an object of that type and
2294 also for the name. */
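/* The 32-bit descriptor built below starts with the LENGTH, DTYPE, CLASS and
POINTER fields, followed by class-specific fields; the CLASS codes used are
1 (S), 4 (A), 10 (NCA) and 15 (SB). */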
2296 tree
2297 build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2299 tree record_type = make_node (RECORD_TYPE);
2300 tree pointer32_type;
2301 tree field_list = 0;
2302 int klass;
2303 int dtype = 0;
2304 tree inner_type;
2305 int ndim;
2306 int i;
2307 tree *idx_arr;
2308 tree tem;
2310 /* If TYPE is an unconstrained array, use the underlying array type. */
2311 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2312 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2314 /* If this is an array, compute the number of dimensions in the array,
2315 get the index types, and point to the inner type. */
2316 if (TREE_CODE (type) != ARRAY_TYPE)
2317 ndim = 0;
2318 else
2319 for (ndim = 1, inner_type = type;
2320 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2321 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2322 ndim++, inner_type = TREE_TYPE (inner_type))
2325 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2327 if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
2328 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2329 for (i = ndim - 1, inner_type = type;
2330 i >= 0;
2331 i--, inner_type = TREE_TYPE (inner_type))
2332 idx_arr[i] = TYPE_DOMAIN (inner_type);
2333 else
2334 for (i = 0, inner_type = type;
2335 i < ndim;
2336 i++, inner_type = TREE_TYPE (inner_type))
2337 idx_arr[i] = TYPE_DOMAIN (inner_type);
2339 /* Now get the DTYPE value. */
2340 switch (TREE_CODE (type))
2342 case INTEGER_TYPE:
2343 case ENUMERAL_TYPE:
2344 case BOOLEAN_TYPE:
2345 if (TYPE_VAX_FLOATING_POINT_P (type))
2346 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2348 case 6:
2349 dtype = 10;
2350 break;
2351 case 9:
2352 dtype = 11;
2353 break;
2354 case 15:
2355 dtype = 27;
2356 break;
2358 else
2359 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2361 case 8:
2362 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2363 break;
2364 case 16:
2365 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2366 break;
2367 case 32:
2368 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2369 break;
2370 case 64:
2371 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2372 break;
2373 case 128:
2374 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2375 break;
2377 break;
2379 case REAL_TYPE:
2380 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2381 break;
2383 case COMPLEX_TYPE:
2384 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2385 && TYPE_VAX_FLOATING_POINT_P (type))
2386 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2388 case 6:
2389 dtype = 12;
2390 break;
2391 case 9:
2392 dtype = 13;
2393 break;
2394 case 15:
2395 dtype = 29;
2397 else
2398 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2399 break;
2401 case ARRAY_TYPE:
2402 dtype = 14;
2403 break;
2405 default:
2406 break;
2409 /* Get the CLASS value. */
2410 switch (mech)
2412 case By_Descriptor_A:
2413 case By_Short_Descriptor_A:
2414 klass = 4;
2415 break;
2416 case By_Descriptor_NCA:
2417 case By_Short_Descriptor_NCA:
2418 klass = 10;
2419 break;
2420 case By_Descriptor_SB:
2421 case By_Short_Descriptor_SB:
2422 klass = 15;
2423 break;
2424 case By_Descriptor:
2425 case By_Short_Descriptor:
2426 case By_Descriptor_S:
2427 case By_Short_Descriptor_S:
2428 default:
2429 klass = 1;
2430 break;
2433 /* Make the type for a descriptor for VMS. The first four fields are the
2434 same for all types. */
2435 field_list
2436 = chainon (field_list,
2437 make_descriptor_field ("LENGTH", gnat_type_for_size (16, 1),
2438 record_type,
2439 size_in_bytes
2440 ((mech == By_Descriptor_A
2441 || mech == By_Short_Descriptor_A)
2442 ? inner_type : type)));
2443 field_list
2444 = chainon (field_list,
2445 make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1),
2446 record_type, size_int (dtype)));
2447 field_list
2448 = chainon (field_list,
2449 make_descriptor_field ("CLASS", gnat_type_for_size (8, 1),
2450 record_type, size_int (klass)));
2452 /* Of course this will crash at run-time if the address space is not
2453 within the low 32 bits, but there is nothing else we can do. */
2454 pointer32_type = build_pointer_type_for_mode (type, SImode, false);
2456 field_list
2457 = chainon (field_list,
2458 make_descriptor_field ("POINTER", pointer32_type, record_type,
2459 build_unary_op (ADDR_EXPR,
2460 pointer32_type,
2461 build0 (PLACEHOLDER_EXPR,
2462 type))));
2464 switch (mech)
2466 case By_Descriptor:
2467 case By_Short_Descriptor:
2468 case By_Descriptor_S:
2469 case By_Short_Descriptor_S:
2470 break;
2472 case By_Descriptor_SB:
2473 case By_Short_Descriptor_SB:
2474 field_list
2475 = chainon (field_list,
2476 make_descriptor_field
2477 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2478 TREE_CODE (type) == ARRAY_TYPE
2479 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2480 field_list
2481 = chainon (field_list,
2482 make_descriptor_field
2483 ("SB_U1", gnat_type_for_size (32, 1), record_type,
2484 TREE_CODE (type) == ARRAY_TYPE
2485 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2486 break;
2488 case By_Descriptor_A:
2489 case By_Short_Descriptor_A:
2490 case By_Descriptor_NCA:
2491 case By_Short_Descriptor_NCA:
2492 field_list = chainon (field_list,
2493 make_descriptor_field ("SCALE",
2494 gnat_type_for_size (8, 1),
2495 record_type,
2496 size_zero_node));
2498 field_list = chainon (field_list,
2499 make_descriptor_field ("DIGITS",
2500 gnat_type_for_size (8, 1),
2501 record_type,
2502 size_zero_node));
2504 field_list
2505 = chainon (field_list,
2506 make_descriptor_field
2507 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2508 size_int ((mech == By_Descriptor_NCA ||
2509 mech == By_Short_Descriptor_NCA)
2511 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2512 : (TREE_CODE (type) == ARRAY_TYPE
2513 && TYPE_CONVENTION_FORTRAN_P (type)
2514 ? 224 : 192))));
2516 field_list = chainon (field_list,
2517 make_descriptor_field ("DIMCT",
2518 gnat_type_for_size (8, 1),
2519 record_type,
2520 size_int (ndim)));
2522 field_list = chainon (field_list,
2523 make_descriptor_field ("ARSIZE",
2524 gnat_type_for_size (32, 1),
2525 record_type,
2526 size_in_bytes (type)));
2528 /* Now build a pointer to the 0,0,0... element. */
2529 tem = build0 (PLACEHOLDER_EXPR, type);
2530 for (i = 0, inner_type = type; i < ndim;
2531 i++, inner_type = TREE_TYPE (inner_type))
2532 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2533 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2534 NULL_TREE, NULL_TREE);
2536 field_list
2537 = chainon (field_list,
2538 make_descriptor_field
2539 ("A0",
2540 build_pointer_type_for_mode (inner_type, SImode, false),
2541 record_type,
2542 build1 (ADDR_EXPR,
2543 build_pointer_type_for_mode (inner_type, SImode,
2544 false),
2545 tem)));
2547 /* Next come the addressing coefficients. */
2548 tem = size_one_node;
2549 for (i = 0; i < ndim; i++)
2551 char fname[3];
2552 tree idx_length
2553 = size_binop (MULT_EXPR, tem,
2554 size_binop (PLUS_EXPR,
2555 size_binop (MINUS_EXPR,
2556 TYPE_MAX_VALUE (idx_arr[i]),
2557 TYPE_MIN_VALUE (idx_arr[i])),
2558 size_int (1)));
2560 fname[0] = ((mech == By_Descriptor_NCA ||
2561 mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
2562 fname[1] = '0' + i, fname[2] = 0;
2563 field_list
2564 = chainon (field_list,
2565 make_descriptor_field (fname,
2566 gnat_type_for_size (32, 1),
2567 record_type, idx_length));
2569 if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
2570 tem = idx_length;
2573 /* Finally here are the bounds. */
2574 for (i = 0; i < ndim; i++)
2576 char fname[3];
2578 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2579 field_list
2580 = chainon (field_list,
2581 make_descriptor_field
2582 (fname, gnat_type_for_size (32, 1), record_type,
2583 TYPE_MIN_VALUE (idx_arr[i])));
2585 fname[0] = 'U';
2586 field_list
2587 = chainon (field_list,
2588 make_descriptor_field
2589 (fname, gnat_type_for_size (32, 1), record_type,
2590 TYPE_MAX_VALUE (idx_arr[i])));
2592 break;
2594 default:
2595 post_error ("unsupported descriptor type for &", gnat_entity);
2598 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC");
2599 finish_record_type (record_type, field_list, 0, false);
2600 return record_type;
2603 /* Build a 64-bit VMS descriptor from a Mechanism_Type, which must specify a
2604 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
2605 type contains in its DECL_INITIAL the expression to use when a constructor
2606 is made for the type. GNAT_ENTITY is an entity used to print out an error
2607 message if the mechanism cannot be applied to an object of that type and
2608 also for the name. */
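/* The 64-bit descriptor built below starts with the MBO, DTYPE, CLASS, MBMO,
LENGTH and POINTER fields, followed by class-specific fields, with the same
CLASS codes as in the 32-bit case. The field-counting comments in the
convert_vms_descriptor* routines below rely on this layout. */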
2610 tree
2611 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2613 tree record64_type = make_node (RECORD_TYPE);
2614 tree pointer64_type;
2615 tree field_list64 = 0;
2616 int klass;
2617 int dtype = 0;
2618 tree inner_type;
2619 int ndim;
2620 int i;
2621 tree *idx_arr;
2622 tree tem;
2624 /* If TYPE is an unconstrained array, use the underlying array type. */
2625 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2626 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2628 /* If this is an array, compute the number of dimensions in the array,
2629 get the index types, and point to the inner type. */
2630 if (TREE_CODE (type) != ARRAY_TYPE)
2631 ndim = 0;
2632 else
2633 for (ndim = 1, inner_type = type;
2634 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2635 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2636 ndim++, inner_type = TREE_TYPE (inner_type))
2639 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2641 if (mech != By_Descriptor_NCA
2642 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2643 for (i = ndim - 1, inner_type = type;
2644 i >= 0;
2645 i--, inner_type = TREE_TYPE (inner_type))
2646 idx_arr[i] = TYPE_DOMAIN (inner_type);
2647 else
2648 for (i = 0, inner_type = type;
2649 i < ndim;
2650 i++, inner_type = TREE_TYPE (inner_type))
2651 idx_arr[i] = TYPE_DOMAIN (inner_type);
2653 /* Now get the DTYPE value. */
2654 switch (TREE_CODE (type))
2656 case INTEGER_TYPE:
2657 case ENUMERAL_TYPE:
2658 case BOOLEAN_TYPE:
2659 if (TYPE_VAX_FLOATING_POINT_P (type))
2660 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2662 case 6:
2663 dtype = 10;
2664 break;
2665 case 9:
2666 dtype = 11;
2667 break;
2668 case 15:
2669 dtype = 27;
2670 break;
2672 else
2673 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2675 case 8:
2676 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2677 break;
2678 case 16:
2679 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2680 break;
2681 case 32:
2682 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2683 break;
2684 case 64:
2685 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2686 break;
2687 case 128:
2688 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2689 break;
2691 break;
2693 case REAL_TYPE:
2694 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2695 break;
2697 case COMPLEX_TYPE:
2698 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2699 && TYPE_VAX_FLOATING_POINT_P (type))
2700 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2702 case 6:
2703 dtype = 12;
2704 break;
2705 case 9:
2706 dtype = 13;
2707 break;
2708 case 15:
2709 dtype = 29;
2711 else
2712 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2713 break;
2715 case ARRAY_TYPE:
2716 dtype = 14;
2717 break;
2719 default:
2720 break;
2723 /* Get the CLASS value. */
2724 switch (mech)
2726 case By_Descriptor_A:
2727 klass = 4;
2728 break;
2729 case By_Descriptor_NCA:
2730 klass = 10;
2731 break;
2732 case By_Descriptor_SB:
2733 klass = 15;
2734 break;
2735 case By_Descriptor:
2736 case By_Descriptor_S:
2737 default:
2738 klass = 1;
2739 break;
2742 /* Make the type for a 64-bit descriptor for VMS. The first six fields
2743 are the same for all types. */
2744 field_list64
2745 = chainon (field_list64,
2746 make_descriptor_field ("MBO", gnat_type_for_size (16, 1),
2747 record64_type, size_int (1)));
2748 field_list64
2749 = chainon (field_list64,
2750 make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1),
2751 record64_type, size_int (dtype)));
2752 field_list64
2753 = chainon (field_list64,
2754 make_descriptor_field ("CLASS", gnat_type_for_size (8, 1),
2755 record64_type, size_int (klass)));
2756 field_list64
2757 = chainon (field_list64,
2758 make_descriptor_field ("MBMO", gnat_type_for_size (32, 1),
2759 record64_type, ssize_int (-1)));
2760 field_list64
2761 = chainon (field_list64,
2762 make_descriptor_field ("LENGTH", gnat_type_for_size (64, 1),
2763 record64_type,
2764 size_in_bytes (mech == By_Descriptor_A
2765 ? inner_type : type)));
2767 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
2769 field_list64
2770 = chainon (field_list64,
2771 make_descriptor_field ("POINTER", pointer64_type,
2772 record64_type,
2773 build_unary_op (ADDR_EXPR,
2774 pointer64_type,
2775 build0 (PLACEHOLDER_EXPR,
2776 type))));
2778 switch (mech)
2780 case By_Descriptor:
2781 case By_Descriptor_S:
2782 break;
2784 case By_Descriptor_SB:
2785 field_list64
2786 = chainon (field_list64,
2787 make_descriptor_field
2788 ("SB_L1", gnat_type_for_size (64, 1), record64_type,
2789 TREE_CODE (type) == ARRAY_TYPE
2790 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2791 field_list64
2792 = chainon (field_list64,
2793 make_descriptor_field
2794 ("SB_U1", gnat_type_for_size (64, 1), record64_type,
2795 TREE_CODE (type) == ARRAY_TYPE
2796 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2797 break;
2799 case By_Descriptor_A:
2800 case By_Descriptor_NCA:
2801 field_list64 = chainon (field_list64,
2802 make_descriptor_field ("SCALE",
2803 gnat_type_for_size (8, 1),
2804 record64_type,
2805 size_zero_node));
2807 field_list64 = chainon (field_list64,
2808 make_descriptor_field ("DIGITS",
2809 gnat_type_for_size (8, 1),
2810 record64_type,
2811 size_zero_node));
2813 field_list64
2814 = chainon (field_list64,
2815 make_descriptor_field
2816 ("AFLAGS", gnat_type_for_size (8, 1), record64_type,
2817 size_int (mech == By_Descriptor_NCA
2819 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2820 : (TREE_CODE (type) == ARRAY_TYPE
2821 && TYPE_CONVENTION_FORTRAN_P (type)
2822 ? 224 : 192))));
2824 field_list64 = chainon (field_list64,
2825 make_descriptor_field ("DIMCT",
2826 gnat_type_for_size (8, 1),
2827 record64_type,
2828 size_int (ndim)));
2830 field_list64 = chainon (field_list64,
2831 make_descriptor_field ("MBZ",
2832 gnat_type_for_size (32, 1),
2833 record64_type,
2834 size_int (0)));
2835 field_list64 = chainon (field_list64,
2836 make_descriptor_field ("ARSIZE",
2837 gnat_type_for_size (64, 1),
2838 record64_type,
2839 size_in_bytes (type)));
2841 /* Now build a pointer to the 0,0,0... element. */
2842 tem = build0 (PLACEHOLDER_EXPR, type);
2843 for (i = 0, inner_type = type; i < ndim;
2844 i++, inner_type = TREE_TYPE (inner_type))
2845 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2846 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2847 NULL_TREE, NULL_TREE);
2849 field_list64
2850 = chainon (field_list64,
2851 make_descriptor_field
2852 ("A0",
2853 build_pointer_type_for_mode (inner_type, DImode, false),
2854 record64_type,
2855 build1 (ADDR_EXPR,
2856 build_pointer_type_for_mode (inner_type, DImode,
2857 false),
2858 tem)));
2860 /* Next come the addressing coefficients. */
2861 tem = size_one_node;
2862 for (i = 0; i < ndim; i++)
2864 char fname[3];
2865 tree idx_length
2866 = size_binop (MULT_EXPR, tem,
2867 size_binop (PLUS_EXPR,
2868 size_binop (MINUS_EXPR,
2869 TYPE_MAX_VALUE (idx_arr[i]),
2870 TYPE_MIN_VALUE (idx_arr[i])),
2871 size_int (1)));
2873 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
2874 fname[1] = '0' + i, fname[2] = 0;
2875 field_list64
2876 = chainon (field_list64,
2877 make_descriptor_field (fname,
2878 gnat_type_for_size (64, 1),
2879 record64_type, idx_length));
2881 if (mech == By_Descriptor_NCA)
2882 tem = idx_length;
2885 /* Finally here are the bounds. */
2886 for (i = 0; i < ndim; i++)
2888 char fname[3];
2890 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2891 field_list64
2892 = chainon (field_list64,
2893 make_descriptor_field
2894 (fname, gnat_type_for_size (64, 1), record64_type,
2895 TYPE_MIN_VALUE (idx_arr[i])));
2897 fname[0] = 'U';
2898 field_list64
2899 = chainon (field_list64,
2900 make_descriptor_field
2901 (fname, gnat_type_for_size (64, 1), record64_type,
2902 TYPE_MAX_VALUE (idx_arr[i])));
2904 break;
2906 default:
2907 post_error ("unsupported descriptor type for &", gnat_entity);
2910 TYPE_NAME (record64_type) = create_concat_name (gnat_entity, "DESC64");
2911 finish_record_type (record64_type, field_list64, 0, false);
2912 return record64_type;
2915 /* Utility routine for above code to make a field. */
2917 static tree
2918 make_descriptor_field (const char *name, tree type,
2919 tree rec_type, tree initial)
2921 tree field
2922 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2924 DECL_INITIAL (field) = initial;
2925 return field;
2928 /* Convert GNU_EXPR, a pointer to a 64-bit VMS descriptor, to GNU_TYPE, a
2929 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
2930 which the VMS descriptor is passed. */
2932 static tree
2933 convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
2935 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
2936 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
2937 /* The CLASS field is the 3rd field in the descriptor. */
2938 tree klass = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
2939 /* The POINTER field is the 6th field in the descriptor. */
2940 tree pointer = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (klass)));
2942 /* Retrieve the value of the POINTER field. */
2943 tree gnu_expr64
2944 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
2946 if (POINTER_TYPE_P (gnu_type))
2947 return convert (gnu_type, gnu_expr64);
2949 else if (TYPE_IS_FAT_POINTER_P (gnu_type))
2951 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
2952 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
2953 tree template_type = TREE_TYPE (p_bounds_type);
2954 tree min_field = TYPE_FIELDS (template_type);
2955 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
2956 tree template_tree, template_addr, aflags, dimct, t, u;
2957 /* See the head comment of build_vms_descriptor. */
2958 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
2959 tree lfield, ufield;
2961 /* Convert POINTER to the pointer-to-array type. */
2962 gnu_expr64 = convert (p_array_type, gnu_expr64);
2964 switch (iklass)
2966 case 1: /* Class S */
2967 case 15: /* Class SB */
2968 /* Build {1, LENGTH} template; the 64-bit LENGTH is the 5th field. */
2969 t = TREE_CHAIN (TREE_CHAIN (klass));
2970 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2971 t = tree_cons (min_field,
2972 convert (TREE_TYPE (min_field), integer_one_node),
2973 tree_cons (max_field,
2974 convert (TREE_TYPE (max_field), t),
2975 NULL_TREE));
2976 template_tree = gnat_build_constructor (template_type, t);
2977 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
2979 /* For class S, we are done. */
2980 if (iklass == 1)
2981 break;
2983 /* Test that we really have an SB descriptor, like DEC Ada. */
2984 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
2985 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
2986 u = build_binary_op (EQ_EXPR, boolean_type_node, t, u);
2987 /* If so, there is already a template in the descriptor and
2988 it is located right after the POINTER field. The fields are
2989 64 bits wide so they must be repacked. */
2990 t = TREE_CHAIN (pointer);
2991 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2992 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
2994 t = TREE_CHAIN (t);
2995 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2996 ufield = convert
2997 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
2999 /* Build the template in the form of a constructor. */
3000 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3001 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3002 ufield, NULL_TREE));
3003 template_tree = gnat_build_constructor (template_type, t);
3005 /* Otherwise use the {1, LENGTH} template we built above. */
3006 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3007 build_unary_op (ADDR_EXPR, p_bounds_type,
3008 template_tree),
3009 template_addr);
3010 break;
3012 case 4: /* Class A */
3013 /* The AFLAGS field is the 3rd field after the pointer in the
3014 descriptor. */
3015 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer)));
3016 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3017 /* The DIMCT field is the next field in the descriptor after
3018 aflags. */
3019 t = TREE_CHAIN (t);
3020 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3021 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3022 or FL_COEFF or FL_BOUNDS not set. */
3023 u = build_int_cst (TREE_TYPE (aflags), 192);
3024 u = build_binary_op (TRUTH_OR_EXPR, boolean_type_node,
3025 build_binary_op (NE_EXPR, boolean_type_node,
3026 dimct,
3027 convert (TREE_TYPE (dimct),
3028 size_one_node)),
3029 build_binary_op (NE_EXPR, boolean_type_node,
3030 build2 (BIT_AND_EXPR,
3031 TREE_TYPE (aflags),
3032 aflags, u),
3033 u));
3034 /* There is already a template in the descriptor and it is located
3035 in block 3. The fields are 64 bits wide so they must be repacked. */
3036 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN
3037 (t)))));
3038 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3039 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3041 t = TREE_CHAIN (t);
3042 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3043 ufield = convert
3044 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3046 /* Build the template in the form of a constructor. */
3047 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3048 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3049 ufield, NULL_TREE));
3050 template_tree = gnat_build_constructor (template_type, t);
3051 template_tree = build3 (COND_EXPR, template_type, u,
3052 build_call_raise (CE_Length_Check_Failed, Empty,
3053 N_Raise_Constraint_Error),
3054 template_tree);
3055 template_addr
3056 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
3057 break;
3059 case 10: /* Class NCA */
3060 default:
3061 post_error ("unsupported descriptor type for &", gnat_subprog);
3062 template_addr = integer_zero_node;
3063 break;
3066 /* Build the fat pointer in the form of a constructor. */
3067 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr64,
3068 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3069 template_addr, NULL_TREE));
3070 return gnat_build_constructor (gnu_type, t);
3073 else
3074 gcc_unreachable ();
3077 /* Convert GNU_EXPR, a pointer to a 32-bit VMS descriptor, to GNU_TYPE, a
3078 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3079 which the VMS descriptor is passed. */
3081 static tree
3082 convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3084 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3085 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3086 /* The CLASS field is the 3rd field in the descriptor. */
3087 tree klass = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
3088 /* The POINTER field is the 4th field in the descriptor. */
3089 tree pointer = TREE_CHAIN (klass);
3091 /* Retrieve the value of the POINTER field. */
3092 tree gnu_expr32
3093 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
3095 if (POINTER_TYPE_P (gnu_type))
3096 return convert (gnu_type, gnu_expr32);
3098 else if (TYPE_IS_FAT_POINTER_P (gnu_type))
3100 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3101 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3102 tree template_type = TREE_TYPE (p_bounds_type);
3103 tree min_field = TYPE_FIELDS (template_type);
3104 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3105 tree template_tree, template_addr, aflags, dimct, t, u;
3106 /* See the head comment of build_vms_descriptor. */
3107 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
3109 /* Convert POINTER to the pointer-to-array type. */
3110 gnu_expr32 = convert (p_array_type, gnu_expr32);
3112 switch (iklass)
3114 case 1: /* Class S */
3115 case 15: /* Class SB */
3116 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
3117 t = TYPE_FIELDS (desc_type);
3118 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3119 t = tree_cons (min_field,
3120 convert (TREE_TYPE (min_field), integer_one_node),
3121 tree_cons (max_field,
3122 convert (TREE_TYPE (max_field), t),
3123 NULL_TREE));
3124 template_tree = gnat_build_constructor (template_type, t);
3125 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
3127 /* For class S, we are done. */
3128 if (iklass == 1)
3129 break;
3131 /* Test that we really have an SB descriptor, like DEC Ada. */
3132 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
3133 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
3134 u = build_binary_op (EQ_EXPR, boolean_type_node, t, u);
3135 /* If so, there is already a template in the descriptor and
3136 it is located right after the POINTER field. */
3137 t = TREE_CHAIN (pointer);
3138 template_tree
3139 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3140 /* Otherwise use the {1, LENGTH} template we built above. */
3141 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3142 build_unary_op (ADDR_EXPR, p_bounds_type,
3143 template_tree),
3144 template_addr);
3145 break;
3147 case 4: /* Class A */
3148 /* The AFLAGS field is the 7th field in the descriptor. */
3149 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer)));
3150 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3151 /* The DIMCT field is the 8th field in the descriptor. */
3152 t = TREE_CHAIN (t);
3153 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3154 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3155 or FL_COEFF or FL_BOUNDS not set. */
3156 u = build_int_cst (TREE_TYPE (aflags), 192);
3157 u = build_binary_op (TRUTH_OR_EXPR, boolean_type_node,
3158 build_binary_op (NE_EXPR, boolean_type_node,
3159 dimct,
3160 convert (TREE_TYPE (dimct),
3161 size_one_node)),
3162 build_binary_op (NE_EXPR, boolean_type_node,
3163 build2 (BIT_AND_EXPR,
3164 TREE_TYPE (aflags),
3165 aflags, u),
3166 u));
3167 /* There is already a template in the descriptor and it is
3168 located at the start of block 3 (12th field). */
3169 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (t))));
3170 template_tree
3171 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3172 template_tree = build3 (COND_EXPR, TREE_TYPE (t), u,
3173 build_call_raise (CE_Length_Check_Failed, Empty,
3174 N_Raise_Constraint_Error),
3175 template_tree);
3176 template_addr
3177 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
3178 break;
3180 case 10: /* Class NCA */
3181 default:
3182 post_error ("unsupported descriptor type for &", gnat_subprog);
3183 template_addr = integer_zero_node;
3184 break;
3187 /* Build the fat pointer in the form of a constructor. */
3188 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr32,
3189 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3190 template_addr, NULL_TREE));
3192 return gnat_build_constructor (gnu_type, t);
3195 else
3196 gcc_unreachable ();
3199 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
3200 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
3201 pointer type of GNU_EXPR. GNAT_SUBPROG is the subprogram to which the
3202 VMS descriptor is passed. */
3204 static tree
3205 convert_vms_descriptor (tree gnu_type, tree gnu_expr, tree gnu_expr_alt_type,
3206 Entity_Id gnat_subprog)
3208 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3209 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3210 tree mbo = TYPE_FIELDS (desc_type);
3211 const char *mbostr = IDENTIFIER_POINTER (DECL_NAME (mbo));
3212 tree mbmo = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (mbo)));
3213 tree is64bit, gnu_expr32, gnu_expr64;
3215 /* If the field name is not MBO, this must be a 32-bit descriptor with no
3216 alternate. Otherwise the primary is 64-bit and the alternate 32-bit. */
3217 if (strcmp (mbostr, "MBO") != 0)
3218 return convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
3220 /* Build the test for 64-bit descriptor. */
3221 mbo = build3 (COMPONENT_REF, TREE_TYPE (mbo), desc, mbo, NULL_TREE);
3222 mbmo = build3 (COMPONENT_REF, TREE_TYPE (mbmo), desc, mbmo, NULL_TREE);
3223 is64bit
3224 = build_binary_op (TRUTH_ANDIF_EXPR, boolean_type_node,
3225 build_binary_op (EQ_EXPR, boolean_type_node,
3226 convert (integer_type_node, mbo),
3227 integer_one_node),
3228 build_binary_op (EQ_EXPR, boolean_type_node,
3229 convert (integer_type_node, mbmo),
3230 integer_minus_one_node));
3232 /* Build the 2 possible end results. */
3233 gnu_expr64 = convert_vms_descriptor64 (gnu_type, gnu_expr, gnat_subprog);
3234 gnu_expr = fold_convert (gnu_expr_alt_type, gnu_expr);
3235 gnu_expr32 = convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
3237 return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
3240 /* Build a stub for the subprogram specified by the GCC tree GNU_SUBPROG
3241 and the GNAT node GNAT_SUBPROG. */
3243 void
3244 build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
3246 tree gnu_subprog_type, gnu_subprog_addr, gnu_subprog_call;
3247 tree gnu_stub_param, gnu_param_list, gnu_arg_types, gnu_param;
3248 tree gnu_stub_decl = DECL_FUNCTION_STUB (gnu_subprog);
3249 tree gnu_body;
3251 gnu_subprog_type = TREE_TYPE (gnu_subprog);
3252 gnu_param_list = NULL_TREE;
3254 begin_subprog_body (gnu_stub_decl);
3255 gnat_pushlevel ();
3257 start_stmt_group ();
3259 /* Loop over the parameters of the stub and translate any of them
3260 passed by descriptor into a by-reference parameter. */
3261 for (gnu_stub_param = DECL_ARGUMENTS (gnu_stub_decl),
3262 gnu_arg_types = TYPE_ARG_TYPES (gnu_subprog_type);
3263 gnu_stub_param;
3264 gnu_stub_param = TREE_CHAIN (gnu_stub_param),
3265 gnu_arg_types = TREE_CHAIN (gnu_arg_types))
3267 if (DECL_BY_DESCRIPTOR_P (gnu_stub_param))
3268 gnu_param
3269 = convert_vms_descriptor (TREE_VALUE (gnu_arg_types),
3270 gnu_stub_param,
3271 DECL_PARM_ALT_TYPE (gnu_stub_param),
3272 gnat_subprog);
3273 else
3274 gnu_param = gnu_stub_param;
3276 gnu_param_list = tree_cons (NULL_TREE, gnu_param, gnu_param_list);
3279 gnu_body = end_stmt_group ();
3281 /* Invoke the internal subprogram. */
3282 gnu_subprog_addr = build1 (ADDR_EXPR, build_pointer_type (gnu_subprog_type),
3283 gnu_subprog);
3284 gnu_subprog_call = build_call_list (TREE_TYPE (gnu_subprog_type),
3285 gnu_subprog_addr,
3286 nreverse (gnu_param_list));
3288 /* Propagate the return value, if any. */
3289 if (VOID_TYPE_P (TREE_TYPE (gnu_subprog_type)))
3290 append_to_statement_list (gnu_subprog_call, &gnu_body);
3291 else
3292 append_to_statement_list (build_return_expr (DECL_RESULT (gnu_stub_decl),
3293 gnu_subprog_call),
3294 &gnu_body);
3296 gnat_poplevel ();
3298 allocate_struct_function (gnu_stub_decl, false);
3299 end_subprog_body (gnu_body);
3302 /* Build a type to be used to represent an aliased object whose nominal
3303 type is an unconstrained array. This consists of a RECORD_TYPE containing
3304 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
3305 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
3306 is used to represent an arbitrary unconstrained object. Use NAME
3307 as the name of the record. */
3309 tree
3310 build_unc_object_type (tree template_type, tree object_type, tree name)
3312 tree type = make_node (RECORD_TYPE);
3313 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
3314 template_type, type, 0, 0, 0, 1);
3315 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
3316 type, 0, 0, 0, 1);
3318 TYPE_NAME (type) = name;
3319 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3320 finish_record_type (type,
3321 chainon (chainon (NULL_TREE, template_field),
3322 array_field),
3323 0, true);
3325 return type;
3328 /* Same, taking a thin or fat pointer type instead of a template type. */
3330 tree
3331 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3332 tree name)
3334 tree template_type;
3336 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3338 template_type
3339 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
3340 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3341 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3342 return build_unc_object_type (template_type, object_type, name);
3345 /* Shift the component offsets within an unconstrained object TYPE to make it
3346 suitable for use as a designated type for thin pointers. */
3348 void
3349 shift_unc_components_for_thin_pointers (tree type)
3351 /* Thin pointer values designate the ARRAY data of an unconstrained object,
3352 allocated past the BOUNDS template. The designated type is adjusted to
3353 have ARRAY at position zero and the template at a negative offset, so
3354 that COMPONENT_REFs on (*thin_ptr) designate the proper location. */
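/* For example, if the BOUNDS template occupies 8 bytes, ARRAY is moved from
byte offset 8 down to offset 0 and BOUNDS ends up at offset -8. */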
3356 tree bounds_field = TYPE_FIELDS (type);
3357 tree array_field = TREE_CHAIN (TYPE_FIELDS (type));
3359 DECL_FIELD_OFFSET (bounds_field)
3360 = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));
3362 DECL_FIELD_OFFSET (array_field) = size_zero_node;
3363 DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
3366 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
3367 In the normal case this is just two adjustments, but we have more to
3368 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
3370 void
3371 update_pointer_to (tree old_type, tree new_type)
3373 tree ptr = TYPE_POINTER_TO (old_type);
3374 tree ref = TYPE_REFERENCE_TO (old_type);
3375 tree ptr1, ref1;
3376 tree type;
3378 /* If this is the main variant, process all the other variants first. */
3379 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3380 for (type = TYPE_NEXT_VARIANT (old_type); type;
3381 type = TYPE_NEXT_VARIANT (type))
3382 update_pointer_to (type, new_type);
3384 /* If no pointers and no references, we are done. */
3385 if (!ptr && !ref)
3386 return;
3388 /* Merge the old type qualifiers in the new type.
3390 Each old variant has qualifiers for specific reasons, and the new
3391 designated type as well. Each set of qualifiers represents useful
3392 information grabbed at some point, and merging the two simply unifies
3393 these inputs into the final type description.
3395 Consider for instance a volatile type frozen after an access to constant
3396 type designating it; after the designated type's freeze, we get here with
3397 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
3398 when the access type was processed. We will make a volatile and readonly
3399 designated type, because that's what it really is.
3401 We might also get here for a non-dummy OLD_TYPE variant with different
3402 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
3403 to private record type elaboration (see the comments around the call to
3404 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
3405 the qualifiers in those cases too, to avoid accidentally discarding the
3406 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
3407 new_type
3408 = build_qualified_type (new_type,
3409 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
3411 /* If old type and new type are identical, there is nothing to do. */
3412 if (old_type == new_type)
3413 return;
3415 /* Otherwise, first handle the simple case. */
3416 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
3418 TYPE_POINTER_TO (new_type) = ptr;
3419 TYPE_REFERENCE_TO (new_type) = ref;
3421 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3422 for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
3423 ptr1 = TYPE_NEXT_VARIANT (ptr1))
3424 TREE_TYPE (ptr1) = new_type;
3426 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3427 for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
3428 ref1 = TYPE_NEXT_VARIANT (ref1))
3429 TREE_TYPE (ref1) = new_type;
3432 /* Now deal with the unconstrained array case. In this case the "pointer"
3433 is actually a RECORD_TYPE where both fields are pointers to dummy nodes.
3434 Turn them into pointers to the correct types using update_pointer_to. */
3435 else if (!TYPE_IS_FAT_POINTER_P (ptr))
3436 gcc_unreachable ();
3438 else
3440 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
3441 tree array_field = TYPE_FIELDS (ptr);
3442 tree bounds_field = TREE_CHAIN (TYPE_FIELDS (ptr));
3443 tree new_ptr = TYPE_POINTER_TO (new_type);
3444 tree new_ref;
3445 tree var;
3447 /* Make pointers to the dummy template point to the real template. */
3448 update_pointer_to
3449 (TREE_TYPE (TREE_TYPE (bounds_field)),
3450 TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_ptr)))));
3452 /* The references to the template bounds present in the array type
3453 are made through a PLACEHOLDER_EXPR of type NEW_PTR. Since we
3454 are updating PTR to make it a full replacement for NEW_PTR as
3455 pointer to NEW_TYPE, we must rework the PLACEHOLDER_EXPR so as
3456 to make it of type PTR. */
3457 new_ref = build3 (COMPONENT_REF, TREE_TYPE (bounds_field),
3458 build0 (PLACEHOLDER_EXPR, ptr),
3459 bounds_field, NULL_TREE);
3461 /* Create the new array for the new PLACEHOLDER_EXPR and make pointers
3462 to the dummy array point to it. */
3463 update_pointer_to
3464 (TREE_TYPE (TREE_TYPE (array_field)),
3465 substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))),
3466 TREE_CHAIN (TYPE_FIELDS (new_ptr)), new_ref));
3468 /* Make PTR the pointer to NEW_TYPE. */
3469 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
3470 = TREE_TYPE (new_type) = ptr;
3472 /* And show the original pointer NEW_PTR to the debugger. This is the
3473 counterpart of the equivalent processing in gnat_pushdecl when the
3474 unconstrained array type is frozen after access types to it. Note
3475 that update_pointer_to can be invoked multiple times on the same
3476 pair of types because of the type variants. */
3477 if (TYPE_NAME (ptr)
3478 && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL
3479 && !DECL_ORIGINAL_TYPE (TYPE_NAME (ptr)))
3481 DECL_ORIGINAL_TYPE (TYPE_NAME (ptr)) = new_ptr;
3482 DECL_ARTIFICIAL (TYPE_NAME (ptr)) = 0;
3484 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
3485 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
3487 /* Now handle updating the allocation record, what the thin pointer
3488 points to. Update all pointers from the old record into the new
3489 one, update the type of the array field, and recompute the size. */
3490 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
3492 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
3493 = TREE_TYPE (TREE_TYPE (array_field));
3495 /* The size recomputation needs to account for alignment constraints, so
3496 we let layout_type work it out. This will reset the field offsets to
3497 what they would be in a regular record, so we shift them back to what
3498 we want them to be for a thin pointer designated type afterwards. */
3499 DECL_SIZE (TYPE_FIELDS (new_obj_rec)) = 0;
3500 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))) = 0;
3501 TYPE_SIZE (new_obj_rec) = 0;
3502 layout_type (new_obj_rec);
3504 shift_unc_components_for_thin_pointers (new_obj_rec);
3506 /* We are done, at last. */
3507 rest_of_record_type_compilation (ptr);
3511 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
3512 unconstrained one. This involves making or finding a template. */
3514 static tree
3515 convert_to_fat_pointer (tree type, tree expr)
3517 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
3518 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
3519 tree etype = TREE_TYPE (expr);
3520 tree template_tree;
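/* A fat pointer is a record whose first field points to the array data and
   whose second field points to the bounds template; the constructors built
   below fill in these two fields.  */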
3522 /* If EXPR is null, make a fat pointer that contains null pointers to the
3523 template and array. */
3524 if (integer_zerop (expr))
3525 return
3526 gnat_build_constructor
3527 (type,
3528 tree_cons (TYPE_FIELDS (type),
3529 convert (p_array_type, expr),
3530 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3531 convert (build_pointer_type (template_type),
3532 expr),
3533 NULL_TREE)));
3535 /* If EXPR is a thin pointer, make the template and data from the record. */
3536 else if (TYPE_IS_THIN_POINTER_P (etype))
3538 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
3540 expr = gnat_protect_expr (expr);
3541 if (TREE_CODE (expr) == ADDR_EXPR)
3542 expr = TREE_OPERAND (expr, 0);
3543 else
3544 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
3546 template_tree = build_component_ref (expr, NULL_TREE, fields, false);
3547 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3548 build_component_ref (expr, NULL_TREE,
3549 TREE_CHAIN (fields), false));
3552 /* Otherwise, build the constructor for the template. */
3553 else
3554 template_tree = build_template (template_type, TREE_TYPE (etype), expr);
3556 /* The final result is a constructor for the fat pointer.
3558 If EXPR is an argument of a foreign convention subprogram, the type it
3559 points to is directly the component type. In this case, the expression
3560 type may not match the corresponding FIELD_DECL type at this point, so we
3561 call "convert" here to fix that up if necessary. This type consistency is
3562 required, for instance because it ensures that possible later folding of
3563 COMPONENT_REFs against this constructor always yields something of the
3564 same type as the initial reference.
3566 Note that the call to "build_template" above is still fine because it
3567 will only refer to the provided TEMPLATE_TYPE in this case. */
3568 return
3569 gnat_build_constructor
3570 (type,
3571 tree_cons (TYPE_FIELDS (type),
3572 convert (p_array_type, expr),
3573 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3574 build_unary_op (ADDR_EXPR, NULL_TREE,
3575 template_tree),
3576 NULL_TREE)));
3579 /* Convert EXPR to a thin pointer type, TYPE. The only thing we know how to
3580 convert is something that is a fat pointer, so convert EXPR to a fat
3581 pointer first if it is not one already. */
3583 static tree
3584 convert_to_thin_pointer (tree type, tree expr)
3586 if (!TYPE_IS_FAT_POINTER_P (TREE_TYPE (expr)))
3587 expr
3588 = convert_to_fat_pointer
3589 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
3591 /* We get the pointer to the data and use a NOP_EXPR to make it the
3592 proper GCC type. */
3593 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
3594 false);
3595 expr = build1 (NOP_EXPR, type, expr);
3597 return expr;
3600 /* Create an expression whose value is that of EXPR,
3601 converted to type TYPE. The TREE_TYPE of the value
3602 is always TYPE. This function implements all reasonable
3603 conversions; callers should filter out those that are
3604 not permitted by the language being compiled. */
3606 tree
3607 convert (tree type, tree expr)
3609 tree etype = TREE_TYPE (expr);
3610 enum tree_code ecode = TREE_CODE (etype);
3611 enum tree_code code = TREE_CODE (type);
3613 /* If the expression is already of the right type, we are done. */
3614 if (etype == type)
3615 return expr;
3617 /* If both input and output have padding and are of variable size, do this
3618 as an unchecked conversion. Likewise if one is a mere variant of the
3619 other, so we avoid a pointless unpad/repad sequence. */
3620 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
3621 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
3622 && (!TREE_CONSTANT (TYPE_SIZE (type))
3623 || !TREE_CONSTANT (TYPE_SIZE (etype))
3624 || gnat_types_compatible_p (type, etype)
3625 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
3626 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
3629 /* If the output type has padding, convert to the inner type and make a
3630 constructor to build the record, unless a variable size is involved. */
3631 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
3633 /* If we previously converted from another type and our type is
3634 of variable size, remove the conversion to avoid the need for
3635 variable-sized temporaries. Likewise for a conversion between
3636 original and packable version. */
3637 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3638 && (!TREE_CONSTANT (TYPE_SIZE (type))
3639 || (ecode == RECORD_TYPE
3640 && TYPE_NAME (etype)
3641 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
3642 expr = TREE_OPERAND (expr, 0);
3644 /* If we are just removing the padding from expr, convert the original
3645 object if we have variable size in order to avoid the need for some
3646 variable-sized temporaries. Likewise if the padding is a variant
3647 of the other, so we avoid a pointless unpad/repad sequence. */
3648 if (TREE_CODE (expr) == COMPONENT_REF
3649 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
3650 && (!TREE_CONSTANT (TYPE_SIZE (type))
3651 || gnat_types_compatible_p (type,
3652 TREE_TYPE (TREE_OPERAND (expr, 0)))
3653 || (ecode == RECORD_TYPE
3654 && TYPE_NAME (etype)
3655 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
3656 return convert (type, TREE_OPERAND (expr, 0));
3658 /* If the inner type is of self-referential size and the expression type
3659 is a record, do this as an unchecked conversion. But first pad the
3660 expression if possible to have the same size on both sides. */
3661 if (ecode == RECORD_TYPE
3662 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
3664 if (TREE_CONSTANT (TYPE_SIZE (etype)))
3665 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
3666 false, false, false, true), expr);
3667 return unchecked_convert (type, expr, false);
3670 /* If we are converting between array types with variable size, do the
3671 final conversion as an unchecked conversion, again to avoid the need
3672 for some variable-sized temporaries. If valid, this conversion is
3673 very likely purely technical and without real effects. */
3674 if (ecode == ARRAY_TYPE
3675 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
3676 && !TREE_CONSTANT (TYPE_SIZE (etype))
3677 && !TREE_CONSTANT (TYPE_SIZE (type)))
3678 return unchecked_convert (type,
3679 convert (TREE_TYPE (TYPE_FIELDS (type)),
3680 expr),
3681 false);
3683 return
3684 gnat_build_constructor (type,
3685 tree_cons (TYPE_FIELDS (type),
3686 convert (TREE_TYPE
3687 (TYPE_FIELDS (type)),
3688 expr),
3689 NULL_TREE));
3692 /* If the input type has padding, remove it and convert to the output type.
3693 The ordering of the conditions is arranged to ensure that the output type
3694 is not a padding type here, as it is not clear whether the conversion
3695 would always be correct if it were to happen. */
3696 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
3698 tree unpadded;
3700 /* If we have just converted to this padded type, just get the
3701 inner expression. */
3702 if (TREE_CODE (expr) == CONSTRUCTOR
3703 && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
3704 && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
3705 == TYPE_FIELDS (etype))
3706 unpadded
3707 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;
3709 /* Otherwise, build an explicit component reference. */
3710 else
3711 unpadded
3712 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
3714 return convert (type, unpadded);
3717 /* If the input is a biased type, adjust first. */
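/* With a biased representation, the stored value is an offset from the
   type's lower bound: for instance, in a subtype ranging over 7 .. 10, a
   stored 2 denotes 9 (illustrative range only).  Adding back TYPE_MIN_VALUE
   recovers the actual value before the conversion proper.  */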
3718 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
3719 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
3720 fold_convert (TREE_TYPE (etype),
3721 expr),
3722 TYPE_MIN_VALUE (etype)));
3724 /* If the input is a justified modular type, we need to extract the actual
3725 object before converting it to any other type with the exceptions of an
3726 unconstrained array or of a mere type variant. It is useful to avoid the
3727 extraction and conversion in the type variant case because it could end
3728 up replacing a VAR_DECL expr by a constructor and we might be about to
3729 take the address of the result. */
3730 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
3731 && code != UNCONSTRAINED_ARRAY_TYPE
3732 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
3733 return convert (type, build_component_ref (expr, NULL_TREE,
3734 TYPE_FIELDS (etype), false));
3736 /* If converting to a type that contains a template, convert to the data
3737 type and then build the template. */
3738 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
3740 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
3742 /* If the source already has a template, get a reference to the
3743 associated array only, as we are going to rebuild a template
3744 for the target type anyway. */
3745 expr = maybe_unconstrained_array (expr);
3747 return
3748 gnat_build_constructor
3749 (type,
3750 tree_cons (TYPE_FIELDS (type),
3751 build_template (TREE_TYPE (TYPE_FIELDS (type)),
3752 obj_type, NULL_TREE),
3753 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3754 convert (obj_type, expr), NULL_TREE)));
3757 /* There are some kinds of expressions that we process
3758 specially. */
3759 switch (TREE_CODE (expr))
3761 case ERROR_MARK:
3762 return expr;
3764 case NULL_EXPR:
3765 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
3766 conversion in gnat_expand_expr. NULL_EXPR does not represent
3767 an actual value, so no conversion is needed. */
3768 expr = copy_node (expr);
3769 TREE_TYPE (expr) = type;
3770 return expr;
3772 case STRING_CST:
3773 /* If we are converting a STRING_CST to another constrained array type,
3774 just make a new one in the proper type. */
3775 if (code == ecode && AGGREGATE_TYPE_P (etype)
3776 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
3777 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
3779 expr = copy_node (expr);
3780 TREE_TYPE (expr) = type;
3781 return expr;
3783 break;
3785 case VECTOR_CST:
3786 /* If we are converting a VECTOR_CST to a mere variant type, just make
3787 a new one in the proper type. */
3788 if (code == ecode && gnat_types_compatible_p (type, etype))
3790 expr = copy_node (expr);
3791 TREE_TYPE (expr) = type;
3792 return expr;
3795 case CONSTRUCTOR:
3796 /* If we are converting a CONSTRUCTOR to a mere variant type, just make
3797 a new one in the proper type. */
3798 if (code == ecode && gnat_types_compatible_p (type, etype))
3800 expr = copy_node (expr);
3801 TREE_TYPE (expr) = type;
3802 return expr;
3805 /* Likewise for a conversion between original and packable version, or
3806 conversion between types of the same size and with the same list of
3807 fields, but we have to work harder to preserve type consistency. */
3808 if (code == ecode
3809 && code == RECORD_TYPE
3810 && (TYPE_NAME (type) == TYPE_NAME (etype)
3811 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
3814 VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
3815 unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
3816 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, len);
3817 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
3818 unsigned HOST_WIDE_INT idx;
3819 tree index, value;
3821 /* Whether we need to clear TREE_CONSTANT et al. on the output
3822 constructor when we convert in place. */
3823 bool clear_constant = false;
3825 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
3827 constructor_elt *elt;
3828 /* We expect only simple constructors. */
3829 if (!SAME_FIELD_P (index, efield))
3830 break;
3831 /* The field must be the same. */
3832 if (!SAME_FIELD_P (efield, field))
3833 break;
3834 elt = VEC_quick_push (constructor_elt, v, NULL);
3835 elt->index = field;
3836 elt->value = convert (TREE_TYPE (field), value);
3838 /* If packing has made this field a bitfield and the input
3839 value couldn't be emitted statically any more, we need to
3840 clear TREE_CONSTANT on our output. */
3841 if (!clear_constant
3842 && TREE_CONSTANT (expr)
3843 && !CONSTRUCTOR_BITFIELD_P (efield)
3844 && CONSTRUCTOR_BITFIELD_P (field)
3845 && !initializer_constant_valid_for_bitfield_p (value))
3846 clear_constant = true;
3848 efield = TREE_CHAIN (efield);
3849 field = TREE_CHAIN (field);
3852 /* If we have been able to match and convert all the input fields
3853 to their output type, convert in place now. We'll fall back to a
3854 view conversion downstream otherwise. */
3855 if (idx == len)
3857 expr = copy_node (expr);
3858 TREE_TYPE (expr) = type;
3859 CONSTRUCTOR_ELTS (expr) = v;
3860 if (clear_constant)
3861 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
3862 return expr;
3866 /* Likewise for a conversion between array type and vector type with a
3867 compatible representative array. */
3868 else if (code == VECTOR_TYPE
3869 && ecode == ARRAY_TYPE
3870 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
3871 etype))
3873 VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
3874 unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
3875 VEC(constructor_elt,gc) *v;
3876 unsigned HOST_WIDE_INT ix;
3877 tree value;
3879 /* Build a VECTOR_CST from a *constant* array constructor. */
3880 if (TREE_CONSTANT (expr))
3882 bool constant_p = true;
3884 /* Iterate through elements and check if all constructor
3885 elements are *_CSTs. */
3886 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
3887 if (!CONSTANT_CLASS_P (value))
3889 constant_p = false;
3890 break;
3893 if (constant_p)
3894 return build_vector_from_ctor (type,
3895 CONSTRUCTOR_ELTS (expr));
3898 /* Otherwise, build a regular vector constructor. */
3899 v = VEC_alloc (constructor_elt, gc, len);
3900 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
3902 constructor_elt *elt = VEC_quick_push (constructor_elt, v, NULL);
3903 elt->index = NULL_TREE;
3904 elt->value = value;
3906 expr = copy_node (expr);
3907 TREE_TYPE (expr) = type;
3908 CONSTRUCTOR_ELTS (expr) = v;
3909 return expr;
3911 break;
3913 case UNCONSTRAINED_ARRAY_REF:
3914 /* Convert this to the type of the inner array by getting the address of
3915 the array from the template. */
3916 expr = TREE_OPERAND (expr, 0);
3917 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3918 build_component_ref (expr, NULL_TREE,
3919 TYPE_FIELDS
3920 (TREE_TYPE (expr)),
3921 false));
3922 etype = TREE_TYPE (expr);
3923 ecode = TREE_CODE (etype);
3924 break;
3926 case VIEW_CONVERT_EXPR:
3928 /* GCC 4.x is very sensitive to type consistency overall, and view
3929 conversions thus are very frequent. Even though just "convert"ing
3930 the inner operand to the output type is fine in most cases, it
3931 might expose unexpected input/output type mismatches in special
3932 circumstances so we avoid such recursive calls when we can. */
3933 tree op0 = TREE_OPERAND (expr, 0);
3935 /* If we are converting back to the original type, we can just
3936 lift the input conversion. This is a common occurrence with
3937 switches back-and-forth amongst type variants. */
3938 if (type == TREE_TYPE (op0))
3939 return op0;
3941 /* Otherwise, if we're converting between two aggregate or vector
3942 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
3943 target type in place or to just convert the inner expression. */
3944 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
3945 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
3947 /* If we are converting between mere variants, we can just
3948 substitute the VIEW_CONVERT_EXPR in place. */
3949 if (gnat_types_compatible_p (type, etype))
3950 return build1 (VIEW_CONVERT_EXPR, type, op0);
3952 /* Otherwise, we may just bypass the input view conversion unless
3953 one of the types is a fat pointer, which is handled by the
3954 specialized code below that relies on exact type matching. */
3955 else if (!TYPE_IS_FAT_POINTER_P (type)
3956 && !TYPE_IS_FAT_POINTER_P (etype))
3957 return convert (type, op0);
3960 break;
3962 default:
3963 break;
3966 /* Check for converting to a pointer to an unconstrained array. */
3967 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
3968 return convert_to_fat_pointer (type, expr);
3970 /* If we are converting between two aggregate or vector types that are mere
3971 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
3972 to a vector type from its representative array type. */
3973 else if ((code == ecode
3974 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
3975 && gnat_types_compatible_p (type, etype))
3976 || (code == VECTOR_TYPE
3977 && ecode == ARRAY_TYPE
3978 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
3979 etype)))
3980 return build1 (VIEW_CONVERT_EXPR, type, expr);
3982 /* If we are converting between tagged types, try to upcast properly. */
3983 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
3984 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
3986 tree child_etype = etype;
3987 do {
3988 tree field = TYPE_FIELDS (child_etype);
3989 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
3990 return build_component_ref (expr, NULL_TREE, field, false);
3991 child_etype = TREE_TYPE (field);
3992 } while (TREE_CODE (child_etype) == RECORD_TYPE);
3995 /* In all other cases of related types, make a NOP_EXPR. */
3996 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
3997 return fold_convert (type, expr);
3999 switch (code)
4001 case VOID_TYPE:
4002 return fold_build1 (CONVERT_EXPR, type, expr);
4004 case INTEGER_TYPE:
4005 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4006 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4007 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4008 return unchecked_convert (type, expr, false);
4009 else if (TYPE_BIASED_REPRESENTATION_P (type))
4010 return fold_convert (type,
4011 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4012 convert (TREE_TYPE (type), expr),
4013 TYPE_MIN_VALUE (type)));
4015 /* ... fall through ... */
4017 case ENUMERAL_TYPE:
4018 case BOOLEAN_TYPE:
4019 /* If we are converting an additive expression to an integer type
4020 with lower precision, be wary of the optimization that can be
4021 applied by convert_to_integer. There are 2 problematic cases:
4022 - if the first operand was originally of a biased type,
4023 because we could be recursively called to convert it
4024 to an intermediate type and thus rematerialize the
4025 additive operator endlessly,
4026 - if the expression contains a placeholder, because an
4027 intermediate conversion that changes the sign could
4028 be inserted and thus introduce an artificial overflow
4029 at compile time when the placeholder is substituted. */
4030 if (code == INTEGER_TYPE
4031 && ecode == INTEGER_TYPE
4032 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4033 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4035 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4037 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4038 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4039 || CONTAINS_PLACEHOLDER_P (expr))
4040 return build1 (NOP_EXPR, type, expr);
4043 return fold (convert_to_integer (type, expr));
4045 case POINTER_TYPE:
4046 case REFERENCE_TYPE:
4047 /* If converting between two pointers to records denoting
4048 both a template and type, adjust if needed to account
4049 for any differing offsets, since one might be negative. */
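/* The offset can be negative because the designated type of a thin pointer
   may have been shifted so that its template sits at a negative offset; see
   shift_unc_components_for_thin_pointers above.  */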
4050 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
4052 tree bit_diff
4053 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
4054 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
4055 tree byte_diff
4056 = size_binop (CEIL_DIV_EXPR, bit_diff, sbitsize_unit_node);
4057 expr = build1 (NOP_EXPR, type, expr);
4058 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
4059 if (integer_zerop (byte_diff))
4060 return expr;
4062 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4063 fold (convert (sizetype, byte_diff)));
4066 /* If converting to a thin pointer, handle specially. */
4067 if (TYPE_IS_THIN_POINTER_P (type)
4068 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
4069 return convert_to_thin_pointer (type, expr);
4071 /* If converting fat pointer to normal pointer, get the pointer to the
4072 array and then convert it. */
4073 else if (TYPE_IS_FAT_POINTER_P (etype))
4074 expr
4075 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
4077 return fold (convert_to_pointer (type, expr));
4079 case REAL_TYPE:
4080 return fold (convert_to_real (type, expr));
4082 case RECORD_TYPE:
4083 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4084 return
4085 gnat_build_constructor
4086 (type, tree_cons (TYPE_FIELDS (type),
4087 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
4088 NULL_TREE));
4090 /* ... fall through ... */
4092 case ARRAY_TYPE:
4093 /* In these cases, assume the front-end has validated the conversion.
4094 If the conversion is valid, it will be a bit-wise conversion, so
4095 it can be viewed as an unchecked conversion. */
4096 return unchecked_convert (type, expr, false);
4098 case UNION_TYPE:
4099 /* This is either a conversion between a tagged type and some
4100 subtype, which we have to mark as a UNION_TYPE because of
4101 overlapping fields, or a conversion of an Unchecked_Union. */
4102 return unchecked_convert (type, expr, false);
4104 case UNCONSTRAINED_ARRAY_TYPE:
4105 /* If the input is a VECTOR_TYPE, convert to the representative
4106 array type first. */
4107 if (ecode == VECTOR_TYPE)
4109 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4110 etype = TREE_TYPE (expr);
4111 ecode = TREE_CODE (etype);
4114 /* If EXPR is a constrained array, take its address, convert it to a
4115 fat pointer, and then dereference it. Likewise if EXPR is a
4116 record containing both a template and a constrained array.
4117 Note that a record representing a justified modular type
4118 always represents a packed constrained array. */
4119 if (ecode == ARRAY_TYPE
4120 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4121 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4122 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4123 return
4124 build_unary_op
4125 (INDIRECT_REF, NULL_TREE,
4126 convert_to_fat_pointer (TREE_TYPE (type),
4127 build_unary_op (ADDR_EXPR,
4128 NULL_TREE, expr)));
4130 /* Do something very similar for converting one unconstrained
4131 array to another. */
4132 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4133 return
4134 build_unary_op (INDIRECT_REF, NULL_TREE,
4135 convert (TREE_TYPE (type),
4136 build_unary_op (ADDR_EXPR,
4137 NULL_TREE, expr)));
4138 else
4139 gcc_unreachable ();
4141 case COMPLEX_TYPE:
4142 return fold (convert_to_complex (type, expr));
4144 default:
4145 gcc_unreachable ();
4149 /* Remove all conversions that are done in EXP. This includes converting
4150 from a padded type or to a justified modular type. If TRUE_ADDRESS
4151 is true, always return the address of the containing object even if
4152 the address is not bit-aligned. */
4154 tree
4155 remove_conversions (tree exp, bool true_address)
4157 switch (TREE_CODE (exp))
4159 case CONSTRUCTOR:
4160 if (true_address
4161 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4162 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4163 return
4164 remove_conversions (VEC_index (constructor_elt,
4165 CONSTRUCTOR_ELTS (exp), 0)->value,
4166 true);
4167 break;
4169 case COMPONENT_REF:
4170 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4171 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4172 break;
4174 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
4175 CASE_CONVERT:
4176 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4178 default:
4179 break;
4182 return exp;
4185 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4186 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
4187 likewise return an expression pointing to the underlying array. */
4189 tree
4190 maybe_unconstrained_array (tree exp)
4192 enum tree_code code = TREE_CODE (exp);
4193 tree new_exp;
4195 switch (TREE_CODE (TREE_TYPE (exp)))
4197 case UNCONSTRAINED_ARRAY_TYPE:
4198 if (code == UNCONSTRAINED_ARRAY_REF)
4200 new_exp = TREE_OPERAND (exp, 0);
4201 new_exp
4202 = build_unary_op (INDIRECT_REF, NULL_TREE,
4203 build_component_ref (new_exp, NULL_TREE,
4204 TYPE_FIELDS
4205 (TREE_TYPE (new_exp)),
4206 false));
4207 TREE_READONLY (new_exp) = TREE_READONLY (exp);
4208 return new_exp;
4211 else if (code == NULL_EXPR)
4212 return build1 (NULL_EXPR,
4213 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
4214 (TREE_TYPE (TREE_TYPE (exp))))),
4215 TREE_OPERAND (exp, 0));
4217 case RECORD_TYPE:
4218 /* If this is a padded type, convert to the unpadded type and see if
4219 it contains a template. */
4220 if (TYPE_PADDING_P (TREE_TYPE (exp)))
4222 new_exp = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
4223 if (TREE_CODE (TREE_TYPE (new_exp)) == RECORD_TYPE
4224 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new_exp)))
4225 return
4226 build_component_ref (new_exp, NULL_TREE,
4227 TREE_CHAIN
4228 (TYPE_FIELDS (TREE_TYPE (new_exp))),
4229 false);
4231 else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
4232 return
4233 build_component_ref (exp, NULL_TREE,
4234 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))),
4235 false);
4236 break;
4238 default:
4239 break;
4242 return exp;
4245 /* If EXP's type is a VECTOR_TYPE, return EXP converted to the associated
4246 TYPE_REPRESENTATIVE_ARRAY. */
4248 tree
4249 maybe_vector_array (tree exp)
4251 tree etype = TREE_TYPE (exp);
4253 if (VECTOR_TYPE_P (etype))
4254 exp = convert (TYPE_REPRESENTATIVE_ARRAY (etype), exp);
4256 return exp;
4259 /* Return true if EXPR is an expression that can be folded as an operand
4260 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
4262 static bool
4263 can_fold_for_view_convert_p (tree expr)
4265 tree t1, t2;
4267 /* The folder will fold NOP_EXPRs between integral types with the same
4268 precision (in the middle-end's sense). We cannot allow it if the
4269 types don't have the same precision in the Ada sense as well. */
4270 if (TREE_CODE (expr) != NOP_EXPR)
4271 return true;
4273 t1 = TREE_TYPE (expr);
4274 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4276 /* Defer to the folder for non-integral conversions. */
4277 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4278 return true;
4280 /* Only fold conversions that preserve both precisions. */
4281 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4282 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4283 return true;
4285 return false;
4288 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
4289 If NOTRUNC_P is true, truncation operations should be suppressed.
4291 Special care is required with (source or target) integral types whose
4292 precision is not equal to their size, to make sure we fetch or assign
4293 the value bits whose location might depend on the endianness, e.g.
4295 Rmsize : constant := 8;
4296 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4298 type Bit_Array is array (1 .. Rmsize) of Boolean;
4299 pragma Pack (Bit_Array);
4301 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4303 Value : Int := 2#1000_0001#;
4304 Vbits : Bit_Array := To_Bit_Array (Value);
4306 we expect the 8 bits at Vbits'Address to always contain Value, while
4307 their original location depends on the endianness, at Value'Address
4308 on a little-endian architecture but not on a big-endian one. */
4310 tree
4311 unchecked_convert (tree type, tree expr, bool notrunc_p)
4313 tree etype = TREE_TYPE (expr);
4314 enum tree_code ecode = TREE_CODE (etype);
4315 enum tree_code code = TREE_CODE (type);
4317 /* If the expression is already of the right type, we are done. */
4318 if (etype == type)
4319 return expr;
4321 /* If both types are integral, just do a normal conversion.
4322 Likewise for a conversion to an unconstrained array. */
4323 if ((((INTEGRAL_TYPE_P (type)
4324 && !(code == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (type)))
4325 || (POINTER_TYPE_P (type) && ! TYPE_IS_THIN_POINTER_P (type))
4326 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
4327 && ((INTEGRAL_TYPE_P (etype)
4328 && !(ecode == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (etype)))
4329 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
4330 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
4331 || code == UNCONSTRAINED_ARRAY_TYPE)
4333 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4335 tree ntype = copy_type (etype);
4336 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
4337 TYPE_MAIN_VARIANT (ntype) = ntype;
4338 expr = build1 (NOP_EXPR, ntype, expr);
4341 if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
4343 tree rtype = copy_type (type);
4344 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
4345 TYPE_MAIN_VARIANT (rtype) = rtype;
4346 expr = convert (rtype, expr);
4347 expr = build1 (NOP_EXPR, type, expr);
4349 else
4350 expr = convert (type, expr);
4353 /* If we are converting to an integral type whose precision is not equal
4354 to its size, first unchecked convert to a record that contains an
4355 object of the output type. Then extract the field. */
4356 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4357 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4358 GET_MODE_BITSIZE (TYPE_MODE (type))))
4360 tree rec_type = make_node (RECORD_TYPE);
4361 tree field = create_field_decl (get_identifier ("OBJ"), type,
4362 rec_type, 1, 0, 0, 0);
4364 TYPE_FIELDS (rec_type) = field;
4365 layout_type (rec_type);
4367 expr = unchecked_convert (rec_type, expr, notrunc_p);
4368 expr = build_component_ref (expr, NULL_TREE, field, false);
4371 /* Similarly if we are converting from an integral type whose precision
4372 is not equal to its size. */
4373 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype)
4374 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
4375 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4377 tree rec_type = make_node (RECORD_TYPE);
4378 tree field
4379 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
4380 1, 0, 0, 0);
4382 TYPE_FIELDS (rec_type) = field;
4383 layout_type (rec_type);
4385 expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
4386 expr = unchecked_convert (type, expr, notrunc_p);
4389 /* We have a special case when we are converting between two unconstrained
4390 array types. In that case, take the address, convert the fat pointer
4391 types, and dereference. */
4392 else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
4393 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4394 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
4395 build_unary_op (ADDR_EXPR, NULL_TREE,
4396 expr)));
4398 /* Another special case is when we are converting to a vector type from its
4399 representative array type; this is a regular conversion. */
4400 else if (code == VECTOR_TYPE
4401 && ecode == ARRAY_TYPE
4402 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4403 etype))
4404 expr = convert (type, expr);
4406 else
4408 expr = maybe_unconstrained_array (expr);
4409 etype = TREE_TYPE (expr);
4410 ecode = TREE_CODE (etype);
4411 if (can_fold_for_view_convert_p (expr))
4412 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
4413 else
4414 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
4417 /* If the result is an integral type whose precision is not equal to its
4418 size, sign- or zero-extend the result. We need not do this if the input
4419 is an integral type of the same precision and signedness or if the output
4420 is a biased type or if both the input and output are unsigned. */
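/* The extension is done by shifting left and then right by the number of
   unused bits in the mode, i.e. GET_MODE_BITSIZE minus TYPE_RM_SIZE; for
   instance, an 11-bit value carried in a 16-bit mode is shifted by 5 bits
   each way (illustrative sizes only).  */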
4421 if (!notrunc_p
4422 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4423 && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
4424 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4425 GET_MODE_BITSIZE (TYPE_MODE (type)))
4426 && !(INTEGRAL_TYPE_P (etype)
4427 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
4428 && operand_equal_p (TYPE_RM_SIZE (type),
4429 (TYPE_RM_SIZE (etype) != 0
4430 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
4432 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
4434 tree base_type
4435 = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
4436 tree shift_expr
4437 = convert (base_type,
4438 size_binop (MINUS_EXPR,
4439 bitsize_int
4440 (GET_MODE_BITSIZE (TYPE_MODE (type))),
4441 TYPE_RM_SIZE (type)));
4442 expr
4443 = convert (type,
4444 build_binary_op (RSHIFT_EXPR, base_type,
4445 build_binary_op (LSHIFT_EXPR, base_type,
4446 convert (base_type, expr),
4447 shift_expr),
4448 shift_expr));
4451 /* An unchecked conversion should never raise Constraint_Error. The code
4452 below assumes that GCC's conversion routines overflow the same way that
4453 the underlying hardware does. This is probably true. In the rare case
4454 when it is false, we can rely on the fact that such conversions are
4455 erroneous anyway. */
4456 if (TREE_CODE (expr) == INTEGER_CST)
4457 TREE_OVERFLOW (expr) = 0;
4459 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
4460 mark the expression as no longer constant. */
4461 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4462 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
4463 OEP_ONLY_CONST))
4464 TREE_CONSTANT (expr) = 0;
4466 return expr;
4469 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
4470 the latter being a record type as predicated by Is_Record_Type. */
4472 enum tree_code
4473 tree_code_for_record_type (Entity_Id gnat_type)
4475 Node_Id component_list
4476 = Component_List (Type_Definition
4477 (Declaration_Node
4478 (Implementation_Base_Type (gnat_type))));
4479 Node_Id component;
4481 /* Make this a UNION_TYPE unless it's either not an Unchecked_Union or
4482 we have a non-discriminant field outside a variant. In either case,
4483 it's a RECORD_TYPE. */
4485 if (!Is_Unchecked_Union (gnat_type))
4486 return RECORD_TYPE;
4488 for (component = First_Non_Pragma (Component_Items (component_list));
4489 Present (component);
4490 component = Next_Non_Pragma (component))
4491 if (Ekind (Defining_Entity (component)) == E_Component)
4492 return RECORD_TYPE;
4494 return UNION_TYPE;
4497 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
4498 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
4499 according to the presence of an alignment clause on the type or, if it
4500 is an array, on the component type. */
4502 bool
4503 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
4505 gnat_type = Underlying_Type (gnat_type);
4507 *align_clause = Present (Alignment_Clause (gnat_type));
4509 if (Is_Array_Type (gnat_type))
4511 gnat_type = Underlying_Type (Component_Type (gnat_type));
4512 if (Present (Alignment_Clause (gnat_type)))
4513 *align_clause = true;
4516 if (!Is_Floating_Point_Type (gnat_type))
4517 return false;
4519 if (UI_To_Int (Esize (gnat_type)) != 64)
4520 return false;
4522 return true;
4525 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
4526 size is greater or equal to 64 bits, or an array of such a type. Set
4527 ALIGN_CLAUSE according to the presence of an alignment clause on the
4528 type or, if it is an array, on the component type. */
4530 bool
4531 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
4533 gnat_type = Underlying_Type (gnat_type);
4535 *align_clause = Present (Alignment_Clause (gnat_type));
4537 if (Is_Array_Type (gnat_type))
4539 gnat_type = Underlying_Type (Component_Type (gnat_type));
4540 if (Present (Alignment_Clause (gnat_type)))
4541 *align_clause = true;
4544 if (!Is_Scalar_Type (gnat_type))
4545 return false;
4547 if (UI_To_Int (Esize (gnat_type)) < 64)
4548 return false;
4550 return true;
4553 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
4554 component of an aggregate type. */
4556 bool
4557 type_for_nonaliased_component_p (tree gnu_type)
4559 /* If the type is passed by reference, we may have pointers to the
4560 component so it cannot be made non-aliased. */
4561 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
4562 return false;
4564 /* We used to say that any component of aggregate type is aliased
4565 because the front-end may take 'Reference of it. The front-end
4566 has been enhanced in the meantime so as to use a renaming instead
4567 in most cases, but the back-end can probably take the address of
4568 such a component too so we go for the conservative stance.
4570 For instance, we might need the address of any array type, even
4571 if normally passed by copy, to construct a fat pointer if the
4572 component is used as an actual for an unconstrained formal.
4574 Likewise for record types: even if a specific record subtype is
4575 passed by copy, the parent type might be passed by ref (e.g. if
4576 it's of variable size) and we might take the address of a child
4577 component to pass to a parent formal. We have no way to check
4578 for such conditions here. */
4579 if (AGGREGATE_TYPE_P (gnu_type))
4580 return false;
4582 return true;
4585 /* Perform final processing on global variables. */
4587 void
4588 gnat_write_global_declarations (void)
4590 /* Proceed to optimize and emit assembly.
4591 FIXME: shouldn't be the front end's responsibility to call this. */
4592 cgraph_finalize_compilation_unit ();
4594 /* Emit debug info for all global declarations. */
4595 emit_debug_global_declarations (VEC_address (tree, global_decls),
4596 VEC_length (tree, global_decls));
4599 /* ************************************************************************
4600 * * GCC builtins support *
4601 * ************************************************************************ */
4603 /* The general scheme is fairly simple:
4605 For each builtin function/type to be declared, gnat_install_builtins calls
4606 internal facilities which eventually get to gnat_pushdecl, which in turn
4607 tracks the builtin function decls so declared in the 'builtin_decls' global
4608 data structure. When an Intrinsic subprogram declaration is processed, we
4609 search this global data structure to retrieve the associated BUILT_IN DECL
4610 node. */
4612 /* Search the chain of currently available builtin declarations for a node
4613 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
4614 found, if any, or NULL_TREE otherwise. */
4615 tree
4616 builtin_decl_for (tree name)
4618 unsigned i;
4619 tree decl;
4621 for (i = 0; VEC_iterate(tree, builtin_decls, i, decl); i++)
4622 if (DECL_NAME (decl) == name)
4623 return decl;
4625 return NULL_TREE;
4628 /* The code below eventually exposes gnat_install_builtins, which declares
4629 the builtin types and functions we might need, either internally or as
4630 user accessible facilities.
4632 ??? This is a first implementation shot, still in rough shape. It is
4633 heavily inspired by the "C" family implementation, with chunks copied
4634 verbatim from there.
4636 Two obvious TODO candidates are
4637 o Use a more efficient name/decl mapping scheme
4638 o Devise a middle-end infrastructure to avoid having to copy
4639 pieces between front-ends. */
4641 /* ----------------------------------------------------------------------- *
4642 * BUILTIN ELEMENTARY TYPES *
4643 * ----------------------------------------------------------------------- */
4645 /* Standard data types to be used in builtin argument declarations. */
4647 enum c_tree_index
4649 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
4650 CTI_STRING_TYPE,
4651 CTI_CONST_STRING_TYPE,
4653 CTI_MAX
4656 static tree c_global_trees[CTI_MAX];
4658 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
4659 #define string_type_node c_global_trees[CTI_STRING_TYPE]
4660 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
4662 /* ??? In addition to some attribute handlers, we currently don't support a
4663 (small) number of builtin-types, which in turn inhibits support for a
4664 number of builtin functions. */
4665 #define wint_type_node void_type_node
4666 #define intmax_type_node void_type_node
4667 #define uintmax_type_node void_type_node
4669 /* Build the void_list_node (void_type_node having been created). */
4671 static tree
4672 build_void_list_node (void)
4674 tree t = build_tree_list (NULL_TREE, void_type_node);
4675 return t;
4678 /* Used to help initialize the builtin-types.def table. When a type of
4679 the correct size doesn't exist, use error_mark_node instead of NULL.
4680 The latter results in segfaults even when a decl using the type doesn't
4681 get invoked. */
4683 static tree
4684 builtin_type_for_size (int size, bool unsignedp)
4686 tree type = gnat_type_for_size (size, unsignedp);
4687 return type ? type : error_mark_node;
4690 /* Build/push the elementary type decls that builtin functions/types
4691 will need. */
4693 static void
4694 install_builtin_elementary_types (void)
4696 signed_size_type_node = gnat_signed_type (size_type_node);
4697 pid_type_node = integer_type_node;
4698 void_list_node = build_void_list_node ();
4700 string_type_node = build_pointer_type (char_type_node);
4701 const_string_type_node
4702 = build_pointer_type (build_qualified_type
4703 (char_type_node, TYPE_QUAL_CONST));
4706 /* ----------------------------------------------------------------------- *
4707 * BUILTIN FUNCTION TYPES *
4708 * ----------------------------------------------------------------------- */
4710 /* Now, builtin function types per se. */
4712 enum c_builtin_type
4714 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
4715 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
4716 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
4717 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
4718 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4719 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4720 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
4721 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
4722 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
4723 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
4724 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
4725 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
4726 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4727 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4728 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG6) \
4729 NAME,
4730 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
4731 #include "builtin-types.def"
4732 #undef DEF_PRIMITIVE_TYPE
4733 #undef DEF_FUNCTION_TYPE_0
4734 #undef DEF_FUNCTION_TYPE_1
4735 #undef DEF_FUNCTION_TYPE_2
4736 #undef DEF_FUNCTION_TYPE_3
4737 #undef DEF_FUNCTION_TYPE_4
4738 #undef DEF_FUNCTION_TYPE_5
4739 #undef DEF_FUNCTION_TYPE_6
4740 #undef DEF_FUNCTION_TYPE_7
4741 #undef DEF_FUNCTION_TYPE_VAR_0
4742 #undef DEF_FUNCTION_TYPE_VAR_1
4743 #undef DEF_FUNCTION_TYPE_VAR_2
4744 #undef DEF_FUNCTION_TYPE_VAR_3
4745 #undef DEF_FUNCTION_TYPE_VAR_4
4746 #undef DEF_FUNCTION_TYPE_VAR_5
4747 #undef DEF_POINTER_TYPE
4748 BT_LAST
4751 typedef enum c_builtin_type builtin_type;
4753 /* A temporary array used in communication with def_fn_type. */
4754 static GTY(()) tree builtin_types[(int) BT_LAST + 1];
4756 /* A helper function for install_builtin_function_types. Build function type
4757 for DEF with return type RET and N arguments. If VAR is true, then the
4758 function should be variadic after those N arguments.
4760 Takes special care not to ICE if any of the types involved are
4761 error_mark_node, which indicates that said type is not in fact available
4762 (see builtin_type_for_size). In which case the function type as a whole
4763 should be error_mark_node. */
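/* For instance, an entry like DEF_FUNCTION_TYPE_1 (BT_FN_PTR_SIZE, BT_PTR,
   BT_SIZE) in builtin-types.def expands to
   def_fn_type (BT_FN_PTR_SIZE, BT_PTR, 0, 1, BT_SIZE) through the macros
   defined in install_builtin_function_types below (entry shown for
   illustration only).  */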
4765 static void
4766 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
4768 tree args = NULL, t;
4769 va_list list;
4770 int i;
4772 va_start (list, n);
4773 for (i = 0; i < n; ++i)
4775 builtin_type a = (builtin_type) va_arg (list, int);
4776 t = builtin_types[a];
4777 if (t == error_mark_node)
4778 goto egress;
4779 args = tree_cons (NULL_TREE, t, args);
4781 va_end (list);
4783 args = nreverse (args);
4784 if (!var)
4785 args = chainon (args, void_list_node);
4787 t = builtin_types[ret];
4788 if (t == error_mark_node)
4789 goto egress;
4790 t = build_function_type (t, args);
4792 egress:
4793 builtin_types[def] = t;
4796 /* Build the builtin function types and install them in the builtin_types
4797 array for later use in builtin function decls. */
4799 static void
4800 install_builtin_function_types (void)
4802 tree va_list_ref_type_node;
4803 tree va_list_arg_type_node;
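/* When va_list is an array type, a va_list argument decays to a pointer to
   its element type when passed, so a pointer type is used for both the
   argument and reference forms in that case.  */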
4805 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4807 va_list_arg_type_node = va_list_ref_type_node =
4808 build_pointer_type (TREE_TYPE (va_list_type_node));
4810 else
4812 va_list_arg_type_node = va_list_type_node;
4813 va_list_ref_type_node = build_reference_type (va_list_type_node);
4816 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
4817 builtin_types[ENUM] = VALUE;
4818 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
4819 def_fn_type (ENUM, RETURN, 0, 0);
4820 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
4821 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
4822 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
4823 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
4824 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
4825 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
4826 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
4827 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
4828 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
4829 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
4830 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4831 ARG6) \
4832 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
4833 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4834 ARG6, ARG7) \
4835 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
4836 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
4837 def_fn_type (ENUM, RETURN, 1, 0);
4838 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
4839 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
4840 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
4841 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
4842 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
4843 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
4844 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
4845 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
4846 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
4847 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
4848 #define DEF_POINTER_TYPE(ENUM, TYPE) \
4849 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
4851 #include "builtin-types.def"
4853 #undef DEF_PRIMITIVE_TYPE
4854 #undef DEF_FUNCTION_TYPE_1
4855 #undef DEF_FUNCTION_TYPE_2
4856 #undef DEF_FUNCTION_TYPE_3
4857 #undef DEF_FUNCTION_TYPE_4
4858 #undef DEF_FUNCTION_TYPE_5
4859 #undef DEF_FUNCTION_TYPE_6
4860 #undef DEF_FUNCTION_TYPE_VAR_0
4861 #undef DEF_FUNCTION_TYPE_VAR_1
4862 #undef DEF_FUNCTION_TYPE_VAR_2
4863 #undef DEF_FUNCTION_TYPE_VAR_3
4864 #undef DEF_FUNCTION_TYPE_VAR_4
4865 #undef DEF_FUNCTION_TYPE_VAR_5
4866 #undef DEF_POINTER_TYPE
4867 builtin_types[(int) BT_LAST] = NULL_TREE;
4870 /* ----------------------------------------------------------------------- *
4871 * BUILTIN ATTRIBUTES *
4872 * ----------------------------------------------------------------------- */
4874 enum built_in_attribute
4876 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
4877 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
4878 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
4879 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
4880 #include "builtin-attrs.def"
4881 #undef DEF_ATTR_NULL_TREE
4882 #undef DEF_ATTR_INT
4883 #undef DEF_ATTR_IDENT
4884 #undef DEF_ATTR_TREE_LIST
4885 ATTR_LAST
4888 static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
4890 static void
4891 install_builtin_attributes (void)
4893 /* Fill in the built_in_attributes array. */
4894 #define DEF_ATTR_NULL_TREE(ENUM) \
4895 built_in_attributes[(int) ENUM] = NULL_TREE;
4896 #define DEF_ATTR_INT(ENUM, VALUE) \
4897 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
4898 #define DEF_ATTR_IDENT(ENUM, STRING) \
4899 built_in_attributes[(int) ENUM] = get_identifier (STRING);
4900 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
4901 built_in_attributes[(int) ENUM] \
4902 = tree_cons (built_in_attributes[(int) PURPOSE], \
4903 built_in_attributes[(int) VALUE], \
4904 built_in_attributes[(int) CHAIN]);
4905 #include "builtin-attrs.def"
4906 #undef DEF_ATTR_NULL_TREE
4907 #undef DEF_ATTR_INT
4908 #undef DEF_ATTR_IDENT
4909 #undef DEF_ATTR_TREE_LIST
4912 /* Handle a "const" attribute; arguments as in
4913 struct attribute_spec.handler. */
4915 static tree
4916 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
4917 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
4918 bool *no_add_attrs)
4920 if (TREE_CODE (*node) == FUNCTION_DECL)
4921 TREE_READONLY (*node) = 1;
4922 else
4923 *no_add_attrs = true;
4925 return NULL_TREE;
4928 /* Handle a "nothrow" attribute; arguments as in
4929 struct attribute_spec.handler. */
4931 static tree
4932 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
4933 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
4934 bool *no_add_attrs)
4936 if (TREE_CODE (*node) == FUNCTION_DECL)
4937 TREE_NOTHROW (*node) = 1;
4938 else
4939 *no_add_attrs = true;
4941 return NULL_TREE;
4944 /* Handle a "pure" attribute; arguments as in
4945 struct attribute_spec.handler. */
4947 static tree
4948 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
4949 int ARG_UNUSED (flags), bool *no_add_attrs)
4951 if (TREE_CODE (*node) == FUNCTION_DECL)
4952 DECL_PURE_P (*node) = 1;
4953 /* ??? TODO: Support types. */
4954 else
4956 warning (OPT_Wattributes, "%qs attribute ignored",
4957 IDENTIFIER_POINTER (name));
4958 *no_add_attrs = true;
4961 return NULL_TREE;
4964 /* Handle a "no vops" attribute; arguments as in
4965 struct attribute_spec.handler. */
4967 static tree
4968 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
4969 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
4970 bool *ARG_UNUSED (no_add_attrs))
4972 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
4973 DECL_IS_NOVOPS (*node) = 1;
4974 return NULL_TREE;
4977 /* Helper for nonnull attribute handling; fetch the operand number
4978 from the attribute argument list. */
4980 static bool
4981 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
4983 /* Verify the arg number is a constant. */
4984 if (TREE_CODE (arg_num_expr) != INTEGER_CST
4985 || TREE_INT_CST_HIGH (arg_num_expr) != 0)
4986 return false;
4988 *valp = TREE_INT_CST_LOW (arg_num_expr);
4989 return true;
4992 /* Handle the "nonnull" attribute. */
4993 static tree
4994 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
4995 tree args, int ARG_UNUSED (flags),
4996 bool *no_add_attrs)
4998 tree type = *node;
4999 unsigned HOST_WIDE_INT attr_arg_num;
5001 /* If no arguments are specified, all pointer arguments should be
5002 non-null. Verify a full prototype is given so that the arguments
5003 will have the correct types when we actually check them later. */
5004 if (!args)
5006 if (!TYPE_ARG_TYPES (type))
5008 error ("nonnull attribute without arguments on a non-prototype");
5009 *no_add_attrs = true;
5011 return NULL_TREE;
5014 /* Argument list specified. Verify that each argument number references
5015 a pointer argument. */
5016 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
5018 tree argument;
5019 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
5021 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
5023 error ("nonnull argument has invalid operand number (argument %lu)",
5024 (unsigned long) attr_arg_num);
5025 *no_add_attrs = true;
5026 return NULL_TREE;
5029 argument = TYPE_ARG_TYPES (type);
5030 if (argument)
5032 for (ck_num = 1; ; ck_num++)
5034 if (!argument || ck_num == arg_num)
5035 break;
5036 argument = TREE_CHAIN (argument);
5039 if (!argument
5040 || TREE_CODE (TREE_VALUE (argument)) == VOID_TYPE)
5042 error ("nonnull argument with out-of-range operand number "
5043 "(argument %lu, operand %lu)",
5044 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5045 *no_add_attrs = true;
5046 return NULL_TREE;
5049 if (TREE_CODE (TREE_VALUE (argument)) != POINTER_TYPE)
5051 error ("nonnull argument references non-pointer operand "
5052 "(argument %lu, operand %lu)",
5053 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5054 *no_add_attrs = true;
5055 return NULL_TREE;
5060 return NULL_TREE;
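/* Illustrative note on the checks above, using GNU C syntax:
       extern char *copy (char *dst, const char *src)
         __attribute__ ((nonnull (1, 2)));
   each operand number must designate a pointer parameter, while the bare
   form __attribute__ ((nonnull)) requires a full prototype and then applies
   to every pointer parameter.  ("copy" is just a made-up name here.)  */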
5063 /* Handle a "sentinel" attribute. */
5065 static tree
5066 handle_sentinel_attribute (tree *node, tree name, tree args,
5067 int ARG_UNUSED (flags), bool *no_add_attrs)
5069 tree params = TYPE_ARG_TYPES (*node);
5071 if (!params)
5073 warning (OPT_Wattributes,
5074 "%qs attribute requires prototypes with named arguments",
5075 IDENTIFIER_POINTER (name));
5076 *no_add_attrs = true;
5078 else
5080 while (TREE_CHAIN (params))
5081 params = TREE_CHAIN (params);
5083 if (VOID_TYPE_P (TREE_VALUE (params)))
5085 warning (OPT_Wattributes,
5086 "%qs attribute only applies to variadic functions",
5087 IDENTIFIER_POINTER (name));
5088 *no_add_attrs = true;
5092 if (args)
5094 tree position = TREE_VALUE (args);
5096 if (TREE_CODE (position) != INTEGER_CST)
5098 warning (0, "requested position is not an integer constant");
5099 *no_add_attrs = true;
5101 else
5103 if (tree_int_cst_lt (position, integer_zero_node))
5105 warning (0, "requested position is less than zero");
5106 *no_add_attrs = true;
5111 return NULL_TREE;
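/* Illustrative note: "sentinel" only makes sense on variadic functions with
   at least one named parameter, e.g. in GNU C
       extern void build_list (const char *first, ...)
         __attribute__ ((sentinel));
   an optional non-negative position, as in sentinel (1), counts back from
   the end of the argument list.  This handler only validates the request;
   it does not itself check calls for a trailing NULL.  */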
5114 /* Handle a "noreturn" attribute; arguments as in
5115 struct attribute_spec.handler. */
5117 static tree
5118 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5119 int ARG_UNUSED (flags), bool *no_add_attrs)
5121 tree type = TREE_TYPE (*node);
5123 /* See FIXME comment in c_common_attribute_table. */
5124 if (TREE_CODE (*node) == FUNCTION_DECL)
5125 TREE_THIS_VOLATILE (*node) = 1;
5126 else if (TREE_CODE (type) == POINTER_TYPE
5127 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
5128 TREE_TYPE (*node)
5129 = build_pointer_type
5130 (build_type_variant (TREE_TYPE (type),
5131 TYPE_READONLY (TREE_TYPE (type)), 1));
5132 else
5134 warning (OPT_Wattributes, "%qs attribute ignored",
5135 IDENTIFIER_POINTER (name));
5136 *no_add_attrs = true;
5139 return NULL_TREE;
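/* Illustrative note on the two forms accepted above, in GNU C syntax:
       extern void fatal (const char *) __attribute__ ((noreturn));
       void (*handler) (int) __attribute__ ((noreturn));
   the first sets TREE_THIS_VOLATILE on the FUNCTION_DECL, the second rebuilds
   the pointed-to FUNCTION_TYPE as a volatile-qualified variant, which is how
   "does not return" is encoded at the type level.  */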
5142 /* Handle a "malloc" attribute; arguments as in
5143 struct attribute_spec.handler. */
5145 static tree
5146 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5147 int ARG_UNUSED (flags), bool *no_add_attrs)
5149 if (TREE_CODE (*node) == FUNCTION_DECL
5150 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
5151 DECL_IS_MALLOC (*node) = 1;
5152 else
5154 warning (OPT_Wattributes, "%qs attribute ignored",
5155 IDENTIFIER_POINTER (name));
5156 *no_add_attrs = true;
5159 return NULL_TREE;
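/* Illustrative note: the guard above means "malloc" only sticks to functions
   returning a pointer, e.g.
       extern void *xmalloc (size_t) __attribute__ ((malloc));
   DECL_IS_MALLOC then tells the alias machinery that the returned pointer
   does not alias any other object live before the call.  */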
5162 /* Fake handler for attributes we don't properly support. */
5164 tree
5165 fake_attribute_handler (tree * ARG_UNUSED (node),
5166 tree ARG_UNUSED (name),
5167 tree ARG_UNUSED (args),
5168 int ARG_UNUSED (flags),
5169 bool * ARG_UNUSED (no_add_attrs))
5171 return NULL_TREE;
5174 /* Handle a "type_generic" attribute. */
5176 static tree
5177 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
5178 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5179 bool * ARG_UNUSED (no_add_attrs))
5181 tree params;
5183 /* Ensure we have a function type. */
5184 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
5186 params = TYPE_ARG_TYPES (*node);
5187 while (params && ! VOID_TYPE_P (TREE_VALUE (params)))
5188 params = TREE_CHAIN (params);
5190 /* Ensure we have a variadic function. */
5191 gcc_assert (!params);
5193 return NULL_TREE;
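/* Presumably this attribute only ever comes from builtin-attrs.def entries
   attached to variadic builtins, e.g. the floating-point classification
   builtin declared as   int __builtin_isnan (...);   which is exactly the
   shape the two assertions above enforce.  */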
5196 /* Handle a "vector_size" attribute; arguments as in
5197 struct attribute_spec.handler. */
5199 static tree
5200 handle_vector_size_attribute (tree *node, tree name, tree args,
5201 int ARG_UNUSED (flags),
5202 bool *no_add_attrs)
5204 unsigned HOST_WIDE_INT vecsize, nunits;
5205 enum machine_mode orig_mode;
5206 tree type = *node, new_type, size;
5208 *no_add_attrs = true;
5210 size = TREE_VALUE (args);
5212 if (!host_integerp (size, 1))
5214 warning (OPT_Wattributes, "%qs attribute ignored",
5215 IDENTIFIER_POINTER (name));
5216 return NULL_TREE;
5219 /* Get the vector size (in bytes). */
5220 vecsize = tree_low_cst (size, 1);
5222 /* We need to provide for vector pointers, vector arrays, and
5223 functions returning vectors. For example:
5225 __attribute__((vector_size(16))) short *foo;
5227 In this case, the mode is SI, but the type being modified is
5228 HI, so we need to look further. */
5230 while (POINTER_TYPE_P (type)
5231 || TREE_CODE (type) == FUNCTION_TYPE
5232 || TREE_CODE (type) == METHOD_TYPE
5233 || TREE_CODE (type) == ARRAY_TYPE
5234 || TREE_CODE (type) == OFFSET_TYPE)
5235 type = TREE_TYPE (type);
5237 /* Get the mode of the type being modified. */
5238 orig_mode = TYPE_MODE (type);
5240 if ((!INTEGRAL_TYPE_P (type)
5241 && !SCALAR_FLOAT_TYPE_P (type)
5242 && !FIXED_POINT_TYPE_P (type))
5243 || (!SCALAR_FLOAT_MODE_P (orig_mode)
5244 && GET_MODE_CLASS (orig_mode) != MODE_INT
5245 && !ALL_SCALAR_FIXED_POINT_MODE_P (orig_mode))
5246 || !host_integerp (TYPE_SIZE_UNIT (type), 1)
5247 || TREE_CODE (type) == BOOLEAN_TYPE)
5249 error ("invalid vector type for attribute %qs",
5250 IDENTIFIER_POINTER (name));
5251 return NULL_TREE;
5254 if (vecsize % tree_low_cst (TYPE_SIZE_UNIT (type), 1))
5256 error ("vector size not an integral multiple of component size");
5257 return NULL_TREE;
5260 if (vecsize == 0)
5262 error ("zero vector size");
5263 return NULL_TREE;
5266 /* Calculate how many units fit in the vector. */
5267 nunits = vecsize / tree_low_cst (TYPE_SIZE_UNIT (type), 1);
5268 if (nunits & (nunits - 1))
5270 error ("number of components of the vector not a power of two");
5271 return NULL_TREE;
5274 new_type = build_vector_type (type, nunits);
5276 /* Build back pointers if needed. */
5277 *node = lang_hooks.types.reconstruct_complex_type (*node, new_type);
5279 return NULL_TREE;
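/* Worked example, assuming 32-bit int: for the classic GNU C usage
       typedef int v4si __attribute__ ((vector_size (16)));
   type is int, vecsize is 16, nunits is 16 / 4 = 4, and *node becomes a
   4-element integer vector type.  For the "short *" case quoted in the
   comment above, only the pointed-to type is vectorized and the pointer
   layer is rebuilt around it by reconstruct_complex_type.  */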
5282 /* Handle a "vector_type" attribute; arguments as in
5283 struct attribute_spec.handler. */
5285 static tree
5286 handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5287 int ARG_UNUSED (flags),
5288 bool *no_add_attrs)
5290 /* Vector representative type and size. */
5291 tree rep_type = *node;
5292 tree rep_size = TYPE_SIZE_UNIT (rep_type);
5293 tree rep_name;
5295 /* Vector size in bytes and number of units. */
5296 unsigned HOST_WIDE_INT vec_bytes, vec_units;
5298 /* Vector element type and mode. */
5299 tree elem_type;
5300 enum machine_mode elem_mode;
5302 *no_add_attrs = true;
5304 /* Get the representative array type, possibly nested within a
5305 padding record e.g. for alignment purposes. */
5307 if (TYPE_IS_PADDING_P (rep_type))
5308 rep_type = TREE_TYPE (TYPE_FIELDS (rep_type));
5310 if (TREE_CODE (rep_type) != ARRAY_TYPE)
5312 error ("attribute %qs applies to array types only",
5313 IDENTIFIER_POINTER (name));
5314 return NULL_TREE;
5317 /* Silently punt on variable sizes. We can't make vector types for them, we
5318 need to ignore them on front-end generated subtypes of unconstrained
5319 bases, and this attribute is for binding implementors, not end-users, so
5320 we should never get there from legitimate explicit uses. */
5322 if (!host_integerp (rep_size, 1))
5323 return NULL_TREE;
5325 /* Get the element type/mode and check this is something we know
5326 how to make vectors of. */
5328 elem_type = TREE_TYPE (rep_type);
5329 elem_mode = TYPE_MODE (elem_type);
5331 if ((!INTEGRAL_TYPE_P (elem_type)
5332 && !SCALAR_FLOAT_TYPE_P (elem_type)
5333 && !FIXED_POINT_TYPE_P (elem_type))
5334 || (!SCALAR_FLOAT_MODE_P (elem_mode)
5335 && GET_MODE_CLASS (elem_mode) != MODE_INT
5336 && !ALL_SCALAR_FIXED_POINT_MODE_P (elem_mode))
5337 || !host_integerp (TYPE_SIZE_UNIT (elem_type), 1))
5339 error ("invalid element type for attribute %qs",
5340 IDENTIFIER_POINTER (name));
5341 return NULL_TREE;
5344 /* Sanity check the vector size and element type consistency. */
5346 vec_bytes = tree_low_cst (rep_size, 1);
5348 if (vec_bytes % tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1))
5350 error ("vector size not an integral multiple of component size");
5351 return NULL_TREE;
5354 if (vec_bytes == 0)
5356 error ("zero vector size");
5357 return NULL_TREE;
5360 vec_units = vec_bytes / tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1);
5361 if (vec_units & (vec_units - 1))
5363 error ("number of components of the vector not a power of two");
5364 return NULL_TREE;
5367 /* Build the vector type and replace. */
5369 *node = build_vector_type (elem_type, vec_units);
5370 rep_name = TYPE_NAME (rep_type);
5371 if (TREE_CODE (rep_name) == TYPE_DECL)
5372 rep_name = DECL_NAME (rep_name);
5373 TYPE_NAME (*node) = rep_name;
5374 TYPE_REPRESENTATIVE_ARRAY (*node) = rep_type;
5376 return NULL_TREE;
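/* Hedged sketch of the intended use: given a representative array type such
   as a constrained array of four 32-bit floating-point components (16 bytes),
   e.g. from an AltiVec binding, the code above replaces *node with a 4-unit
   vector type carrying the array's name, and records the array itself in
   TYPE_REPRESENTATIVE_ARRAY so that conversions between the vector view and
   the array view remain possible.  */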
5379 /* ----------------------------------------------------------------------- *
5380 * BUILTIN FUNCTIONS *
5381 * ----------------------------------------------------------------------- */
5383 /* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
5384 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
5385 if nonansi_p and flag_no_nonansi_builtin. */
5387 static void
5388 def_builtin_1 (enum built_in_function fncode,
5389 const char *name,
5390 enum built_in_class fnclass,
5391 tree fntype, tree libtype,
5392 bool both_p, bool fallback_p,
5393 bool nonansi_p ATTRIBUTE_UNUSED,
5394 tree fnattrs, bool implicit_p)
5396 tree decl;
5397 const char *libname;
5399 /* Preserve an already installed decl. It most likely was set up in advance
5400 (e.g. as part of the internal builtins) for specific reasons. */
5401 if (built_in_decls[(int) fncode] != NULL_TREE)
5402 return;
5404 gcc_assert ((!both_p && !fallback_p)
5405 || !strncmp (name, "__builtin_",
5406 strlen ("__builtin_")));
5408 libname = name + strlen ("__builtin_");
5409 decl = add_builtin_function (name, fntype, fncode, fnclass,
5410 (fallback_p ? libname : NULL),
5411 fnattrs);
5412 if (both_p)
5413 /* ??? This is normally further controlled by command-line options
5414 like -fno-builtin, but we don't have them for Ada. */
5415 add_builtin_function (libname, libtype, fncode, fnclass,
5416 NULL, fnattrs);
5418 built_in_decls[(int) fncode] = decl;
5419 if (implicit_p)
5420 implicit_built_in_decls[(int) fncode] = decl;
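/* Illustrative note: for an entry like __builtin_memcpy with both_p set, the
   code above declares both the "__builtin_memcpy" decl recorded in
   built_in_decls and a plain "memcpy" decl; with fallback_p set, the libname
   ("memcpy") is also passed to add_builtin_function so that the builtin can
   fall back to a call to the library routine when it is not expanded
   inline.  */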
5423 static int flag_isoc94 = 0;
5424 static int flag_isoc99 = 0;
5426 /* Install what the common builtins.def offers. */
5428 static void
5429 install_builtin_functions (void)
5431 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
5432 NONANSI_P, ATTRS, IMPLICIT, COND) \
5433 if (NAME && COND) \
5434 def_builtin_1 (ENUM, NAME, CLASS, \
5435 builtin_types[(int) TYPE], \
5436 builtin_types[(int) LIBTYPE], \
5437 BOTH_P, FALLBACK_P, NONANSI_P, \
5438 built_in_attributes[(int) ATTRS], IMPLICIT);
5439 #include "builtins.def"
5440 #undef DEF_BUILTIN
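/* For illustration only, assuming the usual shape of builtins.def: a memcpy
   entry expands through the DEF_BUILTIN above into roughly
       if ("__builtin_memcpy" && true)
         def_builtin_1 (BUILT_IN_MEMCPY, "__builtin_memcpy", BUILT_IN_NORMAL,
                        builtin_types[(int) BT_FN_PTR_PTR_CONST_PTR_SIZE],
                        builtin_types[(int) BT_FN_PTR_PTR_CONST_PTR_SIZE],
                        true, true, false,
                        built_in_attributes[(int) ATTR_NOTHROW_NONNULL], true);
   The dummy flag_isoc94/flag_isoc99 definitions above exist only because the
   expansions of some entries reference those flags.  */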
5443 /* ----------------------------------------------------------------------- *
5444 * BUILTIN FUNCTIONS *
5445 * ----------------------------------------------------------------------- */
5447 /* Install the builtin functions we might need. */
5449 void
5450 gnat_install_builtins (void)
5452 install_builtin_elementary_types ();
5453 install_builtin_function_types ();
5454 install_builtin_attributes ();
5456 /* Install builtins used by generic middle-end pieces first. Some of these
5457 know about internal specificities and set their attributes accordingly, for
5458 instance __builtin_alloca vs no-throw and -fstack-check; the generic
5459 definitions from builtins.def are then ignored for these builtins. */
5460 build_common_builtin_nodes ();
5462 /* Now, install the target specific builtins, such as the AltiVec family on
5463 ppc, and the common set as exposed by builtins.def. */
5464 targetm.init_builtins ();
5465 install_builtin_functions ();
5468 #include "gt-ada-utils.h"
5469 #include "gtype-ada.h"