gcc/ada/gcc-interface/utils.c
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2009, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
26 /* We have attribute handlers using C specific format specifiers in warning
27 messages. Make sure they are properly recognized. */
28 #define GCC_DIAG_STYLE __gcc_cdiag__
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "tm.h"
34 #include "tree.h"
35 #include "flags.h"
36 #include "defaults.h"
37 #include "toplev.h"
38 #include "output.h"
39 #include "ggc.h"
40 #include "debug.h"
41 #include "convert.h"
42 #include "target.h"
43 #include "function.h"
44 #include "cgraph.h"
45 #include "tree-inline.h"
46 #include "tree-iterator.h"
47 #include "gimple.h"
48 #include "tree-dump.h"
49 #include "pointer-set.h"
50 #include "langhooks.h"
51 #include "rtl.h"
53 #include "ada.h"
54 #include "types.h"
55 #include "atree.h"
56 #include "elists.h"
57 #include "namet.h"
58 #include "nlists.h"
59 #include "stringt.h"
60 #include "uintp.h"
61 #include "fe.h"
62 #include "sinfo.h"
63 #include "einfo.h"
64 #include "ada-tree.h"
65 #include "gigi.h"
67 #ifndef MAX_FIXED_MODE_SIZE
68 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
69 #endif
71 #ifndef MAX_BITS_PER_WORD
72 #define MAX_BITS_PER_WORD BITS_PER_WORD
73 #endif
75 /* If nonzero, pretend we are allocating at global level. */
76 int force_global;
78 /* Tree nodes for the various types and decls we create. */
79 tree gnat_std_decls[(int) ADT_LAST];
81 /* Functions to call for each of the possible raise reasons. */
82 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
84 /* Forward declarations for handlers of attributes. */
85 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
86 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
87 static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
88 static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
89 static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
90 static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
91 static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
92 static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
93 static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
95 /* Fake handler for attributes we don't properly support, typically because
96 they'd require dragging in a lot of the common C front-end circuitry. */
97 static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
99 /* Table of machine-independent internal attributes for Ada. We support
100 this minimal set of attributes to accommodate the needs of builtins. */
101 const struct attribute_spec gnat_internal_attribute_table[] =
103 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
104 { "const", 0, 0, true, false, false, handle_const_attribute },
105 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute },
106 { "pure", 0, 0, true, false, false, handle_pure_attribute },
107 { "no vops", 0, 0, true, false, false, handle_novops_attribute },
108 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute },
109 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute },
110 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute },
111 { "malloc", 0, 0, true, false, false, handle_malloc_attribute },
112 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute },
114 /* ??? format and format_arg are heavy and not supported, which actually
115 prevents support for stdio builtins, which we however declare as part
116 of the common builtins.def contents. */
117 { "format", 3, 3, false, true, true, fake_attribute_handler },
118 { "format_arg", 1, 1, false, true, true, fake_attribute_handler },
120 { NULL, 0, 0, false, false, false, NULL }
123 /* Associates a GNAT tree node to a GCC tree node. It is used in
124 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
125 of `save_gnu_tree' for more info. */
126 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
128 #define GET_GNU_TREE(GNAT_ENTITY) \
129 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
131 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
132 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
134 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
135 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
137 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
138 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
140 #define GET_DUMMY_NODE(GNAT_ENTITY) \
141 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
143 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
144 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
146 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
147 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
149 /* This variable keeps a table for types for each precision so that we only
150 allocate each of them once. Signed and unsigned types are kept separate.
152 Note that these types are only used when fold-const requests something
153 special. Perhaps we should NOT share these types; we'll see how it
154 goes later. */
155 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
157 /* Likewise for float types, but record these by mode. */
158 static GTY(()) tree float_types[NUM_MACHINE_MODES];
160 /* For each binding contour we allocate a binding_level structure to indicate
161 the binding depth. */
163 struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
164 /* The binding level containing this one (the enclosing binding level). */
165 struct gnat_binding_level *chain;
166 /* The BLOCK node for this level. */
167 tree block;
168 /* If nonzero, the setjmp buffer that needs to be updated for any
169 variable-sized definition within this context. */
170 tree jmpbuf_decl;
173 /* The binding level currently in effect. */
174 static GTY(()) struct gnat_binding_level *current_binding_level;
176 /* A chain of gnat_binding_level structures awaiting reuse. */
177 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
179 /* An array of global declarations. */
180 static GTY(()) VEC(tree,gc) *global_decls;
182 /* An array of builtin function declarations. */
183 static GTY(()) VEC(tree,gc) *builtin_decls;
185 /* An array of global renaming pointers. */
186 static GTY(()) VEC(tree,gc) *global_renaming_pointers;
188 /* A chain of unused BLOCK nodes. */
189 static GTY((deletable)) tree free_block_chain;
191 static tree merge_sizes (tree, tree, tree, bool, bool);
192 static tree compute_related_constant (tree, tree);
193 static tree split_plus (tree, tree *);
194 static void gnat_gimplify_function (tree);
195 static tree float_type_for_precision (int, enum machine_mode);
196 static tree convert_to_fat_pointer (tree, tree);
197 static tree convert_to_thin_pointer (tree, tree);
198 static tree make_descriptor_field (const char *,tree, tree, tree);
199 static bool potential_alignment_gap (tree, tree, tree);
201 /* Initialize the association of GNAT nodes to GCC trees. */
203 void
204 init_gnat_to_gnu (void)
206 associate_gnat_to_gnu
207 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
210 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
211 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
212 a ..._DECL node. If NO_CHECK is true, the latter check is suppressed.
214 If GNU_DECL is zero, a previous association is to be reset. */
216 void
217 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
219 /* Check that GNAT_ENTITY is not already defined and that it is being set
220 to something which is a decl. Raise gigi 401 if not. Usually, this
221 means GNAT_ENTITY is defined twice, but occasionally is due to some
222 Gigi problem. */
223 gcc_assert (!(gnu_decl
224 && (PRESENT_GNU_TREE (gnat_entity)
225 || (!no_check && !DECL_P (gnu_decl)))));
227 SET_GNU_TREE (gnat_entity, gnu_decl);
230 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
231 Return the ..._DECL node that was associated with it. If there is no tree
232 node associated with GNAT_ENTITY, abort.
234 In some cases, such as delayed elaboration or expressions that need to
235 be elaborated only once, GNAT_ENTITY is really not an entity. */
237 tree
238 get_gnu_tree (Entity_Id gnat_entity)
240 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
241 return GET_GNU_TREE (gnat_entity);
244 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
246 bool
247 present_gnu_tree (Entity_Id gnat_entity)
249 return PRESENT_GNU_TREE (gnat_entity);
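/* Editor's illustrative sketch, not part of the original file: the three
   routines above are typically used in a lookup-or-create pattern when
   translating an entity.  The helper below is hypothetical and compiled
   out; it only shows the intended protocol.  */
#if 0
static tree
example_entity_to_gnu (Entity_Id gnat_entity)
{
  tree gnu_decl;

  /* Reuse a previously recorded translation if there is one.  */
  if (present_gnu_tree (gnat_entity))
    return get_gnu_tree (gnat_entity);

  /* Otherwise build a ..._DECL for the entity and record it so that later
     references get the same node.  */
  gnu_decl = build_decl (VAR_DECL, get_entity_name (gnat_entity),
                         integer_type_node);
  save_gnu_tree (gnat_entity, gnu_decl, false);
  return gnu_decl;
}
#endif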
252 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
254 void
255 init_dummy_type (void)
257 dummy_node_table
258 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
261 /* Make a dummy type corresponding to GNAT_TYPE. */
263 tree
264 make_dummy_type (Entity_Id gnat_type)
266 Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
267 tree gnu_type;
269 /* If there is an equivalent type, get its underlying type. */
270 if (Present (gnat_underlying))
271 gnat_underlying = Underlying_Type (gnat_underlying);
273 /* If there was no equivalent type (can only happen when just annotating
274 types) or underlying type, go back to the original type. */
275 if (No (gnat_underlying))
276 gnat_underlying = gnat_type;
278 /* If there is already a dummy type, use that one. Else make one. */
279 if (PRESENT_DUMMY_NODE (gnat_underlying))
280 return GET_DUMMY_NODE (gnat_underlying);
282 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
283 an ENUMERAL_TYPE. */
284 gnu_type = make_node (Is_Record_Type (gnat_underlying)
285 ? tree_code_for_record_type (gnat_underlying)
286 : ENUMERAL_TYPE);
287 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
288 TYPE_DUMMY_P (gnu_type) = 1;
289 TYPE_STUB_DECL (gnu_type)
290 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
291 if (AGGREGATE_TYPE_P (gnu_type))
292 TYPE_BY_REFERENCE_P (gnu_type) = Is_By_Reference_Type (gnat_type);
294 SET_DUMMY_NODE (gnat_underlying, gnu_type);
296 return gnu_type;
299 /* Return nonzero if we are currently in the global binding level. */
301 int
302 global_bindings_p (void)
304 return ((force_global || !current_function_decl) ? -1 : 0);
307 /* Enter a new binding level. */
309 void
310 gnat_pushlevel ()
312 struct gnat_binding_level *newlevel = NULL;
314 /* Reuse a struct for this binding level, if there is one. */
315 if (free_binding_level)
317 newlevel = free_binding_level;
318 free_binding_level = free_binding_level->chain;
320 else
321 newlevel
322 = (struct gnat_binding_level *)
323 ggc_alloc (sizeof (struct gnat_binding_level));
325 /* Use a free BLOCK, if any; otherwise, allocate one. */
326 if (free_block_chain)
328 newlevel->block = free_block_chain;
329 free_block_chain = BLOCK_CHAIN (free_block_chain);
330 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
332 else
333 newlevel->block = make_node (BLOCK);
335 /* Point the BLOCK we just made to its parent. */
336 if (current_binding_level)
337 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
339 BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
340 TREE_USED (newlevel->block) = 1;
342 /* Add this level to the front of the chain (stack) of levels that are
343 active. */
344 newlevel->chain = current_binding_level;
345 newlevel->jmpbuf_decl = NULL_TREE;
346 current_binding_level = newlevel;
349 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
350 and point FNDECL to this BLOCK. */
352 void
353 set_current_block_context (tree fndecl)
355 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
356 DECL_INITIAL (fndecl) = current_binding_level->block;
359 /* Set the jmpbuf_decl for the current binding level to DECL. */
361 void
362 set_block_jmpbuf_decl (tree decl)
364 current_binding_level->jmpbuf_decl = decl;
367 /* Get the jmpbuf_decl, if any, for the current binding level. */
369 tree
370 get_block_jmpbuf_decl ()
372 return current_binding_level->jmpbuf_decl;
375 /* Exit a binding level. Set any BLOCK into the current code group. */
377 void
378 gnat_poplevel ()
380 struct gnat_binding_level *level = current_binding_level;
381 tree block = level->block;
383 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
384 BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));
386 /* If this is a function-level BLOCK, don't do anything. Otherwise, if there
387 are no variables, free the block and merge its subblocks into those of its
388 parent block; otherwise, add it to the list of its parent block. */
389 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
391 else if (BLOCK_VARS (block) == NULL_TREE)
393 BLOCK_SUBBLOCKS (level->chain->block)
394 = chainon (BLOCK_SUBBLOCKS (block),
395 BLOCK_SUBBLOCKS (level->chain->block));
396 BLOCK_CHAIN (block) = free_block_chain;
397 free_block_chain = block;
399 else
401 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
402 BLOCK_SUBBLOCKS (level->chain->block) = block;
403 TREE_USED (block) = 1;
404 set_block_for_group (block);
407 /* Free this binding structure. */
408 current_binding_level = level->chain;
409 level->chain = free_binding_level;
410 free_binding_level = level;
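/* Editor's note, not part of the original file: binding levels are used in
   a strictly paired fashion around each construct that owns locals, e.g.

     gnat_pushlevel ();
     ... gnat_pushdecl each local ..._DECL, possibly set_block_jmpbuf_decl ...
     gnat_poplevel ();

   so the BLOCK built for the level is either recycled or chained under its
   parent by gnat_poplevel above.  */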
414 /* Records a ..._DECL node DECL as belonging to the current lexical scope
415 and uses GNAT_NODE for location information and propagating flags. */
417 void
418 gnat_pushdecl (tree decl, Node_Id gnat_node)
420 /* If this decl is public external or at toplevel, there is no context.
421 But PARM_DECLs always go in the level of their function. */
422 if (TREE_CODE (decl) != PARM_DECL
423 && ((DECL_EXTERNAL (decl) && TREE_PUBLIC (decl))
424 || global_bindings_p ()))
425 DECL_CONTEXT (decl) = 0;
426 else
428 DECL_CONTEXT (decl) = current_function_decl;
430 /* Functions imported in another function are not really nested. */
431 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_PUBLIC (decl))
432 DECL_NO_STATIC_CHAIN (decl) = 1;
435 TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));
437 /* Set the location of DECL and emit a declaration for it. */
438 if (Present (gnat_node))
439 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
440 add_decl_expr (decl, gnat_node);
442 /* Put the declaration on the list. The list of declarations is in reverse
443 order. The list will be reversed later. Put global variables in the
444 globals list and builtin functions in a dedicated list to speed up
445 further lookups. Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
446 the list, as they will cause trouble with the debugger and aren't needed
447 anyway. */
448 if (TREE_CODE (decl) != TYPE_DECL
449 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
451 if (global_bindings_p ())
453 VEC_safe_push (tree, gc, global_decls, decl);
455 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
456 VEC_safe_push (tree, gc, builtin_decls, decl);
458 else
460 TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
461 BLOCK_VARS (current_binding_level->block) = decl;
465 /* For the declaration of a type, set its name if it either is not already
466 set or if the previous type name was not derived from a source name.
467 We'd rather have the type named with a real name and all the pointer
468 types to the same object have the same POINTER_TYPE node. Code in the
469 equivalent function of c-decl.c makes a copy of the type node here, but
470 that may cause us trouble with incomplete types. We make an exception
471 for fat pointer types because the compiler automatically builds them
472 for unconstrained array types and the debugger uses them to represent
473 both these and pointers to these. */
474 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
476 tree t = TREE_TYPE (decl);
478 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
480 else if (TYPE_FAT_POINTER_P (t))
482 tree tt = build_variant_type_copy (t);
483 TYPE_NAME (tt) = decl;
484 TREE_USED (tt) = TREE_USED (t);
485 TREE_TYPE (decl) = tt;
486 DECL_ORIGINAL_TYPE (decl) = t;
487 t = NULL_TREE;
489 else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
491 else
492 t = NULL_TREE;
494 /* Propagate the name to all the variants. This is needed for
495 the type qualifiers machinery to work properly. */
496 if (t)
497 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
498 TYPE_NAME (t) = decl;
502 /* Do little here. Set up the standard declarations later after the
503 front end has been run. */
505 void
506 gnat_init_decl_processing (void)
508 /* Make the binding_level structure for global names. */
509 current_function_decl = 0;
510 current_binding_level = 0;
511 free_binding_level = 0;
512 gnat_pushlevel ();
514 build_common_tree_nodes (true, true);
516 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
517 corresponding to the width of Pmode. In most cases when ptr_mode
518 and Pmode differ, C will use the width of ptr_mode for SIZETYPE.
519 But we get far better code using the width of Pmode. */
520 size_type_node = gnat_type_for_mode (Pmode, 0);
521 set_sizetype (size_type_node);
523 /* In Ada, we use a 1-bit unsigned type for the default boolean type. */
524 boolean_type_node = make_node (BOOLEAN_TYPE);
525 TYPE_PRECISION (boolean_type_node) = 1;
526 fixup_unsigned_type (boolean_type_node);
527 TYPE_RM_SIZE (boolean_type_node) = bitsize_int (1);
529 build_common_tree_nodes_2 (0);
531 ptr_void_type_node = build_pointer_type (void_type_node);
534 /* Record TYPE as a builtin type for Ada. NAME is the name of the type. */
536 void
537 record_builtin_type (const char *name, tree type)
539 tree type_decl = build_decl (TYPE_DECL, get_identifier (name), type);
541 gnat_pushdecl (type_decl, Empty);
543 if (debug_hooks->type_decl)
544 debug_hooks->type_decl (type_decl, false);
547 /* Given a record type RECORD_TYPE and a chain of FIELD_DECL nodes FIELDLIST,
548 finish constructing the record or union type. If REP_LEVEL is zero, this
549 record has no representation clause and so will be entirely laid out here.
550 If REP_LEVEL is one, this record has a representation clause and has been
551 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
552 this record is derived from a parent record and thus inherits its layout;
553 only make a pass on the fields to finalize them. If DO_NOT_FINALIZE is
554 true, the record type is expected to be modified afterwards so it will
555 not be sent to the back-end for finalization. */
557 void
558 finish_record_type (tree record_type, tree fieldlist, int rep_level,
559 bool do_not_finalize)
561 enum tree_code code = TREE_CODE (record_type);
562 tree name = TYPE_NAME (record_type);
563 tree ada_size = bitsize_zero_node;
564 tree size = bitsize_zero_node;
565 bool had_size = TYPE_SIZE (record_type) != 0;
566 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
567 bool had_align = TYPE_ALIGN (record_type) != 0;
568 tree field;
570 TYPE_FIELDS (record_type) = fieldlist;
572 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
573 generate debug info and have a parallel type. */
574 if (name && TREE_CODE (name) == TYPE_DECL)
575 name = DECL_NAME (name);
576 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
578 /* Globally initialize the record first. If this is a rep'ed record,
579 that just means some initializations; otherwise, layout the record. */
580 if (rep_level > 0)
582 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
583 SET_TYPE_MODE (record_type, BLKmode);
585 if (!had_size_unit)
586 TYPE_SIZE_UNIT (record_type) = size_zero_node;
587 if (!had_size)
588 TYPE_SIZE (record_type) = bitsize_zero_node;
590 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
591 out just like a UNION_TYPE, since the size will be fixed. */
592 else if (code == QUAL_UNION_TYPE)
593 code = UNION_TYPE;
595 else
597 /* Ensure there isn't a size already set. There can be in an error
598 case where there is a rep clause but all fields have errors and
599 no longer have a position. */
600 TYPE_SIZE (record_type) = 0;
601 layout_type (record_type);
604 /* At this point, the position and size of each field is known. It was
605 either set before entry by a rep clause, or by laying out the type above.
607 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
608 to compute the Ada size; the GCC size and alignment (for rep'ed records
609 that are not padding types); and the mode (for rep'ed records). We also
610 clear the DECL_BIT_FIELD indication for the cases we know have not been
611 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
613 if (code == QUAL_UNION_TYPE)
614 fieldlist = nreverse (fieldlist);
616 for (field = fieldlist; field; field = TREE_CHAIN (field))
618 tree type = TREE_TYPE (field);
619 tree pos = bit_position (field);
620 tree this_size = DECL_SIZE (field);
621 tree this_ada_size;
623 if ((TREE_CODE (type) == RECORD_TYPE
624 || TREE_CODE (type) == UNION_TYPE
625 || TREE_CODE (type) == QUAL_UNION_TYPE)
626 && !TYPE_IS_FAT_POINTER_P (type)
627 && !TYPE_CONTAINS_TEMPLATE_P (type)
628 && TYPE_ADA_SIZE (type))
629 this_ada_size = TYPE_ADA_SIZE (type);
630 else
631 this_ada_size = this_size;
633 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
634 if (DECL_BIT_FIELD (field)
635 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
637 unsigned int align = TYPE_ALIGN (type);
639 /* In the general case, type alignment is required. */
640 if (value_factor_p (pos, align))
642 /* The enclosing record type must be sufficiently aligned.
643 Otherwise, if no alignment was specified for it and it
644 has been laid out already, bump its alignment to the
645 desired one if this is compatible with its size. */
646 if (TYPE_ALIGN (record_type) >= align)
648 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
649 DECL_BIT_FIELD (field) = 0;
651 else if (!had_align
652 && rep_level == 0
653 && value_factor_p (TYPE_SIZE (record_type), align))
655 TYPE_ALIGN (record_type) = align;
656 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
657 DECL_BIT_FIELD (field) = 0;
661 /* In the non-strict alignment case, only byte alignment is required. */
662 if (!STRICT_ALIGNMENT
663 && DECL_BIT_FIELD (field)
664 && value_factor_p (pos, BITS_PER_UNIT))
665 DECL_BIT_FIELD (field) = 0;
668 /* If we still have DECL_BIT_FIELD set at this point, we know the field
669 is technically not addressable. Except that it can actually be
670 addressed if the field is BLKmode and happens to be properly
671 aligned. */
672 DECL_NONADDRESSABLE_P (field)
673 |= DECL_BIT_FIELD (field) && DECL_MODE (field) != BLKmode;
675 /* A type must be as aligned as its most aligned field that is not
676 a bit-field. But this is already enforced by layout_type. */
677 if (rep_level > 0 && !DECL_BIT_FIELD (field))
678 TYPE_ALIGN (record_type)
679 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
681 switch (code)
683 case UNION_TYPE:
684 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
685 size = size_binop (MAX_EXPR, size, this_size);
686 break;
688 case QUAL_UNION_TYPE:
689 ada_size
690 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
691 this_ada_size, ada_size);
692 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
693 this_size, size);
694 break;
696 case RECORD_TYPE:
697 /* Since we know here that all fields are sorted in order of
698 increasing bit position, the size of the record is one
699 higher than the ending bit of the last field processed
700 unless we have a rep clause, since in that case we might
701 have a field outside a QUAL_UNION_TYPE that has a higher ending
702 position. So use a MAX in that case. Also, if this field is a
703 QUAL_UNION_TYPE, we need to take into account the previous size in
704 the case of empty variants. */
705 ada_size
706 = merge_sizes (ada_size, pos, this_ada_size,
707 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
708 size
709 = merge_sizes (size, pos, this_size,
710 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
711 break;
713 default:
714 gcc_unreachable ();
718 if (code == QUAL_UNION_TYPE)
719 nreverse (fieldlist);
721 /* If the type is discriminated, it can be used to access all its
722 constrained subtypes, so force structural equality checks. */
723 if (CONTAINS_PLACEHOLDER_P (size))
724 SET_TYPE_STRUCTURAL_EQUALITY (record_type);
726 if (rep_level < 2)
728 /* If this is a padding record, we never want to make the size smaller
729 than what was specified in it, if any. */
730 if (TREE_CODE (record_type) == RECORD_TYPE
731 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
732 size = TYPE_SIZE (record_type);
734 /* Now set any of the values we've just computed that apply. */
735 if (!TYPE_IS_FAT_POINTER_P (record_type)
736 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
737 SET_TYPE_ADA_SIZE (record_type, ada_size);
739 if (rep_level > 0)
741 tree size_unit = had_size_unit
742 ? TYPE_SIZE_UNIT (record_type)
743 : convert (sizetype,
744 size_binop (CEIL_DIV_EXPR, size,
745 bitsize_unit_node));
746 unsigned int align = TYPE_ALIGN (record_type);
748 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
749 TYPE_SIZE_UNIT (record_type)
750 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
752 compute_record_mode (record_type);
756 if (!do_not_finalize)
757 rest_of_record_type_compilation (record_type);
760 /* Wrap up compilation of RECORD_TYPE, i.e. most notably output all
761 the debug information associated with it. It need not be invoked
762 directly in most cases since finish_record_type takes care of doing
763 so, unless explicitly requested not to through DO_NOT_FINALIZE. */
765 void
766 rest_of_record_type_compilation (tree record_type)
768 tree fieldlist = TYPE_FIELDS (record_type);
769 tree field;
770 enum tree_code code = TREE_CODE (record_type);
771 bool var_size = false;
773 for (field = fieldlist; field; field = TREE_CHAIN (field))
775 /* We need to make an XVE/XVU record if any field has variable size,
776 whether or not the record does. For example, if we have a union,
777 it may be that all fields, rounded up to the alignment, have the
778 same size, in which case we'll use that size. But the debug
779 output routines (except Dwarf2) won't be able to output the fields,
780 so we need to make the special record. */
781 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
782 /* If a field has a non-constant qualifier, the record will have
783 variable size too. */
784 || (code == QUAL_UNION_TYPE
785 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
787 var_size = true;
788 break;
792 /* If this record is of variable size, rename it so that the
793 debugger knows it is and make a new, parallel, record
794 that tells the debugger how the record is laid out. See
795 exp_dbug.ads. But don't do this for records that are padding
796 since they confuse GDB. */
797 if (var_size
798 && !(TREE_CODE (record_type) == RECORD_TYPE
799 && TYPE_IS_PADDING_P (record_type)))
801 tree new_record_type
802 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
803 ? UNION_TYPE : TREE_CODE (record_type));
804 tree orig_name = TYPE_NAME (record_type), new_name;
805 tree last_pos = bitsize_zero_node;
806 tree old_field, prev_old_field = NULL_TREE;
808 if (TREE_CODE (orig_name) == TYPE_DECL)
809 orig_name = DECL_NAME (orig_name);
811 new_name
812 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
813 ? "XVU" : "XVE");
814 TYPE_NAME (new_record_type) = new_name;
815 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
816 TYPE_STUB_DECL (new_record_type)
817 = create_type_stub_decl (new_name, new_record_type);
818 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
819 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
820 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
821 TYPE_SIZE_UNIT (new_record_type)
822 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
824 add_parallel_type (TYPE_STUB_DECL (record_type), new_record_type);
826 /* Now scan all the fields, replacing each field with a new
827 field corresponding to the new encoding. */
828 for (old_field = TYPE_FIELDS (record_type); old_field;
829 old_field = TREE_CHAIN (old_field))
831 tree field_type = TREE_TYPE (old_field);
832 tree field_name = DECL_NAME (old_field);
833 tree new_field;
834 tree curpos = bit_position (old_field);
835 bool var = false;
836 unsigned int align = 0;
837 tree pos;
839 /* See how the position was modified from the last position.
841 There are two basic cases we support: a value was added
842 to the last position or the last position was rounded to
843 a boundary and then something was added. Check for the
844 first case first. If not, see if there is any evidence
845 of rounding. If so, round the last position and try
846 again.
848 If this is a union, the position can be taken as zero. */
850 /* Some computations depend on the shape of the position expression,
851 so strip conversions to make sure it's exposed. */
852 curpos = remove_conversions (curpos, true);
854 if (TREE_CODE (new_record_type) == UNION_TYPE)
855 pos = bitsize_zero_node, align = 0;
856 else
857 pos = compute_related_constant (curpos, last_pos);
859 if (!pos && TREE_CODE (curpos) == MULT_EXPR
860 && host_integerp (TREE_OPERAND (curpos, 1), 1))
862 tree offset = TREE_OPERAND (curpos, 0);
863 align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
865 /* An offset which is a bitwise AND with a negative power of 2
866 means an alignment corresponding to this power of 2. */
867 offset = remove_conversions (offset, true);
868 if (TREE_CODE (offset) == BIT_AND_EXPR
869 && host_integerp (TREE_OPERAND (offset, 1), 0)
870 && tree_int_cst_sgn (TREE_OPERAND (offset, 1)) < 0)
872 unsigned int pow
873 = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
874 if (exact_log2 (pow) > 0)
875 align *= pow;
878 pos = compute_related_constant (curpos,
879 round_up (last_pos, align));
881 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
882 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
883 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
884 && host_integerp (TREE_OPERAND
885 (TREE_OPERAND (curpos, 0), 1),
888 align
889 = tree_low_cst
890 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
891 pos = compute_related_constant (curpos,
892 round_up (last_pos, align));
894 else if (potential_alignment_gap (prev_old_field, old_field,
895 pos))
897 align = TYPE_ALIGN (field_type);
898 pos = compute_related_constant (curpos,
899 round_up (last_pos, align));
902 /* If we can't compute a position, set it to zero.
904 ??? We really should abort here, but it's too much work
905 to get this correct for all cases. */
907 if (!pos)
908 pos = bitsize_zero_node;
910 /* See if this type is variable-sized and make a pointer type
911 and indicate the indirection if so. Beware that the debug
912 back-end may adjust the position computed above according
913 to the alignment of the field type, i.e. the pointer type
914 in this case, if we don't preventively counter that. */
915 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
917 field_type = build_pointer_type (field_type);
918 if (align != 0 && TYPE_ALIGN (field_type) > align)
920 field_type = copy_node (field_type);
921 TYPE_ALIGN (field_type) = align;
923 var = true;
926 /* Make a new field name, if necessary. */
927 if (var || align != 0)
929 char suffix[16];
931 if (align != 0)
932 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
933 align / BITS_PER_UNIT);
934 else
935 strcpy (suffix, "XVL");
937 field_name = concat_name (field_name, suffix);
940 new_field = create_field_decl (field_name, field_type,
941 new_record_type, 0,
942 DECL_SIZE (old_field), pos, 0);
943 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
944 TYPE_FIELDS (new_record_type) = new_field;
946 /* If old_field is a QUAL_UNION_TYPE, take its size as being
947 zero. The only time it's not the last field of the record
948 is when there are other components at fixed positions after
949 it (meaning there was a rep clause for every field) and we
950 want to be able to encode them. */
951 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
952 (TREE_CODE (TREE_TYPE (old_field))
953 == QUAL_UNION_TYPE)
954 ? bitsize_zero_node
955 : DECL_SIZE (old_field));
956 prev_old_field = old_field;
959 TYPE_FIELDS (new_record_type)
960 = nreverse (TYPE_FIELDS (new_record_type));
962 rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type));
965 rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
968 /* Append PARALLEL_TYPE on the chain of parallel types for decl. */
970 void
971 add_parallel_type (tree decl, tree parallel_type)
973 tree d = decl;
975 while (DECL_PARALLEL_TYPE (d))
976 d = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (d));
978 SET_DECL_PARALLEL_TYPE (d, parallel_type);
981 /* Return the parallel type associated to a type, if any. */
983 tree
984 get_parallel_type (tree type)
986 if (TYPE_STUB_DECL (type))
987 return DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type));
988 else
989 return NULL_TREE;
992 /* Utility function of above to merge LAST_SIZE, the previous size of a record
993 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
994 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
995 replace a value of zero with the old size. If HAS_REP is true, we take the
996 MAX of the end position of this field with LAST_SIZE. In all other cases,
997 we use FIRST_BIT plus SIZE. Return an expression for the size. */
999 static tree
1000 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1001 bool has_rep)
1003 tree type = TREE_TYPE (last_size);
1004 tree new;
1006 if (!special || TREE_CODE (size) != COND_EXPR)
1008 new = size_binop (PLUS_EXPR, first_bit, size);
1009 if (has_rep)
1010 new = size_binop (MAX_EXPR, last_size, new);
1013 else
1014 new = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1015 integer_zerop (TREE_OPERAND (size, 1))
1016 ? last_size : merge_sizes (last_size, first_bit,
1017 TREE_OPERAND (size, 1),
1018 1, has_rep),
1019 integer_zerop (TREE_OPERAND (size, 2))
1020 ? last_size : merge_sizes (last_size, first_bit,
1021 TREE_OPERAND (size, 2),
1022 1, has_rep));
1024 /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
1025 when fed through substitute_in_expr) into thinking that a constant
1026 size is not constant. */
1027 while (TREE_CODE (new) == NON_LVALUE_EXPR)
1028 new = TREE_OPERAND (new, 0);
1030 return new;
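/* Worked example (editor's illustration, not in the original file): for a
   record without rep clause, a field with FIRST_BIT 32 and SIZE 8 yields
   merge_sizes (LAST_SIZE, 32, 8, false, false) == 40, i.e. FIRST_BIT + SIZE;
   with HAS_REP true the result would be MAX (LAST_SIZE, 40) instead, since
   a rep clause may place later fields below earlier ones.  */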
1033 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1034 related by the addition of a constant. Return that constant if so. */
1036 static tree
1037 compute_related_constant (tree op0, tree op1)
1039 tree op0_var, op1_var;
1040 tree op0_con = split_plus (op0, &op0_var);
1041 tree op1_con = split_plus (op1, &op1_var);
1042 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1044 if (operand_equal_p (op0_var, op1_var, 0))
1045 return result;
1046 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1047 return result;
1048 else
1049 return 0;
1052 /* Utility function of above to split a tree IN, which may be a sum, into a
1053 constant part, which is returned, and a variable part, which is stored
1054 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1055 bitsizetype. */
1057 static tree
1058 split_plus (tree in, tree *pvar)
1060 /* Strip NOPS in order to ease the tree traversal and maximize the
1061 potential for constant or plus/minus discovery. We need to be careful
1062 to always return and set *pvar to bitsizetype trees, but it's worth
1063 the effort. */
1064 STRIP_NOPS (in);
1066 *pvar = convert (bitsizetype, in);
1068 if (TREE_CODE (in) == INTEGER_CST)
1070 *pvar = bitsize_zero_node;
1071 return convert (bitsizetype, in);
1073 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1075 tree lhs_var, rhs_var;
1076 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1077 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1079 if (lhs_var == TREE_OPERAND (in, 0)
1080 && rhs_var == TREE_OPERAND (in, 1))
1081 return bitsize_zero_node;
1083 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1084 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1086 else
1087 return bitsize_zero_node;
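/* Worked example (editor's illustration, not in the original file): for
   OP0 = N + 24 and OP1 = N + 8, split_plus returns the constant parts 24
   and 8 with the same variable part N, so compute_related_constant returns
   the bitsizetype constant 16.  If the variable parts cannot be proved
   equal, it returns 0 and the caller treats the positions as unrelated.  */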
1090 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1091 subprogram. If it is void_type_node, then we are dealing with a procedure,
1092 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1093 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1094 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1095 RETURNS_UNCONSTRAINED is true if the function returns an unconstrained
1096 object. RETURNS_BY_REF is true if the function returns by reference.
1097 RETURNS_BY_TARGET_PTR is true if the function is to be passed (as its
1098 first parameter) the address of the place to copy its result. */
1100 tree
1101 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1102 bool returns_unconstrained, bool returns_by_ref,
1103 bool returns_by_target_ptr)
1105 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1106 the subprogram formal parameters. This list is generated by traversing the
1107 input list of PARM_DECL nodes. */
1108 tree param_type_list = NULL;
1109 tree param_decl;
1110 tree type;
1112 for (param_decl = param_decl_list; param_decl;
1113 param_decl = TREE_CHAIN (param_decl))
1114 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1115 param_type_list);
1117 /* The list of the function parameter types has to be terminated by the void
1118 type to signal to the back-end that we are not dealing with a variable
1119 parameter subprogram, but that the subprogram has a fixed number of
1120 parameters. */
1121 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1123 /* The list of argument types has been created in reverse
1124 so nreverse it. */
1125 param_type_list = nreverse (param_type_list);
1127 type = build_function_type (return_type, param_type_list);
1129 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1130 or the new type should have one, make a copy of TYPE. Likewise if the
1131 RETURNS_UNCONSTRAINED, RETURNS_BY_REF or RETURNS_BY_TARGET_PTR flags differ. */
1132 if (TYPE_CI_CO_LIST (type) || cico_list
1133 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1134 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref
1135 || TYPE_RETURNS_BY_TARGET_PTR_P (type) != returns_by_target_ptr)
1136 type = copy_type (type);
1138 TYPE_CI_CO_LIST (type) = cico_list;
1139 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1140 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1141 TYPE_RETURNS_BY_TARGET_PTR_P (type) = returns_by_target_ptr;
1142 return type;
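/* Illustrative usage (editor's sketch, not in the original file): the type
   of a parameterless procedure with no copy-in/copy-out list would be
   obtained with

     create_subprog_type (void_type_node, NULL_TREE, NULL_TREE,
                          false, false, false);

   and a plain function simply passes its result type instead of
   void_type_node.  */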
1145 /* Return a copy of TYPE but safe to modify in any way. */
1147 tree
1148 copy_type (tree type)
1150 tree new = copy_node (type);
1152 /* copy_node clears this field instead of copying it, because it is
1153 aliased with TREE_CHAIN. */
1154 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
1156 TYPE_POINTER_TO (new) = 0;
1157 TYPE_REFERENCE_TO (new) = 0;
1158 TYPE_MAIN_VARIANT (new) = new;
1159 TYPE_NEXT_VARIANT (new) = 0;
1161 return new;
1164 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1165 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position of
1166 the decl. */
1168 tree
1169 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
1171 /* First build a type for the desired range. */
1172 tree type = build_index_2_type (min, max);
1174 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1175 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1176 is set, but not to INDEX, make a copy of this type with the requested
1177 index type. Note that we have no way of sharing these types, but that's
1178 only a small hole. */
1179 if (TYPE_INDEX_TYPE (type) == index)
1180 return type;
1181 else if (TYPE_INDEX_TYPE (type))
1182 type = copy_type (type);
1184 SET_TYPE_INDEX_TYPE (type, index);
1185 create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);
1186 return type;
1189 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
1190 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
1191 its data type. */
1193 tree
1194 create_type_stub_decl (tree type_name, tree type)
1196 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
1197 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
1198 emitted in DWARF. */
1199 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1200 DECL_ARTIFICIAL (type_decl) = 1;
1201 return type_decl;
1204 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
1205 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
1206 is a declaration that was generated by the compiler. DEBUG_INFO_P is
1207 true if we need to write debug information about this type. GNAT_NODE
1208 is used for the position of the decl. */
1210 tree
1211 create_type_decl (tree type_name, tree type, struct attrib *attr_list,
1212 bool artificial_p, bool debug_info_p, Node_Id gnat_node)
1214 enum tree_code code = TREE_CODE (type);
1215 bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
1216 tree type_decl;
1218 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
1219 gcc_assert (!TYPE_IS_DUMMY_P (type));
1221 /* If the type hasn't been named yet, we're naming it; preserve an existing
1222 TYPE_STUB_DECL that has been attached to it for some purpose. */
1223 if (!named && TYPE_STUB_DECL (type))
1225 type_decl = TYPE_STUB_DECL (type);
1226 DECL_NAME (type_decl) = type_name;
1228 else
1229 type_decl = build_decl (TYPE_DECL, type_name, type);
1231 DECL_ARTIFICIAL (type_decl) = artificial_p;
1232 gnat_pushdecl (type_decl, gnat_node);
1233 process_attributes (type_decl, attr_list);
1235 /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
1236 This causes the name to be also viewed as a "tag" by the debug
1237 back-end, with the advantage that no DW_TAG_typedef is emitted
1238 for artificial "tagged" types in DWARF. */
1239 if (!named)
1240 TYPE_STUB_DECL (type) = type_decl;
1242 /* Pass the type declaration to the debug back-end unless this is an
1243 UNCONSTRAINED_ARRAY_TYPE that the back-end does not support, or a
1244 type for which debugging information was not requested, or else an
1245 ENUMERAL_TYPE or RECORD_TYPE (except for fat pointers) which are
1246 handled separately. And do not pass dummy types either. */
1247 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
1248 DECL_IGNORED_P (type_decl) = 1;
1249 else if (code != ENUMERAL_TYPE
1250 && (code != RECORD_TYPE || TYPE_IS_FAT_POINTER_P (type))
1251 && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
1252 && TYPE_IS_DUMMY_P (TREE_TYPE (type)))
1253 && !(code == RECORD_TYPE
1254 && TYPE_IS_DUMMY_P
1255 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))))))
1256 rest_of_type_decl_compilation (type_decl);
1258 return type_decl;
1261 /* Return a VAR_DECL or CONST_DECL node.
1263 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
1264 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
1265 the GCC tree for an optional initial expression; NULL_TREE if none.
1267 CONST_FLAG is true if this variable is constant, in which case we might
1268 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
1270 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
1271 definition to be made visible outside of the current compilation unit, for
1272 instance variable definitions in a package specification.
1274 EXTERN_FLAG is true when processing an external variable declaration (as
1275 opposed to a definition: no storage is to be allocated for the variable).
1277 STATIC_FLAG is only relevant when not at top level. In that case
1278 it indicates whether to always allocate storage to the variable.
1280 GNAT_NODE is used for the position of the decl. */
1282 tree
1283 create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
1284 bool const_flag, bool public_flag, bool extern_flag,
1285 bool static_flag, bool const_decl_allowed_p,
1286 struct attrib *attr_list, Node_Id gnat_node)
1288 bool init_const
1289 = (var_init != 0
1290 && gnat_types_compatible_p (type, TREE_TYPE (var_init))
1291 && (global_bindings_p () || static_flag
1292 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
1293 : TREE_CONSTANT (var_init)));
1295 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
1296 case the initializer may be used in lieu of the DECL node (as done in
1297 Identifier_to_gnu). This is useful to avoid the need for elaboration
1298 code when an identifier for which such a decl is made is in turn used as
1299 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
1300 but extra constraints apply to this choice (see below) and are not
1301 relevant to the distinction we wish to make. */
1302 bool constant_p = const_flag && init_const;
1304 /* The actual DECL node. CONST_DECL was initially intended for enumerals
1305 and may be used for scalars in general but not for aggregates. */
1306 tree var_decl
1307 = build_decl ((constant_p && const_decl_allowed_p
1308 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
1309 var_name, type);
1311 /* If this is external, throw away any initializations (they will be done
1312 elsewhere) unless this is a constant for which we would like to remain
1313 able to get the initializer. If we are defining a global here, leave a
1314 constant initialization and save any variable elaborations for the
1315 elaboration routine. If we are just annotating types, throw away the
1316 initialization if it isn't a constant. */
1317 if ((extern_flag && !constant_p)
1318 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
1319 var_init = NULL_TREE;
1321 /* At the global level, an initializer requiring code to be generated
1322 produces elaboration statements. Check that such statements are allowed,
1323 that is, not violating a No_Elaboration_Code restriction. */
1324 if (global_bindings_p () && var_init != 0 && ! init_const)
1325 Check_Elaboration_Code_Allowed (gnat_node);
1327 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
1328 try to fiddle with DECL_COMMON. However, on platforms that don't
1329 support global BSS sections, uninitialized global variables would
1330 go in DATA instead, thus increasing the size of the executable. */
1331 if (!flag_no_common
1332 && TREE_CODE (var_decl) == VAR_DECL
1333 && !have_global_bss_p ())
1334 DECL_COMMON (var_decl) = 1;
1335 DECL_INITIAL (var_decl) = var_init;
1336 TREE_READONLY (var_decl) = const_flag;
1337 DECL_EXTERNAL (var_decl) = extern_flag;
1338 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1339 TREE_CONSTANT (var_decl) = constant_p;
1340 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1341 = TYPE_VOLATILE (type);
1343 /* If it's public and not external, always allocate storage for it.
1344 At the global binding level we need to allocate static storage for the
1345 variable if and only if it's not external. If we are not at the top level
1346 we allocate automatic storage unless requested not to. */
1347 TREE_STATIC (var_decl)
1348 = !extern_flag && (public_flag || static_flag || global_bindings_p ());
1350 if (asm_name && VAR_OR_FUNCTION_DECL_P (var_decl))
1351 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1353 process_attributes (var_decl, attr_list);
1355 /* Add this decl to the current binding level. */
1356 gnat_pushdecl (var_decl, gnat_node);
1358 if (TREE_SIDE_EFFECTS (var_decl))
1359 TREE_ADDRESSABLE (var_decl) = 1;
1361 if (TREE_CODE (var_decl) != CONST_DECL)
1363 if (global_bindings_p ())
1364 rest_of_decl_compilation (var_decl, true, 0);
1366 else
1367 expand_decl (var_decl);
1369 return var_decl;
1372 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
1374 static bool
1375 aggregate_type_contains_array_p (tree type)
1377 switch (TREE_CODE (type))
1379 case RECORD_TYPE:
1380 case UNION_TYPE:
1381 case QUAL_UNION_TYPE:
1383 tree field;
1384 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1385 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
1386 && aggregate_type_contains_array_p (TREE_TYPE (field)))
1387 return true;
1388 return false;
1391 case ARRAY_TYPE:
1392 return true;
1394 default:
1395 gcc_unreachable ();
1399 /* Return a FIELD_DECL node. FIELD_NAME is the field name, FIELD_TYPE is its
1400 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1401 this field is in a record type with a "pragma pack". If SIZE is nonzero
1402 it is the specified size for this field. If POS is nonzero, it is the bit
1403 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1404 the address of this field for aliasing purposes. If it is negative, we
1405 should not make a bitfield, which is used by make_aligning_type. */
1407 tree
1408 create_field_decl (tree field_name, tree field_type, tree record_type,
1409 int packed, tree size, tree pos, int addressable)
1411 tree field_decl = build_decl (FIELD_DECL, field_name, field_type);
1413 DECL_CONTEXT (field_decl) = record_type;
1414 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
1416 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1417 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1418 Likewise for an aggregate without specified position that contains an
1419 array, because in this case slices of variable length of this array
1420 must be handled by GCC and variable-sized objects need to be aligned
1421 to at least a byte boundary. */
1422 if (packed && (TYPE_MODE (field_type) == BLKmode
1423 || (!pos
1424 && AGGREGATE_TYPE_P (field_type)
1425 && aggregate_type_contains_array_p (field_type))))
1426 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
1428 /* If a size is specified, use it. Otherwise, if the record type is packed
1429 compute a size to use, which may differ from the object's natural size.
1430 We always set a size in this case to trigger the checks for bitfield
1431 creation below, which is typically required when no position has been
1432 specified. */
1433 if (size)
1434 size = convert (bitsizetype, size);
1435 else if (packed == 1)
1437 size = rm_size (field_type);
1439 /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
1440 byte. */
1441 if (TREE_CODE (size) == INTEGER_CST
1442 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
1443 size = round_up (size, BITS_PER_UNIT);
1446 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
1447 specified for two reasons: first if the size differs from the natural
1448 size. Second, if the alignment is insufficient. There are a number of
1449 ways the latter can be true.
1451 We never make a bitfield if the type of the field has a nonconstant size,
1452 because no such entity requiring bitfield operations should reach here.
1454 We do *preventively* make a bitfield when there might be the need for it
1455 but we don't have all the necessary information to decide, as is the case
1456 of a field with no specified position in a packed record.
1458 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1459 in layout_decl or finish_record_type to clear the bit_field indication if
1460 it is in fact not needed. */
1461 if (addressable >= 0
1462 && size
1463 && TREE_CODE (size) == INTEGER_CST
1464 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1465 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
1466 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
1467 || packed
1468 || (TYPE_ALIGN (record_type) != 0
1469 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1471 DECL_BIT_FIELD (field_decl) = 1;
1472 DECL_SIZE (field_decl) = size;
1473 if (!packed && !pos)
1475 if (TYPE_ALIGN (record_type) != 0
1476 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
1477 DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
1478 else
1479 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1483 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
1485 /* Bump the alignment if need be, either for bitfield/packing purposes or
1486 to satisfy the type requirements if no such consideration applies. When
1487 we get the alignment from the type, indicate if this is from an explicit
1488 user request, which prevents stor-layout from lowering it later on. */
1490 unsigned int bit_align
1491 = (DECL_BIT_FIELD (field_decl) ? 1
1492 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
1494 if (bit_align > DECL_ALIGN (field_decl))
1495 DECL_ALIGN (field_decl) = bit_align;
1496 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
1498 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1499 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
1503 if (pos)
1505 /* We need to pass in the alignment the DECL is known to have.
1506 This is the lowest-order bit set in POS, but no more than
1507 the alignment of the record, if one is specified. Note
1508 that an alignment of 0 is taken as infinite. */
1509 unsigned int known_align;
1511 if (host_integerp (pos, 1))
1512 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1513 else
1514 known_align = BITS_PER_UNIT;
1516 if (TYPE_ALIGN (record_type)
1517 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1518 known_align = TYPE_ALIGN (record_type);
1520 layout_decl (field_decl, known_align);
1521 SET_DECL_OFFSET_ALIGN (field_decl,
1522 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1523 : BITS_PER_UNIT);
1524 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1525 &DECL_FIELD_BIT_OFFSET (field_decl),
1526 DECL_OFFSET_ALIGN (field_decl), pos);
1529 /* In addition to what our caller says, claim the field is addressable if we
1530 know that its type is not suitable.
1532 The field may also be "technically" nonaddressable, meaning that even if
1533 we attempt to take the field's address we will actually get the address
1534 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
1535 value we have at this point is not accurate enough, so we don't account
1536 for this here and let finish_record_type decide. */
1537 if (!addressable && !type_for_nonaliased_component_p (field_type))
1538 addressable = 1;
1540 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
1542 return field_decl;
1545 /* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
1546 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
1547 (either an In parameter or an address of a pass-by-ref parameter). */
1549 tree
1550 create_param_decl (tree param_name, tree param_type, bool readonly)
1552 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
1554 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
1555 can lead to various ABI violations. */
1556 if (targetm.calls.promote_prototypes (NULL_TREE)
1557 && INTEGRAL_TYPE_P (param_type)
1558 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1560 /* We have to be careful about biased types here. Make a subtype
1561 of integer_type_node with the proper biasing. */
1562 if (TREE_CODE (param_type) == INTEGER_TYPE
1563 && TYPE_BIASED_REPRESENTATION_P (param_type))
1565 param_type
1566 = copy_type (build_range_type (integer_type_node,
1567 TYPE_MIN_VALUE (param_type),
1568 TYPE_MAX_VALUE (param_type)));
1570 TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
1572 else
1573 param_type = integer_type_node;
1576 DECL_ARG_TYPE (param_decl) = param_type;
1577 TREE_READONLY (param_decl) = readonly;
1578 return param_decl;
1581 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1583 void
1584 process_attributes (tree decl, struct attrib *attr_list)
1586 for (; attr_list; attr_list = attr_list->next)
1587 switch (attr_list->type)
1589 case ATTR_MACHINE_ATTRIBUTE:
1590 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
1591 NULL_TREE),
1592 ATTR_FLAG_TYPE_IN_PLACE);
1593 break;
1595 case ATTR_LINK_ALIAS:
1596 if (! DECL_EXTERNAL (decl))
1598 TREE_STATIC (decl) = 1;
1599 assemble_alias (decl, attr_list->name);
1601 break;
1603 case ATTR_WEAK_EXTERNAL:
1604 if (SUPPORTS_WEAK)
1605 declare_weak (decl);
1606 else
1607 post_error ("?weak declarations not supported on this target",
1608 attr_list->error_point);
1609 break;
1611 case ATTR_LINK_SECTION:
1612 if (targetm.have_named_sections)
1614 DECL_SECTION_NAME (decl)
1615 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1616 IDENTIFIER_POINTER (attr_list->name));
1617 DECL_COMMON (decl) = 0;
1619 else
1620 post_error ("?section attributes are not supported for this target",
1621 attr_list->error_point);
1622 break;
1624 case ATTR_LINK_CONSTRUCTOR:
1625 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1626 TREE_USED (decl) = 1;
1627 break;
1629 case ATTR_LINK_DESTRUCTOR:
1630 DECL_STATIC_DESTRUCTOR (decl) = 1;
1631 TREE_USED (decl) = 1;
1632 break;
1634 case ATTR_THREAD_LOCAL_STORAGE:
1635 DECL_TLS_MODEL (decl) = decl_default_tls_model (decl);
1636 DECL_COMMON (decl) = 0;
1637 break;
1641 /* Record DECL as a global renaming pointer. */
1643 void
1644 record_global_renaming_pointer (tree decl)
1646 gcc_assert (DECL_RENAMED_OBJECT (decl));
1647 VEC_safe_push (tree, gc, global_renaming_pointers, decl);
1650 /* Invalidate the global renaming pointers. */
1652 void
1653 invalidate_global_renaming_pointers (void)
1655 unsigned int i;
1656 tree iter;
1658 for (i = 0; VEC_iterate(tree, global_renaming_pointers, i, iter); i++)
1659 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
1661 VEC_free (tree, gc, global_renaming_pointers);
1664 /* Return true if VALUE is known to be a multiple of FACTOR, which must be
1665 a power of 2. */
1667 bool
1668 value_factor_p (tree value, HOST_WIDE_INT factor)
1670 if (host_integerp (value, 1))
1671 return tree_low_cst (value, 1) % factor == 0;
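/* A product is a multiple of FACTOR as soon as either of its operands is. */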
1673 if (TREE_CODE (value) == MULT_EXPR)
1674 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1675 || value_factor_p (TREE_OPERAND (value, 1), factor));
1677 return false;
1680 /* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1681 unless we can prove these 2 fields are laid out in such a way that no gap
1682 exists between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
1683 is the distance in bits between the end of PREV_FIELD and the starting
1684 position of CURR_FIELD. It is ignored if null. */
1686 static bool
1687 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1689 /* If this is the first field of the record, there cannot be any gap. */
1690 if (!prev_field)
1691 return false;
1693 /* If the previous field is a union type, then return False: The only
1694 time when such a field is not the last field of the record is when
1695 there are other components at fixed positions after it (meaning there
1696 was a rep clause for every field), in which case we don't want the
1697 alignment constraint to override them. */
1698 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1699 return false;
1701 /* If the distance between the end of prev_field and the beginning of
1702 curr_field is constant, then there is a gap if the value of this
1703 constant is not null. */
1704 if (offset && host_integerp (offset, 1))
1705 return !integer_zerop (offset);
1707 /* If the size and position of the previous field are constant,
1708 then check the sum of this size and position. There will be a gap
1709 iff it is not a multiple of the current field alignment. */
1710 if (host_integerp (DECL_SIZE (prev_field), 1)
1711 && host_integerp (bit_position (prev_field), 1))
1712 return ((tree_low_cst (bit_position (prev_field), 1)
1713 + tree_low_cst (DECL_SIZE (prev_field), 1))
1714 % DECL_ALIGN (curr_field) != 0);
1716 /* If both the position and size of the previous field are multiples
1717 of the current field alignment, there cannot be any gap. */
1718 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1719 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1720 return false;
1722 /* Fallback: return that there may be a potential gap. */
1723 return true;
1726 /* Returns a LABEL_DECL node for LABEL_NAME. */
1728 tree
1729 create_label_decl (tree label_name)
1731 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1733 DECL_CONTEXT (label_decl) = current_function_decl;
1734 DECL_MODE (label_decl) = VOIDmode;
1735 DECL_SOURCE_LOCATION (label_decl) = input_location;
1737 return label_decl;
1740 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1741 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1742 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1743 PARM_DECL nodes chained through the TREE_CHAIN field).
1745 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1746 appropriate fields in the FUNCTION_DECL. GNAT_NODE gives the location. */
1748 tree
1749 create_subprog_decl (tree subprog_name, tree asm_name,
1750 tree subprog_type, tree param_decl_list, bool inline_flag,
1751 bool public_flag, bool extern_flag,
1752 struct attrib *attr_list, Node_Id gnat_node)
1754 tree return_type = TREE_TYPE (subprog_type);
1755 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1757 /* If this is a non-inline function nested inside an inlined external
1758 function, we cannot honor both requests without cloning the nested
1759 function in the current unit since it is private to the other unit.
1760 We could inline the nested function as well but it's probably better
1761 to err on the side of too little inlining. */
1762 if (!inline_flag
1763 && current_function_decl
1764 && DECL_DECLARED_INLINE_P (current_function_decl)
1765 && DECL_EXTERNAL (current_function_decl))
1766 DECL_DECLARED_INLINE_P (current_function_decl) = 0;
1768 DECL_EXTERNAL (subprog_decl) = extern_flag;
1769 TREE_PUBLIC (subprog_decl) = public_flag;
1770 TREE_STATIC (subprog_decl) = 1;
1771 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1772 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1773 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1774 DECL_DECLARED_INLINE_P (subprog_decl) = inline_flag;
1775 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1776 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1777 DECL_ARTIFICIAL (DECL_RESULT (subprog_decl)) = 1;
1778 DECL_IGNORED_P (DECL_RESULT (subprog_decl)) = 1;
1780 /* TREE_ADDRESSABLE is set on the result type to request the use of the
1781 target by-reference return mechanism. This is not supported all the
1782 way down to RTL expansion with GCC 4, which ICEs on temporary creation
1783 attempts with such a type and expects DECL_BY_REFERENCE to be set on
1784 the RESULT_DECL instead - see gnat_genericize for more details. */
1785 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (subprog_decl))))
1787 tree result_decl = DECL_RESULT (subprog_decl);
1789 TREE_ADDRESSABLE (TREE_TYPE (result_decl)) = 0;
1790 DECL_BY_REFERENCE (result_decl) = 1;
1793 if (asm_name)
1795 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1797 /* The expand_main_function circuitry expects "main_identifier_node" to
1798 designate the DECL_NAME of the 'main' entry point, which by default is
1799 expected to be a function literally declared as "main". Ada program
1800 entry points are typically declared under a different name within the
1801 binder-generated file and exported as 'main' to satisfy the system
1802 expectations. Redirect main_identifier_node in this case. */
1803 if (asm_name == main_identifier_node)
1804 main_identifier_node = DECL_NAME (subprog_decl);
1807 process_attributes (subprog_decl, attr_list);
1809 /* Add this decl to the current binding level. */
1810 gnat_pushdecl (subprog_decl, gnat_node);
1812 /* Output the assembler code and/or RTL for the declaration. */
1813 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
1815 return subprog_decl;
1818 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1819 body. This routine needs to be invoked before processing the declarations
1820 appearing in the subprogram. */
1822 void
1823 begin_subprog_body (tree subprog_decl)
1825 tree param_decl;
1827 current_function_decl = subprog_decl;
1828 announce_function (subprog_decl);
1830 /* Enter a new binding level and show that all the parameters belong to
1831 this function. */
1832 gnat_pushlevel ();
1833 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1834 param_decl = TREE_CHAIN (param_decl))
1835 DECL_CONTEXT (param_decl) = subprog_decl;
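/* Make sure the function has its RTL created before the body is compiled. */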
1837 make_decl_rtl (subprog_decl);
1839 /* We handle pending sizes via the elaboration of types, so we don't need to
1840 save them. This causes them to be marked as part of the outer function
1841 and then discarded. */
1842 get_pending_sizes ();
1846 /* Helper for the genericization callback. Return a dereference of VAL
1847 if it is of a reference type. */
1849 static tree
1850 convert_from_reference (tree val)
1852 tree value_type, ref;
1854 if (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE)
1855 return val;
1857 value_type = TREE_TYPE (TREE_TYPE (val));
1858 ref = build1 (INDIRECT_REF, value_type, val);
1860 /* See if what we reference is CONST or VOLATILE, which requires
1861 looking into array types to get to the component type. */
1863 while (TREE_CODE (value_type) == ARRAY_TYPE)
1864 value_type = TREE_TYPE (value_type);
1866 TREE_READONLY (ref)
1867 = (TYPE_QUALS (value_type) & TYPE_QUAL_CONST);
1868 TREE_THIS_VOLATILE (ref)
1869 = (TYPE_QUALS (value_type) & TYPE_QUAL_VOLATILE);
1871 TREE_SIDE_EFFECTS (ref)
1872 = (TREE_THIS_VOLATILE (ref) || TREE_SIDE_EFFECTS (val));
1874 return ref;
1877 /* Helper for the genericization callback. Returns true if T denotes
1878 a RESULT_DECL with DECL_BY_REFERENCE set. */
1880 static inline bool
1881 is_byref_result (tree t)
1883 return (TREE_CODE (t) == RESULT_DECL && DECL_BY_REFERENCE (t));
1887 /* Tree walking callback for gnat_genericize. Currently ...
1889 o Adjust references to the function's DECL_RESULT if it is marked
1890 DECL_BY_REFERENCE and so has had its type turned into a reference
1891 type at the end of the function compilation. */
1893 static tree
1894 gnat_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
1896 /* This implementation is modeled after what the C++ front-end does,
1897 which is the basis of the downstream passes' behavior. */
1899 tree stmt = *stmt_p;
1900 struct pointer_set_t *p_set = (struct pointer_set_t*) data;
1902 /* If we have a direct mention of the result decl, dereference. */
1903 if (is_byref_result (stmt))
1905 *stmt_p = convert_from_reference (stmt);
1906 *walk_subtrees = 0;
1907 return NULL;
1910 /* Otherwise, no need to walk the same tree twice. */
1911 if (pointer_set_contains (p_set, stmt))
1913 *walk_subtrees = 0;
1914 return NULL_TREE;
1917 /* If we are taking the address of what now is a reference, just get the
1918 reference value. */
1919 if (TREE_CODE (stmt) == ADDR_EXPR
1920 && is_byref_result (TREE_OPERAND (stmt, 0)))
1922 *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
1923 *walk_subtrees = 0;
1926 /* Don't dereference a by-reference RESULT_DECL inside a RETURN_EXPR. */
1927 else if (TREE_CODE (stmt) == RETURN_EXPR
1928 && TREE_OPERAND (stmt, 0)
1929 && is_byref_result (TREE_OPERAND (stmt, 0)))
1930 *walk_subtrees = 0;
1932 /* Don't look inside trees that cannot embed references of interest. */
1933 else if (IS_TYPE_OR_DECL_P (stmt))
1934 *walk_subtrees = 0;
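/* Remember that this node has been visited so it is not processed again. */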
1936 pointer_set_insert (p_set, *stmt_p);
1938 return NULL;
1941 /* Perform lowering of Ada trees to GENERIC. In particular:
1943 o Turn a DECL_BY_REFERENCE RESULT_DECL into a real by-reference decl
1944 and adjust all the references to this decl accordingly. */
1946 static void
1947 gnat_genericize (tree fndecl)
1949 /* Prior to GCC 4, an explicit By_Reference result mechanism for a function
1950 was handled by simply setting TREE_ADDRESSABLE on the result type.
1951 Everything required to actually pass by invisible ref using the target
1952 mechanism (e.g. extra parameter) was handled at RTL expansion time.
1954 This doesn't work with GCC 4 any more for several reasons. First, the
1955 gimplification process might need the creation of temporaries of this
1956 type, and the gimplifier ICEs on such attempts. Second, the middle-end
1957 now relies on a different attribute for such cases (DECL_BY_REFERENCE on
1958 RESULT/PARM_DECLs), and expects the user-invisible by-reference-ness to
1959 be explicitly accounted for by the front-end in the function body.
1961 We achieve the complete transformation in two steps:
1963 1/ create_subprog_decl performs early attribute tweaks: it clears
1964 TREE_ADDRESSABLE from the result type and sets DECL_BY_REFERENCE on
1965 the result decl. The former ensures that the bit isn't set in the GCC
1966 tree saved for the function, so prevents ICEs on temporary creation.
1967 The latter we use here to trigger the rest of the processing.
1969 2/ This function performs the type transformation on the result decl
1970 and adjusts all the references to this decl from the function body
1971 accordingly.
1973 Clearing TREE_ADDRESSABLE from the type differs from the C++ front-end
1974 strategy, which escapes the gimplifier temporary creation issues by
1975 creating its own temporaries using TARGET_EXPR nodes. Our way relies
1976 on simple specific support code in aggregate_value_p to look at the
1977 target function result decl explicitly. */
1979 struct pointer_set_t *p_set;
1980 tree decl_result = DECL_RESULT (fndecl);
1982 if (!DECL_BY_REFERENCE (decl_result))
1983 return;
1985 /* Make the DECL_RESULT explicitly by-reference and adjust all the
1986 occurrences in the function body using the common tree-walking facility.
1987 We want to see every occurrence of the result decl to adjust the
1988 referencing tree, so need to use our own pointer set to control which
1989 trees should be visited again or not. */
1991 p_set = pointer_set_create ();
1993 TREE_TYPE (decl_result) = build_reference_type (TREE_TYPE (decl_result));
1994 TREE_ADDRESSABLE (decl_result) = 0;
1995 relayout_decl (decl_result);
1997 walk_tree (&DECL_SAVED_TREE (fndecl), gnat_genericize_r, p_set, NULL);
1999 pointer_set_destroy (p_set);
2002 /* Finish the definition of the current subprogram BODY and compile it all the
2003 way to assembler language output. ELAB_P tells if this is called for an
2004 elaboration routine, to be entirely discarded if empty. */
2006 void
2007 end_subprog_body (tree body, bool elab_p)
2009 tree fndecl = current_function_decl;
2011 /* Mark the BLOCK for this level as being for this function and pop the
2012 level. Since the vars in it are the parameters, clear them. */
2013 BLOCK_VARS (current_binding_level->block) = 0;
2014 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
2015 DECL_INITIAL (fndecl) = current_binding_level->block;
2016 gnat_poplevel ();
2018 /* We handle pending sizes via the elaboration of types, so we don't
2019 need to save them. */
2020 get_pending_sizes ();
2022 /* Mark the RESULT_DECL as being in this subprogram. */
2023 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
2025 DECL_SAVED_TREE (fndecl) = body;
2027 current_function_decl = DECL_CONTEXT (fndecl);
2028 set_cfun (NULL);
2030 /* We cannot track the location of errors past this point. */
2031 error_gnat_node = Empty;
2033 /* If we're only annotating types, don't actually compile this function. */
2034 if (type_annotate_only)
2035 return;
2037 /* Perform the required pre-gimplification transformations on the tree. */
2038 gnat_genericize (fndecl);
2040 /* We do different things for nested and non-nested functions.
2041 ??? This should be in cgraph. */
2042 if (!DECL_CONTEXT (fndecl))
2044 gnat_gimplify_function (fndecl);
2046 /* If this is an empty elaboration proc, just discard the node.
2047 Otherwise, compile further. */
2048 if (elab_p && empty_body_p (gimple_body (fndecl)))
2049 cgraph_remove_node (cgraph_node (fndecl));
2050 else
2051 cgraph_finalize_function (fndecl, false);
2053 else
2054 /* Register this function with cgraph just far enough to get it
2055 added to our parent's nested function list. */
2056 (void) cgraph_node (fndecl);
2059 /* Convert FNDECL's code to GIMPLE and handle any nested functions. */
2061 static void
2062 gnat_gimplify_function (tree fndecl)
2064 struct cgraph_node *cgn;
2066 dump_function (TDI_original, fndecl);
2067 gimplify_function_tree (fndecl);
2068 dump_function (TDI_generic, fndecl);
2070 /* Convert all nested functions to GIMPLE now. We do things in this order
2071 so that items like VLA sizes are expanded properly in the context of the
2072 correct function. */
2073 cgn = cgraph_node (fndecl);
2074 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
2075 gnat_gimplify_function (cgn->decl);
2078 tree
2079 gnat_builtin_function (tree decl)
2081 gnat_pushdecl (decl, Empty);
2082 return decl;
2085 /* Return an integer type with the number of bits of precision given by
2086 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2087 it is a signed type. */
2089 tree
2090 gnat_type_for_size (unsigned precision, int unsignedp)
2092 tree t;
2093 char type_name[20];
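/* Reuse any type already created for this precision and signedness. */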
2095 if (precision <= 2 * MAX_BITS_PER_WORD
2096 && signed_and_unsigned_types[precision][unsignedp])
2097 return signed_and_unsigned_types[precision][unsignedp];
2099 if (unsignedp)
2100 t = make_unsigned_type (precision);
2101 else
2102 t = make_signed_type (precision);
2104 if (precision <= 2 * MAX_BITS_PER_WORD)
2105 signed_and_unsigned_types[precision][unsignedp] = t;
2107 if (!TYPE_NAME (t))
2109 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2110 TYPE_NAME (t) = get_identifier (type_name);
2113 return t;
2116 /* Likewise for floating-point types. */
2118 static tree
2119 float_type_for_precision (int precision, enum machine_mode mode)
2121 tree t;
2122 char type_name[20];
2124 if (float_types[(int) mode])
2125 return float_types[(int) mode];
2127 float_types[(int) mode] = t = make_node (REAL_TYPE);
2128 TYPE_PRECISION (t) = precision;
2129 layout_type (t);
2131 gcc_assert (TYPE_MODE (t) == mode);
2132 if (!TYPE_NAME (t))
2134 sprintf (type_name, "FLOAT_%d", precision);
2135 TYPE_NAME (t) = get_identifier (type_name);
2138 return t;
2141 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2142 an unsigned type; otherwise a signed type is returned. */
2144 tree
2145 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2147 if (mode == BLKmode)
2148 return NULL_TREE;
2149 else if (mode == VOIDmode)
2150 return void_type_node;
2151 else if (COMPLEX_MODE_P (mode))
2152 return NULL_TREE;
2153 else if (SCALAR_FLOAT_MODE_P (mode))
2154 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2155 else if (SCALAR_INT_MODE_P (mode))
2156 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2157 else
2158 return NULL_TREE;
2161 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2163 tree
2164 gnat_unsigned_type (tree type_node)
2166 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2168 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2170 type = copy_node (type);
2171 TREE_TYPE (type) = type_node;
2173 else if (TREE_TYPE (type_node)
2174 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2175 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2177 type = copy_node (type);
2178 TREE_TYPE (type) = TREE_TYPE (type_node);
2181 return type;
2184 /* Return the signed version of a TYPE_NODE, a scalar type. */
2186 tree
2187 gnat_signed_type (tree type_node)
2189 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2191 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2193 type = copy_node (type);
2194 TREE_TYPE (type) = type_node;
2196 else if (TREE_TYPE (type_node)
2197 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2198 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2200 type = copy_node (type);
2201 TREE_TYPE (type) = TREE_TYPE (type_node);
2204 return type;
2207 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2208 transparently converted to each other. */
2210 int
2211 gnat_types_compatible_p (tree t1, tree t2)
2213 enum tree_code code;
2215 /* This is the default criterion. */
2216 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
2217 return 1;
2219 /* We only check structural equivalence here. */
2220 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
2221 return 0;
2223 /* Array types are also compatible if they are constrained and have
2224 the same component type and the same domain. */
2225 if (code == ARRAY_TYPE
2226 && TREE_TYPE (t1) == TREE_TYPE (t2)
2227 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
2228 || (TYPE_DOMAIN (t1)
2229 && TYPE_DOMAIN (t2)
2230 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
2231 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
2232 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
2233 TYPE_MAX_VALUE (TYPE_DOMAIN (t2))))))
2234 return 1;
2236 /* Padding record types are also compatible if they pad the same
2237 type and have the same constant size. */
2238 if (code == RECORD_TYPE
2239 && TYPE_IS_PADDING_P (t1) && TYPE_IS_PADDING_P (t2)
2240 && TREE_TYPE (TYPE_FIELDS (t1)) == TREE_TYPE (TYPE_FIELDS (t2))
2241 && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
2242 return 1;
2244 return 0;
2247 /* EXP is an expression for the size of an object. If this size contains
2248 discriminant references, replace them with the maximum (if MAX_P) or
2249 minimum (if !MAX_P) possible value of the discriminant. */
2251 tree
2252 max_size (tree exp, bool max_p)
2254 enum tree_code code = TREE_CODE (exp);
2255 tree type = TREE_TYPE (exp);
2257 switch (TREE_CODE_CLASS (code))
2259 case tcc_declaration:
2260 case tcc_constant:
2261 return exp;
2263 case tcc_vl_exp:
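/* For a call, apply max_size to each argument and rebuild the call with the adjusted arguments. */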
2264 if (code == CALL_EXPR)
2266 tree *argarray;
2267 int i, n = call_expr_nargs (exp);
2268 gcc_assert (n > 0);
2270 argarray = (tree *) alloca (n * sizeof (tree));
2271 for (i = 0; i < n; i++)
2272 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
2273 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
2275 break;
2277 case tcc_reference:
2278 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2279 modify. Otherwise, we treat it like a variable. */
2280 if (!CONTAINS_PLACEHOLDER_P (exp))
2281 return exp;
2283 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2284 return
2285 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
2287 case tcc_comparison:
2288 return max_p ? size_one_node : size_zero_node;
2290 case tcc_unary:
2291 case tcc_binary:
2292 case tcc_expression:
2293 switch (TREE_CODE_LENGTH (code))
2295 case 1:
2296 if (code == NON_LVALUE_EXPR)
2297 return max_size (TREE_OPERAND (exp, 0), max_p);
2298 else
2299 return
2300 fold_build1 (code, type,
2301 max_size (TREE_OPERAND (exp, 0),
2302 code == NEGATE_EXPR ? !max_p : max_p));
2304 case 2:
2305 if (code == COMPOUND_EXPR)
2306 return max_size (TREE_OPERAND (exp, 1), max_p);
2308 /* Calculate "(A ? B : C) - D" as "A ? B - D : C - D" which
2309 may provide a tighter bound on max_size. */
2310 if (code == MINUS_EXPR
2311 && TREE_CODE (TREE_OPERAND (exp, 0)) == COND_EXPR)
2313 tree lhs = fold_build2 (MINUS_EXPR, type,
2314 TREE_OPERAND (TREE_OPERAND (exp, 0), 1),
2315 TREE_OPERAND (exp, 1));
2316 tree rhs = fold_build2 (MINUS_EXPR, type,
2317 TREE_OPERAND (TREE_OPERAND (exp, 0), 2),
2318 TREE_OPERAND (exp, 1));
2319 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2320 max_size (lhs, max_p),
2321 max_size (rhs, max_p));
2325 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2326 tree rhs = max_size (TREE_OPERAND (exp, 1),
2327 code == MINUS_EXPR ? !max_p : max_p);
2329 /* Special-case wanting the maximum value of a MIN_EXPR.
2330 In that case, if one side overflows, return the other.
2331 sizetype is signed, but we know sizes are non-negative.
2332 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2333 overflowing or the maximum possible value and the RHS
2334 a variable. */
2335 if (max_p
2336 && code == MIN_EXPR
2337 && TREE_CODE (rhs) == INTEGER_CST
2338 && TREE_OVERFLOW (rhs))
2339 return lhs;
2340 else if (max_p
2341 && code == MIN_EXPR
2342 && TREE_CODE (lhs) == INTEGER_CST
2343 && TREE_OVERFLOW (lhs))
2344 return rhs;
2345 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2346 && ((TREE_CODE (lhs) == INTEGER_CST
2347 && TREE_OVERFLOW (lhs))
2348 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2349 && !TREE_CONSTANT (rhs))
2350 return lhs;
2351 else
2352 return fold_build2 (code, type, lhs, rhs);
2355 case 3:
2356 if (code == SAVE_EXPR)
2357 return exp;
2358 else if (code == COND_EXPR)
2359 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2360 max_size (TREE_OPERAND (exp, 1), max_p),
2361 max_size (TREE_OPERAND (exp, 2), max_p));
2364 /* Other tree classes cannot happen. */
2365 default:
2366 break;
2369 gcc_unreachable ();
2372 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2373 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2374 Return a constructor for the template. */
2376 tree
2377 build_template (tree template_type, tree array_type, tree expr)
2379 tree template_elts = NULL_TREE;
2380 tree bound_list = NULL_TREE;
2381 tree field;
2383 while (TREE_CODE (array_type) == RECORD_TYPE
2384 && (TYPE_IS_PADDING_P (array_type)
2385 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
2386 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2388 if (TREE_CODE (array_type) == ARRAY_TYPE
2389 || (TREE_CODE (array_type) == INTEGER_TYPE
2390 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2391 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2393 /* First make the list for a CONSTRUCTOR for the template. Go down the
2394 field list of the template instead of the type chain because this
2395 array might be an Ada array of arrays and we can't tell where the
2396 nested arrays stop being the underlying object. */
2398 for (field = TYPE_FIELDS (template_type); field;
2399 (bound_list
2400 ? (bound_list = TREE_CHAIN (bound_list))
2401 : (array_type = TREE_TYPE (array_type))),
2402 field = TREE_CHAIN (TREE_CHAIN (field)))
2404 tree bounds, min, max;
2406 /* If we have a bound list, get the bounds from there. Likewise
2407 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2408 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2409 This will give us a maximum range. */
2410 if (bound_list)
2411 bounds = TREE_VALUE (bound_list);
2412 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2413 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2414 else if (expr && TREE_CODE (expr) == PARM_DECL
2415 && DECL_BY_COMPONENT_PTR_P (expr))
2416 bounds = TREE_TYPE (field);
2417 else
2418 gcc_unreachable ();
2420 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
2421 max = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MAX_VALUE (bounds));
2423 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2424 substitute it from OBJECT. */
2425 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
2426 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
2428 template_elts = tree_cons (TREE_CHAIN (field), max,
2429 tree_cons (field, min, template_elts));
2432 return gnat_build_constructor (template_type, nreverse (template_elts));
2435 /* Build a 32bit VMS descriptor from a Mechanism_Type, which must specify
2436 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2437 in the type contains in its DECL_INITIAL the expression to use when
2438 a constructor is made for the type. GNAT_ENTITY is an entity used
2439 to print out an error message if the mechanism cannot be applied to
2440 an object of that type and also for the name. */
2442 tree
2443 build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2445 tree record_type = make_node (RECORD_TYPE);
2446 tree pointer32_type;
2447 tree field_list = 0;
2448 int class;
2449 int dtype = 0;
2450 tree inner_type;
2451 int ndim;
2452 int i;
2453 tree *idx_arr;
2454 tree tem;
2456 /* If TYPE is an unconstrained array, use the underlying array type. */
2457 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2458 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2460 /* If this is an array, compute the number of dimensions in the array,
2461 get the index types, and point to the inner type. */
2462 if (TREE_CODE (type) != ARRAY_TYPE)
2463 ndim = 0;
2464 else
2465 for (ndim = 1, inner_type = type;
2466 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2467 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2468 ndim++, inner_type = TREE_TYPE (inner_type))
2471 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2473 if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
2474 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2475 for (i = ndim - 1, inner_type = type;
2476 i >= 0;
2477 i--, inner_type = TREE_TYPE (inner_type))
2478 idx_arr[i] = TYPE_DOMAIN (inner_type);
2479 else
2480 for (i = 0, inner_type = type;
2481 i < ndim;
2482 i++, inner_type = TREE_TYPE (inner_type))
2483 idx_arr[i] = TYPE_DOMAIN (inner_type);
2485 /* Now get the DTYPE value. */
2486 switch (TREE_CODE (type))
2488 case INTEGER_TYPE:
2489 case ENUMERAL_TYPE:
2490 case BOOLEAN_TYPE:
2491 if (TYPE_VAX_FLOATING_POINT_P (type))
2492 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2494 case 6:
2495 dtype = 10;
2496 break;
2497 case 9:
2498 dtype = 11;
2499 break;
2500 case 15:
2501 dtype = 27;
2502 break;
2504 else
2505 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2507 case 8:
2508 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2509 break;
2510 case 16:
2511 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2512 break;
2513 case 32:
2514 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2515 break;
2516 case 64:
2517 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2518 break;
2519 case 128:
2520 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2521 break;
2523 break;
2525 case REAL_TYPE:
2526 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2527 break;
2529 case COMPLEX_TYPE:
2530 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2531 && TYPE_VAX_FLOATING_POINT_P (type))
2532 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2534 case 6:
2535 dtype = 12;
2536 break;
2537 case 9:
2538 dtype = 13;
2539 break;
2540 case 15:
2541 dtype = 29;
2543 else
2544 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2545 break;
2547 case ARRAY_TYPE:
2548 dtype = 14;
2549 break;
2551 default:
2552 break;
2555 /* Get the CLASS value. */
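/* The numeric values below are the VMS descriptor class codes; they match the iclass values tested in convert_vms_descriptor32 (1 = S, 4 = A, 10 = NCA, 15 = SB). */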
2556 switch (mech)
2558 case By_Descriptor_A:
2559 case By_Short_Descriptor_A:
2560 class = 4;
2561 break;
2562 case By_Descriptor_NCA:
2563 case By_Short_Descriptor_NCA:
2564 class = 10;
2565 break;
2566 case By_Descriptor_SB:
2567 case By_Short_Descriptor_SB:
2568 class = 15;
2569 break;
2570 case By_Descriptor:
2571 case By_Short_Descriptor:
2572 case By_Descriptor_S:
2573 case By_Short_Descriptor_S:
2574 default:
2575 class = 1;
2576 break;
2579 /* Make the type for a descriptor for VMS. The first four fields
2580 are the same for all types. */
2582 field_list
2583 = chainon (field_list,
2584 make_descriptor_field
2585 ("LENGTH", gnat_type_for_size (16, 1), record_type,
2586 size_in_bytes ((mech == By_Descriptor_A ||
2587 mech == By_Short_Descriptor_A)
2588 ? inner_type : type)));
2590 field_list = chainon (field_list,
2591 make_descriptor_field ("DTYPE",
2592 gnat_type_for_size (8, 1),
2593 record_type, size_int (dtype)));
2594 field_list = chainon (field_list,
2595 make_descriptor_field ("CLASS",
2596 gnat_type_for_size (8, 1),
2597 record_type, size_int (class)));
2599 /* Of course this will crash at run-time if the address space is not
2600 within the low 32 bits, but there is nothing else we can do. */
2601 pointer32_type = build_pointer_type_for_mode (type, SImode, false);
2603 field_list
2604 = chainon (field_list,
2605 make_descriptor_field
2606 ("POINTER", pointer32_type, record_type,
2607 build_unary_op (ADDR_EXPR,
2608 pointer32_type,
2609 build0 (PLACEHOLDER_EXPR, type))));
2611 switch (mech)
2613 case By_Descriptor:
2614 case By_Short_Descriptor:
2615 case By_Descriptor_S:
2616 case By_Short_Descriptor_S:
2617 break;
2619 case By_Descriptor_SB:
2620 case By_Short_Descriptor_SB:
2621 field_list
2622 = chainon (field_list,
2623 make_descriptor_field
2624 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2625 TREE_CODE (type) == ARRAY_TYPE
2626 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2627 field_list
2628 = chainon (field_list,
2629 make_descriptor_field
2630 ("SB_U1", gnat_type_for_size (32, 1), record_type,
2631 TREE_CODE (type) == ARRAY_TYPE
2632 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2633 break;
2635 case By_Descriptor_A:
2636 case By_Short_Descriptor_A:
2637 case By_Descriptor_NCA:
2638 case By_Short_Descriptor_NCA:
2639 field_list = chainon (field_list,
2640 make_descriptor_field ("SCALE",
2641 gnat_type_for_size (8, 1),
2642 record_type,
2643 size_zero_node));
2645 field_list = chainon (field_list,
2646 make_descriptor_field ("DIGITS",
2647 gnat_type_for_size (8, 1),
2648 record_type,
2649 size_zero_node));
2651 field_list
2652 = chainon (field_list,
2653 make_descriptor_field
2654 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2655 size_int ((mech == By_Descriptor_NCA ||
2656 mech == By_Short_Descriptor_NCA)
2657 ? 0
2658 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2659 : (TREE_CODE (type) == ARRAY_TYPE
2660 && TYPE_CONVENTION_FORTRAN_P (type)
2661 ? 224 : 192))));
2663 field_list = chainon (field_list,
2664 make_descriptor_field ("DIMCT",
2665 gnat_type_for_size (8, 1),
2666 record_type,
2667 size_int (ndim)));
2669 field_list = chainon (field_list,
2670 make_descriptor_field ("ARSIZE",
2671 gnat_type_for_size (32, 1),
2672 record_type,
2673 size_in_bytes (type)));
2675 /* Now build a pointer to the 0,0,0... element. */
2676 tem = build0 (PLACEHOLDER_EXPR, type);
2677 for (i = 0, inner_type = type; i < ndim;
2678 i++, inner_type = TREE_TYPE (inner_type))
2679 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2680 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2681 NULL_TREE, NULL_TREE);
2683 field_list
2684 = chainon (field_list,
2685 make_descriptor_field
2686 ("A0",
2687 build_pointer_type_for_mode (inner_type, SImode, false),
2688 record_type,
2689 build1 (ADDR_EXPR,
2690 build_pointer_type_for_mode (inner_type, SImode,
2691 false),
2692 tem)));
2694 /* Next come the addressing coefficients. */
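/* Each coefficient field holds the extent of one dimension scaled by the running product of the previous extents; the product is only accumulated for NCA descriptors (the S fields), so A descriptors (the M fields) get the plain extents. */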
2695 tem = size_one_node;
2696 for (i = 0; i < ndim; i++)
2698 char fname[3];
2699 tree idx_length
2700 = size_binop (MULT_EXPR, tem,
2701 size_binop (PLUS_EXPR,
2702 size_binop (MINUS_EXPR,
2703 TYPE_MAX_VALUE (idx_arr[i]),
2704 TYPE_MIN_VALUE (idx_arr[i])),
2705 size_int (1)));
2707 fname[0] = ((mech == By_Descriptor_NCA ||
2708 mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
2709 fname[1] = '0' + i, fname[2] = 0;
2710 field_list
2711 = chainon (field_list,
2712 make_descriptor_field (fname,
2713 gnat_type_for_size (32, 1),
2714 record_type, idx_length));
2716 if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
2717 tem = idx_length;
2720 /* Finally here are the bounds. */
2721 for (i = 0; i < ndim; i++)
2723 char fname[3];
2725 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2726 field_list
2727 = chainon (field_list,
2728 make_descriptor_field
2729 (fname, gnat_type_for_size (32, 1), record_type,
2730 TYPE_MIN_VALUE (idx_arr[i])));
2732 fname[0] = 'U';
2733 field_list
2734 = chainon (field_list,
2735 make_descriptor_field
2736 (fname, gnat_type_for_size (32, 1), record_type,
2737 TYPE_MAX_VALUE (idx_arr[i])));
2739 break;
2741 default:
2742 post_error ("unsupported descriptor type for &", gnat_entity);
2745 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC");
2746 finish_record_type (record_type, field_list, 0, true);
2747 return record_type;
2750 /* Build a 64bit VMS descriptor from a Mechanism_Type, which must specify
2751 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2752 in the type contains in its DECL_INITIAL the expression to use when
2753 a constructor is made for the type. GNAT_ENTITY is an entity used
2754 to print out an error message if the mechanism cannot be applied to
2755 an object of that type and also for the name. */
2757 tree
2758 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2760 tree record64_type = make_node (RECORD_TYPE);
2761 tree pointer64_type;
2762 tree field_list64 = 0;
2763 int class;
2764 int dtype = 0;
2765 tree inner_type;
2766 int ndim;
2767 int i;
2768 tree *idx_arr;
2769 tree tem;
2771 /* If TYPE is an unconstrained array, use the underlying array type. */
2772 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2773 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2775 /* If this is an array, compute the number of dimensions in the array,
2776 get the index types, and point to the inner type. */
2777 if (TREE_CODE (type) != ARRAY_TYPE)
2778 ndim = 0;
2779 else
2780 for (ndim = 1, inner_type = type;
2781 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2782 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2783 ndim++, inner_type = TREE_TYPE (inner_type))
2786 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2788 if (mech != By_Descriptor_NCA
2789 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2790 for (i = ndim - 1, inner_type = type;
2791 i >= 0;
2792 i--, inner_type = TREE_TYPE (inner_type))
2793 idx_arr[i] = TYPE_DOMAIN (inner_type);
2794 else
2795 for (i = 0, inner_type = type;
2796 i < ndim;
2797 i++, inner_type = TREE_TYPE (inner_type))
2798 idx_arr[i] = TYPE_DOMAIN (inner_type);
2800 /* Now get the DTYPE value. */
2801 switch (TREE_CODE (type))
2803 case INTEGER_TYPE:
2804 case ENUMERAL_TYPE:
2805 case BOOLEAN_TYPE:
2806 if (TYPE_VAX_FLOATING_POINT_P (type))
2807 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2809 case 6:
2810 dtype = 10;
2811 break;
2812 case 9:
2813 dtype = 11;
2814 break;
2815 case 15:
2816 dtype = 27;
2817 break;
2819 else
2820 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2822 case 8:
2823 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2824 break;
2825 case 16:
2826 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2827 break;
2828 case 32:
2829 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2830 break;
2831 case 64:
2832 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2833 break;
2834 case 128:
2835 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2836 break;
2838 break;
2840 case REAL_TYPE:
2841 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2842 break;
2844 case COMPLEX_TYPE:
2845 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2846 && TYPE_VAX_FLOATING_POINT_P (type))
2847 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2849 case 6:
2850 dtype = 12;
2851 break;
2852 case 9:
2853 dtype = 13;
2854 break;
2855 case 15:
2856 dtype = 29;
2858 else
2859 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2860 break;
2862 case ARRAY_TYPE:
2863 dtype = 14;
2864 break;
2866 default:
2867 break;
2870 /* Get the CLASS value. */
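/* As in the 32-bit case, the numeric values are the VMS descriptor class codes; they match the iclass values tested in convert_vms_descriptor64 (1 = S, 4 = A, 10 = NCA, 15 = SB). */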
2871 switch (mech)
2873 case By_Descriptor_A:
2874 class = 4;
2875 break;
2876 case By_Descriptor_NCA:
2877 class = 10;
2878 break;
2879 case By_Descriptor_SB:
2880 class = 15;
2881 break;
2882 case By_Descriptor:
2883 case By_Descriptor_S:
2884 default:
2885 class = 1;
2886 break;
2889 /* Make the type for a 64bit descriptor for VMS. The first six fields
2890 are the same for all types. */
2892 field_list64 = chainon (field_list64,
2893 make_descriptor_field ("MBO",
2894 gnat_type_for_size (16, 1),
2895 record64_type, size_int (1)));
2897 field_list64 = chainon (field_list64,
2898 make_descriptor_field ("DTYPE",
2899 gnat_type_for_size (8, 1),
2900 record64_type, size_int (dtype)));
2901 field_list64 = chainon (field_list64,
2902 make_descriptor_field ("CLASS",
2903 gnat_type_for_size (8, 1),
2904 record64_type, size_int (class)));
2906 field_list64 = chainon (field_list64,
2907 make_descriptor_field ("MBMO",
2908 gnat_type_for_size (32, 1),
2909 record64_type, ssize_int (-1)));
2911 field_list64
2912 = chainon (field_list64,
2913 make_descriptor_field
2914 ("LENGTH", gnat_type_for_size (64, 1), record64_type,
2915 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2917 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
2919 field_list64
2920 = chainon (field_list64,
2921 make_descriptor_field
2922 ("POINTER", pointer64_type, record64_type,
2923 build_unary_op (ADDR_EXPR,
2924 pointer64_type,
2925 build0 (PLACEHOLDER_EXPR, type))));
2927 switch (mech)
2929 case By_Descriptor:
2930 case By_Descriptor_S:
2931 break;
2933 case By_Descriptor_SB:
2934 field_list64
2935 = chainon (field_list64,
2936 make_descriptor_field
2937 ("SB_L1", gnat_type_for_size (64, 1), record64_type,
2938 TREE_CODE (type) == ARRAY_TYPE
2939 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2940 field_list64
2941 = chainon (field_list64,
2942 make_descriptor_field
2943 ("SB_U1", gnat_type_for_size (64, 1), record64_type,
2944 TREE_CODE (type) == ARRAY_TYPE
2945 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2946 break;
2948 case By_Descriptor_A:
2949 case By_Descriptor_NCA:
2950 field_list64 = chainon (field_list64,
2951 make_descriptor_field ("SCALE",
2952 gnat_type_for_size (8, 1),
2953 record64_type,
2954 size_zero_node));
2956 field_list64 = chainon (field_list64,
2957 make_descriptor_field ("DIGITS",
2958 gnat_type_for_size (8, 1),
2959 record64_type,
2960 size_zero_node));
2962 field_list64
2963 = chainon (field_list64,
2964 make_descriptor_field
2965 ("AFLAGS", gnat_type_for_size (8, 1), record64_type,
2966 size_int (mech == By_Descriptor_NCA
2967 ? 0
2968 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2969 : (TREE_CODE (type) == ARRAY_TYPE
2970 && TYPE_CONVENTION_FORTRAN_P (type)
2971 ? 224 : 192))));
2973 field_list64 = chainon (field_list64,
2974 make_descriptor_field ("DIMCT",
2975 gnat_type_for_size (8, 1),
2976 record64_type,
2977 size_int (ndim)));
2979 field_list64 = chainon (field_list64,
2980 make_descriptor_field ("MBZ",
2981 gnat_type_for_size (32, 1),
2982 record64_type,
2983 size_int (0)));
2984 field_list64 = chainon (field_list64,
2985 make_descriptor_field ("ARSIZE",
2986 gnat_type_for_size (64, 1),
2987 record64_type,
2988 size_in_bytes (type)));
2990 /* Now build a pointer to the 0,0,0... element. */
2991 tem = build0 (PLACEHOLDER_EXPR, type);
2992 for (i = 0, inner_type = type; i < ndim;
2993 i++, inner_type = TREE_TYPE (inner_type))
2994 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2995 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2996 NULL_TREE, NULL_TREE);
2998 field_list64
2999 = chainon (field_list64,
3000 make_descriptor_field
3001 ("A0",
3002 build_pointer_type_for_mode (inner_type, DImode, false),
3003 record64_type,
3004 build1 (ADDR_EXPR,
3005 build_pointer_type_for_mode (inner_type, DImode,
3006 false),
3007 tem)));
3009 /* Next come the addressing coefficients. */
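/* Same scheme as in the 32-bit descriptor: the running product of the extents is only accumulated for NCA descriptors (the S fields). */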
3010 tem = size_one_node;
3011 for (i = 0; i < ndim; i++)
3013 char fname[3];
3014 tree idx_length
3015 = size_binop (MULT_EXPR, tem,
3016 size_binop (PLUS_EXPR,
3017 size_binop (MINUS_EXPR,
3018 TYPE_MAX_VALUE (idx_arr[i]),
3019 TYPE_MIN_VALUE (idx_arr[i])),
3020 size_int (1)));
3022 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
3023 fname[1] = '0' + i, fname[2] = 0;
3024 field_list64
3025 = chainon (field_list64,
3026 make_descriptor_field (fname,
3027 gnat_type_for_size (64, 1),
3028 record64_type, idx_length));
3030 if (mech == By_Descriptor_NCA)
3031 tem = idx_length;
3034 /* Finally here are the bounds. */
3035 for (i = 0; i < ndim; i++)
3037 char fname[3];
3039 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
3040 field_list64
3041 = chainon (field_list64,
3042 make_descriptor_field
3043 (fname, gnat_type_for_size (64, 1), record64_type,
3044 TYPE_MIN_VALUE (idx_arr[i])));
3046 fname[0] = 'U';
3047 field_list64
3048 = chainon (field_list64,
3049 make_descriptor_field
3050 (fname, gnat_type_for_size (64, 1), record64_type,
3051 TYPE_MAX_VALUE (idx_arr[i])));
3053 break;
3055 default:
3056 post_error ("unsupported descriptor type for &", gnat_entity);
3059 TYPE_NAME (record64_type) = create_concat_name (gnat_entity, "DESC64");
3060 finish_record_type (record64_type, field_list64, 0, true);
3061 return record64_type;
3064 /* Utility routine for above code to make a field. */
3066 static tree
3067 make_descriptor_field (const char *name, tree type,
3068 tree rec_type, tree initial)
3070 tree field
3071 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
3073 DECL_INITIAL (field) = initial;
3074 return field;
3077 /* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
3078 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3079 which the VMS descriptor is passed. */
3081 static tree
3082 convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3084 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3085 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3086 /* The CLASS field is the 3rd field in the descriptor. */
3087 tree class = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
3088 /* The POINTER field is the 6th field in the descriptor. */
3089 tree pointer64 = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (class)));
3091 /* Retrieve the value of the POINTER field. */
3092 tree gnu_expr64
3093 = build3 (COMPONENT_REF, TREE_TYPE (pointer64), desc, pointer64, NULL_TREE);
3095 if (POINTER_TYPE_P (gnu_type))
3096 return convert (gnu_type, gnu_expr64);
3098 else if (TYPE_FAT_POINTER_P (gnu_type))
3100 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3101 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3102 tree template_type = TREE_TYPE (p_bounds_type);
3103 tree min_field = TYPE_FIELDS (template_type);
3104 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3105 tree template, template_addr, aflags, dimct, t, u;
3106 /* See the head comment of build_vms_descriptor. */
3107 int iclass = TREE_INT_CST_LOW (DECL_INITIAL (class));
3108 tree lfield, ufield;
3110 /* Convert POINTER to the type of the P_ARRAY field. */
3111 gnu_expr64 = convert (p_array_type, gnu_expr64);
3113 switch (iclass)
3115 case 1: /* Class S */
3116 case 15: /* Class SB */
3117 /* Build {1, LENGTH} template; LENGTH64 is the 5th field. */
3118 t = TREE_CHAIN (TREE_CHAIN (class));
3119 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3120 t = tree_cons (min_field,
3121 convert (TREE_TYPE (min_field), integer_one_node),
3122 tree_cons (max_field,
3123 convert (TREE_TYPE (max_field), t),
3124 NULL_TREE));
3125 template = gnat_build_constructor (template_type, t);
3126 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
3128 /* For class S, we are done. */
3129 if (iclass == 1)
3130 break;
3132 /* Test that we really have an SB descriptor, like DEC Ada. */
3133 t = build3 (COMPONENT_REF, TREE_TYPE (class), desc, class, NULL);
3134 u = convert (TREE_TYPE (class), DECL_INITIAL (class));
3135 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3136 /* If so, there is already a template in the descriptor and
3137 it is located right after the POINTER field. The fields are
3138 64 bits, so they must be repacked. */
3139 t = TREE_CHAIN (pointer64);
3140 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3141 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3143 t = TREE_CHAIN (t);
3144 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3145 ufield = convert
3146 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3148 /* Build the template in the form of a constructor. */
3149 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3150 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3151 ufield, NULL_TREE));
3152 template = gnat_build_constructor (template_type, t);
3154 /* Otherwise use the {1, LENGTH} template we built above. */
3155 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3156 build_unary_op (ADDR_EXPR, p_bounds_type,
3157 template),
3158 template_addr);
3159 break;
3161 case 4: /* Class A */
3162 /* The AFLAGS field is the 3rd field after the pointer in the
3163 descriptor. */
3164 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer64)));
3165 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3166 /* The DIMCT field is the next field in the descriptor after
3167 aflags. */
3168 t = TREE_CHAIN (t);
3169 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3170 /* Raise CONSTRAINT_ERROR if there is more than 1 dimension
3171 or if FL_COEFF or FL_BOUNDS is not set. */
3172 u = build_int_cst (TREE_TYPE (aflags), 192);
3173 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3174 build_binary_op (NE_EXPR, integer_type_node,
3175 dimct,
3176 convert (TREE_TYPE (dimct),
3177 size_one_node)),
3178 build_binary_op (NE_EXPR, integer_type_node,
3179 build2 (BIT_AND_EXPR,
3180 TREE_TYPE (aflags),
3181 aflags, u),
3182 u));
3183 /* There is already a template in the descriptor and it is located
3184 in block 3. The fields are 64 bits, so they must be repacked. */
3185 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN
3186 (t)))));
3187 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3188 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3190 t = TREE_CHAIN (t);
3191 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3192 ufield = convert
3193 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3195 /* Build the template in the form of a constructor. */
3196 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3197 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3198 ufield, NULL_TREE));
3199 template = gnat_build_constructor (template_type, t);
3200 template = build3 (COND_EXPR, p_bounds_type, u,
3201 build_call_raise (CE_Length_Check_Failed, Empty,
3202 N_Raise_Constraint_Error),
3203 template);
3204 template_addr = build_unary_op (ADDR_EXPR, p_bounds_type, template);
3205 break;
3207 case 10: /* Class NCA */
3208 default:
3209 post_error ("unsupported descriptor type for &", gnat_subprog);
3210 template_addr = integer_zero_node;
3211 break;
3214 /* Build the fat pointer in the form of a constructor. */
3215 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr64,
3216 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3217 template_addr, NULL_TREE));
3218 return gnat_build_constructor (gnu_type, t);
3221 else
3222 gcc_unreachable ();
3225 /* Convert GNU_EXPR, a pointer to a 32bit VMS descriptor, to GNU_TYPE, a
3226 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3227 which the VMS descriptor is passed. */
3229 static tree
3230 convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3232 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3233 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3234 /* The CLASS field is the 3rd field in the descriptor. */
3235 tree class = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
3236 /* The POINTER field is the 4th field in the descriptor. */
3237 tree pointer = TREE_CHAIN (class);
3239 /* Retrieve the value of the POINTER field. */
3240 tree gnu_expr32
3241 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
3243 if (POINTER_TYPE_P (gnu_type))
3244 return convert (gnu_type, gnu_expr32);
3246 else if (TYPE_FAT_POINTER_P (gnu_type))
3248 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3249 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3250 tree template_type = TREE_TYPE (p_bounds_type);
3251 tree min_field = TYPE_FIELDS (template_type);
3252 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3253 tree template, template_addr, aflags, dimct, t, u;
3254 /* See the head comment of build_vms_descriptor. */
3255 int iclass = TREE_INT_CST_LOW (DECL_INITIAL (class));
3257 /* Convert POINTER to the type of the P_ARRAY field. */
3258 gnu_expr32 = convert (p_array_type, gnu_expr32);
3260 switch (iclass)
3262 case 1: /* Class S */
3263 case 15: /* Class SB */
3264 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
3265 t = TYPE_FIELDS (desc_type);
3266 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3267 t = tree_cons (min_field,
3268 convert (TREE_TYPE (min_field), integer_one_node),
3269 tree_cons (max_field,
3270 convert (TREE_TYPE (max_field), t),
3271 NULL_TREE));
3272 template = gnat_build_constructor (template_type, t);
3273 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
3275 /* For class S, we are done. */
3276 if (iclass == 1)
3277 break;
3279 /* Test that we really have an SB descriptor, like DEC Ada. */
3280 t = build3 (COMPONENT_REF, TREE_TYPE (class), desc, class, NULL);
3281 u = convert (TREE_TYPE (class), DECL_INITIAL (class));
3282 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3283 /* If so, there is already a template in the descriptor and
3284 it is located right after the POINTER field. */
3285 t = TREE_CHAIN (pointer);
3286 template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3287 /* Otherwise use the {1, LENGTH} template we built above. */
3288 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3289 build_unary_op (ADDR_EXPR, p_bounds_type,
3290 template),
3291 template_addr);
3292 break;
3294 case 4: /* Class A */
3295 /* The AFLAGS field is the 7th field in the descriptor. */
3296 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer)));
3297 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3298 /* The DIMCT field is the 8th field in the descriptor. */
3299 t = TREE_CHAIN (t);
3300 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3301 /* Raise CONSTRAINT_ERROR if there is more than 1 dimension
3302 or if FL_COEFF or FL_BOUNDS is not set. */
3303 u = build_int_cst (TREE_TYPE (aflags), 192);
3304 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3305 build_binary_op (NE_EXPR, integer_type_node,
3306 dimct,
3307 convert (TREE_TYPE (dimct),
3308 size_one_node)),
3309 build_binary_op (NE_EXPR, integer_type_node,
3310 build2 (BIT_AND_EXPR,
3311 TREE_TYPE (aflags),
3312 aflags, u),
3313 u));
3314 /* There is already a template in the descriptor and it is
3315 located at the start of block 3 (12th field). */
3316 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (t))));
3317 template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3318 template = build3 (COND_EXPR, p_bounds_type, u,
3319 build_call_raise (CE_Length_Check_Failed, Empty,
3320 N_Raise_Constraint_Error),
3321 template);
3322 template_addr = build_unary_op (ADDR_EXPR, p_bounds_type, template);
3323 break;
3325 case 10: /* Class NCA */
3326 default:
3327 post_error ("unsupported descriptor type for &", gnat_subprog);
3328 template_addr = integer_zero_node;
3329 break;
3332 /* Build the fat pointer in the form of a constructor. */
3333 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr32,
3334 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3335 template_addr, NULL_TREE));
3337 return gnat_build_constructor (gnu_type, t);
3340 else
3341 gcc_unreachable ();
3344 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
3345 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
3346 pointer type of GNU_EXPR. GNAT_SUBPROG is the subprogram to which the
3347 VMS descriptor is passed. */
3349 static tree
3350 convert_vms_descriptor (tree gnu_type, tree gnu_expr, tree gnu_expr_alt_type,
3351 Entity_Id gnat_subprog)
3353 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3354 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3355 tree mbo = TYPE_FIELDS (desc_type);
3356 const char *mbostr = IDENTIFIER_POINTER (DECL_NAME (mbo));
3357 tree mbmo = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (mbo)));
3358 tree is64bit, gnu_expr32, gnu_expr64;
3360 /* If the field name is not MBO, the descriptor must be 32-bit and have
3361 no alternate. Otherwise the primary is 64-bit and the alternate 32-bit. */
3362 if (strcmp (mbostr, "MBO") != 0)
3363 return convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
3365 /* Build the test for 64-bit descriptor. */
3366 mbo = build3 (COMPONENT_REF, TREE_TYPE (mbo), desc, mbo, NULL_TREE);
3367 mbmo = build3 (COMPONENT_REF, TREE_TYPE (mbmo), desc, mbmo, NULL_TREE);
3368 is64bit
3369 = build_binary_op (TRUTH_ANDIF_EXPR, integer_type_node,
3370 build_binary_op (EQ_EXPR, integer_type_node,
3371 convert (integer_type_node, mbo),
3372 integer_one_node),
3373 build_binary_op (EQ_EXPR, integer_type_node,
3374 convert (integer_type_node, mbmo),
3375 integer_minus_one_node));
3377 /* Build the 2 possible end results. */
3378 gnu_expr64 = convert_vms_descriptor64 (gnu_type, gnu_expr, gnat_subprog);
3379 gnu_expr = fold_convert (gnu_expr_alt_type, gnu_expr);
3380 gnu_expr32 = convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
3382 return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
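
/* Editorial sketch, not part of utils.c: the 32/64-bit discrimination test
   built just above, expressed as plain C. The struct is a hypothetical
   stand-in for the descriptor prefix; only the MBO ("must be one") and
   MBMO ("must be minus one") fields matter here, and the intervening
   fields are placeholders. */

struct vms_desc_prefix_sketch
{
  unsigned short mbo;        /* 1 in a 64-bit descriptor */
  unsigned char dtype;       /* placeholder field */
  unsigned char desc_class;  /* placeholder field */
  int mbmo;                  /* -1 in a 64-bit descriptor */
};

static int
is_64bit_descriptor_sketch (const struct vms_desc_prefix_sketch *d)
{
  return d->mbo == 1 && d->mbmo == -1;
}
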
3385 /* Build a stub for the subprogram specified by the GCC tree GNU_SUBPROG
3386 and the GNAT node GNAT_SUBPROG. */
3388 void
3389 build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
3391 tree gnu_subprog_type, gnu_subprog_addr, gnu_subprog_call;
3392 tree gnu_stub_param, gnu_param_list, gnu_arg_types, gnu_param;
3393 tree gnu_stub_decl = DECL_FUNCTION_STUB (gnu_subprog);
3394 tree gnu_body;
3396 gnu_subprog_type = TREE_TYPE (gnu_subprog);
3397 gnu_param_list = NULL_TREE;
3399 begin_subprog_body (gnu_stub_decl);
3400 gnat_pushlevel ();
3402 start_stmt_group ();
3404 /* Loop over the parameters of the stub and translate any of them
3405 passed by descriptor into a by reference one. */
3406 for (gnu_stub_param = DECL_ARGUMENTS (gnu_stub_decl),
3407 gnu_arg_types = TYPE_ARG_TYPES (gnu_subprog_type);
3408 gnu_stub_param;
3409 gnu_stub_param = TREE_CHAIN (gnu_stub_param),
3410 gnu_arg_types = TREE_CHAIN (gnu_arg_types))
3412 if (DECL_BY_DESCRIPTOR_P (gnu_stub_param))
3413 gnu_param
3414 = convert_vms_descriptor (TREE_VALUE (gnu_arg_types),
3415 gnu_stub_param,
3416 DECL_PARM_ALT_TYPE (gnu_stub_param),
3417 gnat_subprog);
3418 else
3419 gnu_param = gnu_stub_param;
3421 gnu_param_list = tree_cons (NULL_TREE, gnu_param, gnu_param_list);
3424 gnu_body = end_stmt_group ();
3426 /* Invoke the internal subprogram. */
3427 gnu_subprog_addr = build1 (ADDR_EXPR, build_pointer_type (gnu_subprog_type),
3428 gnu_subprog);
3429 gnu_subprog_call = build_call_list (TREE_TYPE (gnu_subprog_type),
3430 gnu_subprog_addr,
3431 nreverse (gnu_param_list));
3433 /* Propagate the return value, if any. */
3434 if (VOID_TYPE_P (TREE_TYPE (gnu_subprog_type)))
3435 append_to_statement_list (gnu_subprog_call, &gnu_body);
3436 else
3437 append_to_statement_list (build_return_expr (DECL_RESULT (gnu_stub_decl),
3438 gnu_subprog_call),
3439 &gnu_body);
3441 gnat_poplevel ();
3443 allocate_struct_function (gnu_stub_decl, false);
3444 end_subprog_body (gnu_body, false);
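
/* Editorial sketch, not part of utils.c: the shape of the stub generated
   above, written as ordinary C. All names and types below are hypothetical
   and only illustrate "translate each descriptor-passed parameter into a
   by-reference one, call the internal subprogram, propagate the result". */

struct vms_descriptor_sketch;   /* opaque stand-in for a descriptor */
extern int *descriptor_to_pointer_sketch (struct vms_descriptor_sketch *);
extern int internal_subprog_sketch (int *);

static int
stub_sketch (struct vms_descriptor_sketch *by_descriptor_param)
{
  /* Convert the descriptor-passed parameter, invoke the internal
     subprogram and pass its return value through. */
  return internal_subprog_sketch
         (descriptor_to_pointer_sketch (by_descriptor_param));
}
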
3447 /* Build a type to be used to represent an aliased object whose nominal
3448 type is an unconstrained array. This consists of a RECORD_TYPE containing
3449 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
3450 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
3451 is used to represent an arbitrary unconstrained object. Use NAME
3452 as the name of the record. */
3454 tree
3455 build_unc_object_type (tree template_type, tree object_type, tree name)
3457 tree type = make_node (RECORD_TYPE);
3458 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
3459 template_type, type, 0, 0, 0, 1);
3460 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
3461 type, 0, 0, 0, 1);
3463 TYPE_NAME (type) = name;
3464 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3465 finish_record_type (type,
3466 chainon (chainon (NULL_TREE, template_field),
3467 array_field),
3468 0, false);
3470 return type;
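
/* Editorial sketch, not part of utils.c: the record built above for a
   one-dimensional array of integers, with a hypothetical bounds layout.
   The BOUNDS template comes first and the ARRAY data second. */

struct unc_object_sketch
{
  struct { int lb, ub; } bounds;   /* field "BOUNDS": the template */
  int array[1];                    /* field "ARRAY": the constrained data */
};
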
3473 /* Same, taking a thin or fat pointer type instead of a template type. */
3475 tree
3476 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3477 tree name)
3479 tree template_type;
3481 gcc_assert (TYPE_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3483 template_type
3484 = (TYPE_FAT_POINTER_P (thin_fat_ptr_type)
3485 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3486 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3487 return build_unc_object_type (template_type, object_type, name);
3490 /* Shift the component offsets within an unconstrained object TYPE to make it
3491 suitable for use as a designated type for thin pointers. */
3493 void
3494 shift_unc_components_for_thin_pointers (tree type)
3496 /* Thin pointer values designate the ARRAY data of an unconstrained object,
3497 allocated past the BOUNDS template. The designated type is adjusted to
3498 have ARRAY at position zero and the template at a negative offset, so
3499 that COMPONENT_REFs on (*thin_ptr) designate the proper location. */
3501 tree bounds_field = TYPE_FIELDS (type);
3502 tree array_field = TREE_CHAIN (TYPE_FIELDS (type));
3504 DECL_FIELD_OFFSET (bounds_field)
3505 = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));
3507 DECL_FIELD_OFFSET (array_field) = size_zero_node;
3508 DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
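
/* Editorial sketch, not part of utils.c: what the offset shift above
   achieves. A thin pointer designates the ARRAY data and the BOUNDS
   template sits just before it in memory, so it is reachable at a
   negative offset. The types and the arithmetic below are illustrative
   assumptions only. */

struct thin_bounds_sketch { int lb, ub; };

static struct thin_bounds_sketch *
thin_ptr_bounds_sketch (int *thin_ptr)
{
  /* BOUNDS lives immediately before the array data. */
  return (struct thin_bounds_sketch *)
         ((char *) thin_ptr - sizeof (struct thin_bounds_sketch));
}
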
3511 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
3512 In the normal case this is just two adjustments, but we have more to
3513 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
3515 void
3516 update_pointer_to (tree old_type, tree new_type)
3518 tree ptr = TYPE_POINTER_TO (old_type);
3519 tree ref = TYPE_REFERENCE_TO (old_type);
3520 tree ptr1, ref1;
3521 tree type;
3523 /* If this is the main variant, process all the other variants first. */
3524 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3525 for (type = TYPE_NEXT_VARIANT (old_type); type;
3526 type = TYPE_NEXT_VARIANT (type))
3527 update_pointer_to (type, new_type);
3529 /* If no pointers and no references, we are done. */
3530 if (!ptr && !ref)
3531 return;
3533 /* Merge the old type qualifiers in the new type.
3535 Each old variant has qualifiers for specific reasons, and the new
3536 designated type as well. Each set of qualifiers represents useful
3537 information grabbed at some point, and merging the two simply unifies
3538 these inputs into the final type description.
3540 Consider for instance a volatile type frozen after an access to constant
3541 type designating it; after the designated type's freeze, we get here with
3542 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
3543 when the access type was processed. We will make a volatile and readonly
3544 designated type, because that's what it really is.
3546 We might also get here for a non-dummy OLD_TYPE variant with different
3547 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
3548 to private record type elaboration (see the comments around the call to
3549 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
3550 the qualifiers in those cases too, to avoid accidentally discarding the
3551 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
3552 new_type
3553 = build_qualified_type (new_type,
3554 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
3556 /* If old type and new type are identical, there is nothing to do. */
3557 if (old_type == new_type)
3558 return;
3560 /* Otherwise, first handle the simple case. */
3561 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
3563 TYPE_POINTER_TO (new_type) = ptr;
3564 TYPE_REFERENCE_TO (new_type) = ref;
3566 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3567 for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
3568 ptr1 = TYPE_NEXT_VARIANT (ptr1))
3569 TREE_TYPE (ptr1) = new_type;
3571 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3572 for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
3573 ref1 = TYPE_NEXT_VARIANT (ref1))
3574 TREE_TYPE (ref1) = new_type;
3577 /* Now deal with the unconstrained array case. In this case the "pointer"
3578 is actually a RECORD_TYPE where both fields are pointers to dummy nodes.
3579 Turn them into pointers to the correct types using update_pointer_to. */
3580 else if (!TYPE_FAT_POINTER_P (ptr))
3581 gcc_unreachable ();
3583 else
3585 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
3586 tree array_field = TYPE_FIELDS (ptr);
3587 tree bounds_field = TREE_CHAIN (TYPE_FIELDS (ptr));
3588 tree new_ptr = TYPE_POINTER_TO (new_type);
3589 tree new_ref;
3590 tree var;
3592 /* Make pointers to the dummy template point to the real template. */
3593 update_pointer_to
3594 (TREE_TYPE (TREE_TYPE (bounds_field)),
3595 TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_ptr)))));
3597 /* The references to the template bounds present in the array type
3598 are made through a PLACEHOLDER_EXPR of type NEW_PTR. Since we
3599 are updating PTR to make it a full replacement for NEW_PTR as
3600 pointer to NEW_TYPE, we must rework the PLACEHOLDER_EXPR so as
3601 to make it of type PTR. */
3602 new_ref = build3 (COMPONENT_REF, TREE_TYPE (bounds_field),
3603 build0 (PLACEHOLDER_EXPR, ptr),
3604 bounds_field, NULL_TREE);
3606 /* Create the new array for the new PLACEHOLDER_EXPR and make pointers
3607 to the dummy array point to it. */
3608 update_pointer_to
3609 (TREE_TYPE (TREE_TYPE (array_field)),
3610 substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))),
3611 TREE_CHAIN (TYPE_FIELDS (new_ptr)), new_ref));
3613 /* Make PTR the pointer to NEW_TYPE. */
3614 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
3615 = TREE_TYPE (new_type) = ptr;
3617 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
3618 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
3620 /* Now handle updating the allocation record, what the thin pointer
3621 points to. Update all pointers from the old record into the new
3622 one, update the type of the array field, and recompute the size. */
3623 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
3625 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
3626 = TREE_TYPE (TREE_TYPE (array_field));
3628 /* The size recomputation needs to account for alignment constraints, so
3629 we let layout_type work it out. This will reset the field offsets to
3630 what they would be in a regular record, so we shift them back to what
3631 we want them to be for a thin pointer designated type afterwards. */
3632 DECL_SIZE (TYPE_FIELDS (new_obj_rec)) = 0;
3633 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))) = 0;
3634 TYPE_SIZE (new_obj_rec) = 0;
3635 layout_type (new_obj_rec);
3637 shift_unc_components_for_thin_pointers (new_obj_rec);
3639 /* We are done, at last. */
3640 rest_of_record_type_compilation (ptr);
3644 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
3645 unconstrained one. This involves making or finding a template. */
3647 static tree
3648 convert_to_fat_pointer (tree type, tree expr)
3650 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
3651 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
3652 tree etype = TREE_TYPE (expr);
3653 tree template;
3655 /* If EXPR is null, make a fat pointer that contains null pointers to the
3656 template and array. */
3657 if (integer_zerop (expr))
3658 return
3659 gnat_build_constructor
3660 (type,
3661 tree_cons (TYPE_FIELDS (type),
3662 convert (p_array_type, expr),
3663 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3664 convert (build_pointer_type (template_type),
3665 expr),
3666 NULL_TREE)));
3668 /* If EXPR is a thin pointer, make the template and data from the record. */
3669 else if (TYPE_THIN_POINTER_P (etype))
3671 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
3673 expr = save_expr (expr);
3674 if (TREE_CODE (expr) == ADDR_EXPR)
3675 expr = TREE_OPERAND (expr, 0);
3676 else
3677 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
3679 template = build_component_ref (expr, NULL_TREE, fields, false);
3680 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3681 build_component_ref (expr, NULL_TREE,
3682 TREE_CHAIN (fields), false));
3685 /* Otherwise, build the constructor for the template. */
3686 else
3687 template = build_template (template_type, TREE_TYPE (etype), expr);
3689 /* The final result is a constructor for the fat pointer.
3691 If EXPR is an argument of a foreign convention subprogram, the type it
3692 points to is directly the component type. In this case, the expression
3693 type may not match the corresponding FIELD_DECL type at this point, so we
3694 call "convert" here to fix that up if necessary. This type consistency is
3695 required, for instance because it ensures that possible later folding of
3696 COMPONENT_REFs against this constructor always yields something of the
3697 same type as the initial reference.
3699 Note that the call to "build_template" above is still fine because it
3700 will only refer to the provided TEMPLATE_TYPE in this case. */
3701 return
3702 gnat_build_constructor
3703 (type,
3704 tree_cons (TYPE_FIELDS (type),
3705 convert (p_array_type, expr),
3706 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3707 build_unary_op (ADDR_EXPR, NULL_TREE, template),
3708 NULL_TREE)));
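
/* Editorial sketch, not part of utils.c: the fat pointer value the
   constructor above produces, written as a plain C aggregate. The first
   field mirrors the P_ARRAY component referenced in this file; the bounds
   field name and the element/bounds types are assumptions made for the
   illustration. */

struct fat_bounds_sketch { int lb, ub; };

struct fat_pointer_sketch
{
  int *p_array;                       /* address of the array data */
  struct fat_bounds_sketch *p_bounds; /* address of the bounds template */
};

static struct fat_pointer_sketch
null_fat_pointer_sketch (void)
{
  /* First case above: a null source yields null array and template
     pointers. */
  struct fat_pointer_sketch fp = { 0, 0 };
  return fp;
}
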
3711 /* Convert EXPR to a thin pointer type, TYPE. The only thing we know how to
3712 convert is a fat pointer, so first convert EXPR to a fat pointer if it
3713 is not one already. */
3715 static tree
3716 convert_to_thin_pointer (tree type, tree expr)
3718 if (!TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
3719 expr
3720 = convert_to_fat_pointer
3721 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
3723 /* We get the pointer to the data and use a NOP_EXPR to make it the
3724 proper GCC type. */
3725 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
3726 false);
3727 expr = build1 (NOP_EXPR, type, expr);
3729 return expr;
3732 /* Create an expression whose value is that of EXPR,
3733 converted to type TYPE. The TREE_TYPE of the value
3734 is always TYPE. This function implements all reasonable
3735 conversions; callers should filter out those that are
3736 not permitted by the language being compiled. */
3738 tree
3739 convert (tree type, tree expr)
3741 enum tree_code code = TREE_CODE (type);
3742 tree etype = TREE_TYPE (expr);
3743 enum tree_code ecode = TREE_CODE (etype);
3745 /* If EXPR is already the right type, we are done. */
3746 if (type == etype)
3747 return expr;
3749 /* If both input and output have padding and are of variable size, do this
3750 as an unchecked conversion. Likewise if one is a mere variant of the
3751 other, so we avoid a pointless unpad/repad sequence. */
3752 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
3753 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
3754 && (!TREE_CONSTANT (TYPE_SIZE (type))
3755 || !TREE_CONSTANT (TYPE_SIZE (etype))
3756 || gnat_types_compatible_p (type, etype)
3757 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
3758 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
3761 /* If the output type has padding, convert to the inner type and
3762 make a constructor to build the record. */
3763 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
3765 /* If we previously converted from another type and our type is
3766 of variable size, remove the conversion to avoid the need for
3767 variable-size temporaries. Likewise for a conversion between
3768 original and packable version. */
3769 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3770 && (!TREE_CONSTANT (TYPE_SIZE (type))
3771 || (ecode == RECORD_TYPE
3772 && TYPE_NAME (etype)
3773 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
3774 expr = TREE_OPERAND (expr, 0);
3776 /* If we are just removing the padding from expr, convert the original
3777 object if we have variable size in order to avoid the need for some
3778 variable-size temporaries. Likewise if the padding is a mere variant
3779 of the other, so we avoid a pointless unpad/repad sequence. */
3780 if (TREE_CODE (expr) == COMPONENT_REF
3781 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
3782 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
3783 && (!TREE_CONSTANT (TYPE_SIZE (type))
3784 || gnat_types_compatible_p (type,
3785 TREE_TYPE (TREE_OPERAND (expr, 0)))
3786 || (ecode == RECORD_TYPE
3787 && TYPE_NAME (etype)
3788 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
3789 return convert (type, TREE_OPERAND (expr, 0));
3791 /* If the result type is a padded type with a self-referentially-sized
3792 field and the expression type is a record, do this as an
3793 unchecked conversion. */
3794 else if (TREE_CODE (etype) == RECORD_TYPE
3795 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
3796 return unchecked_convert (type, expr, false);
3798 else
3799 return
3800 gnat_build_constructor (type,
3801 tree_cons (TYPE_FIELDS (type),
3802 convert (TREE_TYPE
3803 (TYPE_FIELDS (type)),
3804 expr),
3805 NULL_TREE));
3808 /* If the input type has padding, remove it and convert to the output type.
3809 The ordering of the conditions ensures that the output type is not
3810 a padding type here, as it is not clear whether the conversion would
3811 always be correct if this were to happen. */
3812 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
3814 tree unpadded;
3816 /* If we have just converted to this padded type, just get the
3817 inner expression. */
3818 if (TREE_CODE (expr) == CONSTRUCTOR
3819 && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
3820 && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
3821 == TYPE_FIELDS (etype))
3822 unpadded
3823 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;
3825 /* Otherwise, build an explicit component reference. */
3826 else
3827 unpadded
3828 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
3830 return convert (type, unpadded);
3833 /* If the input is a biased type, adjust first. */
3834 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
3835 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
3836 fold_convert (TREE_TYPE (etype),
3837 expr),
3838 TYPE_MIN_VALUE (etype)));
3840 /* If the input is a justified modular type, we need to extract the actual
3841 object before converting it to any other type with the exceptions of an
3842 unconstrained array or of a mere type variant. It is useful to avoid the
3843 extraction and conversion in the type variant case because it could end
3844 up replacing a VAR_DECL expr by a constructor and we might be about to
3845 take the address of the result. */
3846 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
3847 && code != UNCONSTRAINED_ARRAY_TYPE
3848 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
3849 return convert (type, build_component_ref (expr, NULL_TREE,
3850 TYPE_FIELDS (etype), false));
3852 /* If converting to a type that contains a template, convert to the data
3853 type and then build the template. */
3854 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
3856 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
3858 /* If the source already has a template, get a reference to the
3859 associated array only, as we are going to rebuild a template
3860 for the target type anyway. */
3861 expr = maybe_unconstrained_array (expr);
3863 return
3864 gnat_build_constructor
3865 (type,
3866 tree_cons (TYPE_FIELDS (type),
3867 build_template (TREE_TYPE (TYPE_FIELDS (type)),
3868 obj_type, NULL_TREE),
3869 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3870 convert (obj_type, expr), NULL_TREE)));
3873 /* There are some special cases of expressions that we process
3874 specially. */
3875 switch (TREE_CODE (expr))
3877 case ERROR_MARK:
3878 return expr;
3880 case NULL_EXPR:
3881 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
3882 conversion in gnat_expand_expr. NULL_EXPR does not represent
3883 an actual value, so no conversion is needed. */
3884 expr = copy_node (expr);
3885 TREE_TYPE (expr) = type;
3886 return expr;
3888 case STRING_CST:
3889 /* If we are converting a STRING_CST to another constrained array type,
3890 just make a new one in the proper type. */
3891 if (code == ecode && AGGREGATE_TYPE_P (etype)
3892 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
3893 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
3895 expr = copy_node (expr);
3896 TREE_TYPE (expr) = type;
3897 return expr;
3899 break;
3901 case CONSTRUCTOR:
3902 /* If we are converting a CONSTRUCTOR to a mere variant type, just make
3903 a new one in the proper type. */
3904 if (code == ecode && gnat_types_compatible_p (type, etype))
3906 expr = copy_node (expr);
3907 TREE_TYPE (expr) = type;
3908 return expr;
3911 /* Likewise for a conversion between original and packable version, but
3912 we have to work harder in order to preserve type consistency. */
3913 if (code == ecode
3914 && code == RECORD_TYPE
3915 && TYPE_NAME (type) == TYPE_NAME (etype))
3917 VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
3918 unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
3919 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, len);
3920 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
3921 unsigned HOST_WIDE_INT idx;
3922 tree index, value;
3924 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
3926 constructor_elt *elt = VEC_quick_push (constructor_elt, v, NULL);
3927 /* We expect only simple constructors. Otherwise, punt. */
3928 if (!(index == efield || index == DECL_ORIGINAL_FIELD (efield)))
3929 break;
3930 elt->index = field;
3931 elt->value = convert (TREE_TYPE (field), value);
3932 efield = TREE_CHAIN (efield);
3933 field = TREE_CHAIN (field);
3936 if (idx == len)
3938 expr = copy_node (expr);
3939 TREE_TYPE (expr) = type;
3940 CONSTRUCTOR_ELTS (expr) = v;
3941 return expr;
3944 break;
3946 case UNCONSTRAINED_ARRAY_REF:
3947 /* Convert this to the type of the inner array by getting the address of
3948 the array from the template. */
3949 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3950 build_component_ref (TREE_OPERAND (expr, 0),
3951 get_identifier ("P_ARRAY"),
3952 NULL_TREE, false));
3953 etype = TREE_TYPE (expr);
3954 ecode = TREE_CODE (etype);
3955 break;
3957 case VIEW_CONVERT_EXPR:
3959 /* GCC 4.x is very sensitive to type consistency overall, and view
3960 conversions thus are very frequent. Even though just "convert"ing
3961 the inner operand to the output type is fine in most cases, it
3962 might expose unexpected input/output type mismatches in special
3963 circumstances so we avoid such recursive calls when we can. */
3964 tree op0 = TREE_OPERAND (expr, 0);
3966 /* If we are converting back to the original type, we can just
3967 lift the input conversion. This is a common occurrence with
3968 switches back-and-forth amongst type variants. */
3969 if (type == TREE_TYPE (op0))
3970 return op0;
3972 /* Otherwise, if we're converting between two aggregate types, we
3973 might be allowed to substitute the VIEW_CONVERT_EXPR target type
3974 in place or to just convert the inner expression. */
3975 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
3977 /* If we are converting between mere variants, we can just
3978 substitute the VIEW_CONVERT_EXPR in place. */
3979 if (gnat_types_compatible_p (type, etype))
3980 return build1 (VIEW_CONVERT_EXPR, type, op0);
3982 /* Otherwise, we may just bypass the input view conversion unless
3983 one of the types is a fat pointer, which is handled by
3984 specialized code below which relies on exact type matching. */
3985 else if (!TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
3986 return convert (type, op0);
3989 break;
3991 case INDIRECT_REF:
3992 /* If both types are record types, just convert the pointer and
3993 make a new INDIRECT_REF.
3995 ??? Disable this for now since it causes problems with the
3996 code in build_binary_op for MODIFY_EXPR which wants to
3997 strip off conversions. But that code really is a mess and
3998 we need to do this a much better way some time. */
3999 if (0
4000 && (TREE_CODE (type) == RECORD_TYPE
4001 || TREE_CODE (type) == UNION_TYPE)
4002 && (TREE_CODE (etype) == RECORD_TYPE
4003 || TREE_CODE (etype) == UNION_TYPE)
4004 && !TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
4005 return build_unary_op (INDIRECT_REF, NULL_TREE,
4006 convert (build_pointer_type (type),
4007 TREE_OPERAND (expr, 0)));
4008 break;
4010 default:
4011 break;
4014 /* Check for converting to a pointer to an unconstrained array. */
4015 if (TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
4016 return convert_to_fat_pointer (type, expr);
4018 /* If we are converting between two aggregate types that are mere
4019 variants, just make a VIEW_CONVERT_EXPR. */
4020 else if (code == ecode
4021 && AGGREGATE_TYPE_P (type)
4022 && gnat_types_compatible_p (type, etype))
4023 return build1 (VIEW_CONVERT_EXPR, type, expr);
4025 /* In all other cases of related types, make a NOP_EXPR. */
4026 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4027 || (code == INTEGER_CST && ecode == INTEGER_CST
4028 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
4029 return fold_convert (type, expr);
4031 switch (code)
4033 case VOID_TYPE:
4034 return fold_build1 (CONVERT_EXPR, type, expr);
4036 case INTEGER_TYPE:
4037 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4038 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4039 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4040 return unchecked_convert (type, expr, false);
4041 else if (TYPE_BIASED_REPRESENTATION_P (type))
4042 return fold_convert (type,
4043 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4044 convert (TREE_TYPE (type), expr),
4045 TYPE_MIN_VALUE (type)));
4047 /* ... fall through ... */
4049 case ENUMERAL_TYPE:
4050 case BOOLEAN_TYPE:
4051 /* If we are converting an additive expression to an integer type
4052 with lower precision, be wary of the optimization that can be
4053 applied by convert_to_integer. There are 2 problematic cases:
4054 - if the first operand was originally of a biased type,
4055 because we could be recursively called to convert it
4056 to an intermediate type and thus rematerialize the
4057 additive operator endlessly,
4058 - if the expression contains a placeholder, because an
4059 intermediate conversion that changes the sign could
4060 be inserted and thus introduce an artificial overflow
4061 at compile time when the placeholder is substituted. */
4062 if (code == INTEGER_TYPE
4063 && ecode == INTEGER_TYPE
4064 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4065 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4067 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4069 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4070 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4071 || CONTAINS_PLACEHOLDER_P (expr))
4072 return build1 (NOP_EXPR, type, expr);
4075 return fold (convert_to_integer (type, expr));
4077 case POINTER_TYPE:
4078 case REFERENCE_TYPE:
4079 /* If converting between two pointers to records denoting
4080 both a template and type, adjust if needed to account
4081 for any differing offsets, since one might be negative. */
4082 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
4084 tree bit_diff
4085 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
4086 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
4087 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
4088 sbitsize_int (BITS_PER_UNIT));
4090 expr = build1 (NOP_EXPR, type, expr);
4091 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
4092 if (integer_zerop (byte_diff))
4093 return expr;
4095 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4096 fold (convert (sizetype, byte_diff)));
4099 /* If converting to a thin pointer, handle specially. */
4100 if (TYPE_THIN_POINTER_P (type)
4101 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
4102 return convert_to_thin_pointer (type, expr);
4104 /* If converting fat pointer to normal pointer, get the pointer to the
4105 array and then convert it. */
4106 else if (TYPE_FAT_POINTER_P (etype))
4107 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
4108 NULL_TREE, false);
4110 return fold (convert_to_pointer (type, expr));
4112 case REAL_TYPE:
4113 return fold (convert_to_real (type, expr));
4115 case RECORD_TYPE:
4116 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4117 return
4118 gnat_build_constructor
4119 (type, tree_cons (TYPE_FIELDS (type),
4120 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
4121 NULL_TREE));
4123 /* ... fall through ... */
4125 case ARRAY_TYPE:
4126 /* In these cases, assume the front-end has validated the conversion.
4127 If the conversion is valid, it will be a bit-wise conversion, so
4128 it can be viewed as an unchecked conversion. */
4129 return unchecked_convert (type, expr, false);
4131 case UNION_TYPE:
4132 /* This is either a conversion between a tagged type and some
4133 subtype, which we have to mark as a UNION_TYPE because of
4134 overlapping fields or a conversion of an Unchecked_Union. */
4135 return unchecked_convert (type, expr, false);
4137 case UNCONSTRAINED_ARRAY_TYPE:
4138 /* If EXPR is a constrained array, take its address, convert it to a
4139 fat pointer, and then dereference it. Likewise if EXPR is a
4140 record containing both a template and a constrained array.
4141 Note that a record representing a justified modular type
4142 always represents a packed constrained array. */
4143 if (ecode == ARRAY_TYPE
4144 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4145 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4146 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4147 return
4148 build_unary_op
4149 (INDIRECT_REF, NULL_TREE,
4150 convert_to_fat_pointer (TREE_TYPE (type),
4151 build_unary_op (ADDR_EXPR,
4152 NULL_TREE, expr)));
4154 /* Do something very similar for converting one unconstrained
4155 array to another. */
4156 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4157 return
4158 build_unary_op (INDIRECT_REF, NULL_TREE,
4159 convert (TREE_TYPE (type),
4160 build_unary_op (ADDR_EXPR,
4161 NULL_TREE, expr)));
4162 else
4163 gcc_unreachable ();
4165 case COMPLEX_TYPE:
4166 return fold (convert_to_complex (type, expr));
4168 default:
4169 gcc_unreachable ();
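
/* Editorial sketch, not part of utils.c: the biased-representation
   adjustments made in convert above, for a hypothetical subtype
   100 .. 103 stored on 2 bits. The stored bits are the offset from the
   subtype's lower bound (TYPE_MIN_VALUE), so converting from the biased
   type adds the bound back and converting to it subtracts the bound. */

static int
biased_decode_sketch (unsigned stored_bits)
{
  const int min_value = 100;             /* lower bound of the subtype */
  return (int) stored_bits + min_value;  /* the PLUS_EXPR on input */
}

static unsigned
biased_encode_sketch (int value)
{
  const int min_value = 100;              /* lower bound of the subtype */
  return (unsigned) (value - min_value);  /* the MINUS_EXPR on output */
}
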
4173 /* Remove all conversions that are done in EXP. This includes converting
4174 from a padded type or to a justified modular type. If TRUE_ADDRESS
4175 is true, always return the address of the containing object even if
4176 the address is not bit-aligned. */
4178 tree
4179 remove_conversions (tree exp, bool true_address)
4181 switch (TREE_CODE (exp))
4183 case CONSTRUCTOR:
4184 if (true_address
4185 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4186 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4187 return
4188 remove_conversions (VEC_index (constructor_elt,
4189 CONSTRUCTOR_ELTS (exp), 0)->value,
4190 true);
4191 break;
4193 case COMPONENT_REF:
4194 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
4195 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4196 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4197 break;
4199 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
4200 CASE_CONVERT:
4201 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4203 default:
4204 break;
4207 return exp;
4210 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4211 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
4212 likewise return an expression pointing to the underlying array. */
4214 tree
4215 maybe_unconstrained_array (tree exp)
4217 enum tree_code code = TREE_CODE (exp);
4218 tree new;
4220 switch (TREE_CODE (TREE_TYPE (exp)))
4222 case UNCONSTRAINED_ARRAY_TYPE:
4223 if (code == UNCONSTRAINED_ARRAY_REF)
4226 = build_unary_op (INDIRECT_REF, NULL_TREE,
4227 build_component_ref (TREE_OPERAND (exp, 0),
4228 get_identifier ("P_ARRAY"),
4229 NULL_TREE, false));
4230 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
4231 return new;
4234 else if (code == NULL_EXPR)
4235 return build1 (NULL_EXPR,
4236 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
4237 (TREE_TYPE (TREE_TYPE (exp))))),
4238 TREE_OPERAND (exp, 0));
4240 case RECORD_TYPE:
4241 /* If this is a padded type, convert to the unpadded type and see if
4242 it contains a template. */
4243 if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
4245 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
4246 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
4247 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
4248 return
4249 build_component_ref (new, NULL_TREE,
4250 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
4253 else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
4254 return
4255 build_component_ref (exp, NULL_TREE,
4256 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
4257 break;
4259 default:
4260 break;
4263 return exp;
4266 /* Return true if EXPR is an expression that can be folded as an operand
4267 of a VIEW_CONVERT_EXPR. See the head comment of unchecked_convert for
4268 the rationale. */
4270 static bool
4271 can_fold_for_view_convert_p (tree expr)
4273 tree t1, t2;
4275 /* The folder will fold NOP_EXPRs between integral types with the same
4276 precision (in the middle-end's sense). We cannot allow it if the
4277 types don't have the same precision in the Ada sense as well. */
4278 if (TREE_CODE (expr) != NOP_EXPR)
4279 return true;
4281 t1 = TREE_TYPE (expr);
4282 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4284 /* Defer to the folder for non-integral conversions. */
4285 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4286 return true;
4288 /* Only fold conversions that preserve both precisions. */
4289 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4290 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4291 return true;
4293 return false;
4296 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
4297 If NOTRUNC_P is true, truncation operations should be suppressed.
4299 Special care is required with (source or target) integral types whose
4300 precision is not equal to their size, to make sure we fetch or assign
4301 the value bits whose location might depend on the endianness, e.g.
4303 Rmsize : constant := 8;
4304 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4306 type Bit_Array is array (1 .. Rmsize) of Boolean;
4307 pragma Pack (Bit_Array);
4309 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4311 Value : Int := 2#1000_0001#;
4312 Vbits : Bit_Array := To_Bit_Array (Value);
4314 we expect the 8 bits at Vbits'Address to always contain Value, while
4315 their original location depends on the endianness, at Value'Address
4316 on a little-endian architecture but not on a big-endian one.
4318 ??? There is a problematic discrepancy between what is called precision
4319 here (and more generally throughout gigi) for integral types and what is
4320 called precision in the middle-end. In the former case it's the RM size
4321 as given by TYPE_RM_SIZE (or rm_size) whereas it's TYPE_PRECISION in the
4322 latter case, the hitch being that they are not equal when they matter,
4323 that is when the number of value bits is not equal to the type's size:
4324 TYPE_RM_SIZE does give the number of value bits but TYPE_PRECISION is set
4325 to the size. The sole exception are BOOLEAN_TYPEs for which both are 1.
4327 The consequence is that gigi must duplicate code bridging the gap between
4328 the type's size and its precision that exists for TYPE_PRECISION in the
4329 middle-end, because the latter knows nothing about TYPE_RM_SIZE, and be
4330 wary of transformations applied in the middle-end based on TYPE_PRECISION
4331 because this value doesn't reflect the actual precision for Ada. */
4333 tree
4334 unchecked_convert (tree type, tree expr, bool notrunc_p)
4336 tree etype = TREE_TYPE (expr);
4338 /* If the expression is already the right type, we are done. */
4339 if (etype == type)
4340 return expr;
4342 /* If both types are integral, just do a normal conversion.
4343 Likewise for a conversion to an unconstrained array. */
4344 if ((((INTEGRAL_TYPE_P (type)
4345 && !(TREE_CODE (type) == INTEGER_TYPE
4346 && TYPE_VAX_FLOATING_POINT_P (type)))
4347 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
4348 || (TREE_CODE (type) == RECORD_TYPE
4349 && TYPE_JUSTIFIED_MODULAR_P (type)))
4350 && ((INTEGRAL_TYPE_P (etype)
4351 && !(TREE_CODE (etype) == INTEGER_TYPE
4352 && TYPE_VAX_FLOATING_POINT_P (etype)))
4353 || (POINTER_TYPE_P (etype) && !TYPE_THIN_POINTER_P (etype))
4354 || (TREE_CODE (etype) == RECORD_TYPE
4355 && TYPE_JUSTIFIED_MODULAR_P (etype))))
4356 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
4358 if (TREE_CODE (etype) == INTEGER_TYPE
4359 && TYPE_BIASED_REPRESENTATION_P (etype))
4361 tree ntype = copy_type (etype);
4362 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
4363 TYPE_MAIN_VARIANT (ntype) = ntype;
4364 expr = build1 (NOP_EXPR, ntype, expr);
4367 if (TREE_CODE (type) == INTEGER_TYPE
4368 && TYPE_BIASED_REPRESENTATION_P (type))
4370 tree rtype = copy_type (type);
4371 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
4372 TYPE_MAIN_VARIANT (rtype) = rtype;
4373 expr = convert (rtype, expr);
4374 expr = build1 (NOP_EXPR, type, expr);
4377 /* We have another special case: if we are unchecked converting either
4378 a subtype or a type with limited range into a base type, we need to
4379 ensure that VRP doesn't propagate range information because this
4380 conversion may be done precisely to validate that the object is
4381 within the range it is supposed to have. */
4382 else if (TREE_CODE (expr) != INTEGER_CST
4383 && TREE_CODE (type) == INTEGER_TYPE && !TREE_TYPE (type)
4384 && ((TREE_CODE (etype) == INTEGER_TYPE && TREE_TYPE (etype))
4385 || TREE_CODE (etype) == ENUMERAL_TYPE
4386 || TREE_CODE (etype) == BOOLEAN_TYPE))
4388 /* The optimization barrier is a VIEW_CONVERT_EXPR node; moreover,
4389 in order not to be deemed a useless type conversion, it must
4390 be from subtype to base type.
4392 Therefore we first do the bulk of the conversion to a subtype of
4393 the final type. And this conversion must itself not be deemed
4394 useless if the source type is not a subtype because, otherwise,
4395 the final VIEW_CONVERT_EXPR will be deemed so as well. That's
4396 why we toggle the unsigned flag in this conversion, which is
4397 harmless since the final conversion is only a reinterpretation
4398 of the bit pattern.
4400 ??? This may raise addressability and/or aliasing issues because
4401 VIEW_CONVERT_EXPR gets gimplified as an lvalue, thus causing the
4402 address of its operand to be taken if it is deemed addressable
4403 and not already in GIMPLE form. */
4404 tree rtype
4405 = gnat_type_for_mode (TYPE_MODE (type), !TYPE_UNSIGNED (etype));
4406 rtype = copy_type (rtype);
4407 TYPE_MAIN_VARIANT (rtype) = rtype;
4408 TREE_TYPE (rtype) = type;
4409 expr = convert (rtype, expr);
4410 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
4413 else
4414 expr = convert (type, expr);
4417 /* If we are converting to an integral type whose precision is not equal
4418 to its size, first unchecked convert to a record that contains an
4419 object of the output type. Then extract the field. */
4420 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4421 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4422 GET_MODE_BITSIZE (TYPE_MODE (type))))
4424 tree rec_type = make_node (RECORD_TYPE);
4425 tree field = create_field_decl (get_identifier ("OBJ"), type,
4426 rec_type, 1, 0, 0, 0);
4428 TYPE_FIELDS (rec_type) = field;
4429 layout_type (rec_type);
4431 expr = unchecked_convert (rec_type, expr, notrunc_p);
4432 expr = build_component_ref (expr, NULL_TREE, field, 0);
4435 /* Similarly if we are converting from an integral type whose precision
4436 is not equal to its size. */
4437 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype)
4438 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
4439 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4441 tree rec_type = make_node (RECORD_TYPE);
4442 tree field
4443 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
4444 1, 0, 0, 0);
4446 TYPE_FIELDS (rec_type) = field;
4447 layout_type (rec_type);
4449 expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
4450 expr = unchecked_convert (type, expr, notrunc_p);
4453 /* We have a special case when we are converting between two
4454 unconstrained array types. In that case, take the address,
4455 convert the fat pointer types, and dereference. */
4456 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
4457 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
4458 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4459 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
4460 build_unary_op (ADDR_EXPR, NULL_TREE,
4461 expr)));
4462 else
4464 expr = maybe_unconstrained_array (expr);
4465 etype = TREE_TYPE (expr);
4466 if (can_fold_for_view_convert_p (expr))
4467 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
4468 else
4469 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
4472 /* If the result is an integral type whose precision is not equal to its
4473 size, sign- or zero-extend the result. We need not do this if the input
4474 is an integral type of the same precision and signedness or if the output
4475 is a biased type or if both the input and output are unsigned. */
4476 if (!notrunc_p
4477 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4478 && !(TREE_CODE (type) == INTEGER_TYPE
4479 && TYPE_BIASED_REPRESENTATION_P (type))
4480 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4481 GET_MODE_BITSIZE (TYPE_MODE (type)))
4482 && !(INTEGRAL_TYPE_P (etype)
4483 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
4484 && operand_equal_p (TYPE_RM_SIZE (type),
4485 (TYPE_RM_SIZE (etype) != 0
4486 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
4488 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
4490 tree base_type = gnat_type_for_mode (TYPE_MODE (type),
4491 TYPE_UNSIGNED (type));
4492 tree shift_expr
4493 = convert (base_type,
4494 size_binop (MINUS_EXPR,
4495 bitsize_int
4496 (GET_MODE_BITSIZE (TYPE_MODE (type))),
4497 TYPE_RM_SIZE (type)));
4498 expr
4499 = convert (type,
4500 build_binary_op (RSHIFT_EXPR, base_type,
4501 build_binary_op (LSHIFT_EXPR, base_type,
4502 convert (base_type, expr),
4503 shift_expr),
4504 shift_expr));
4507 /* An unchecked conversion should never raise Constraint_Error. The code
4508 below assumes that GCC's conversion routines overflow the same way that
4509 the underlying hardware does. This is probably true. In the rare case
4510 when it is false, we can rely on the fact that such conversions are
4511 erroneous anyway. */
4512 if (TREE_CODE (expr) == INTEGER_CST)
4513 TREE_OVERFLOW (expr) = 0;
4515 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
4516 mark the expression as no longer constant. */
4517 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4518 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
4519 OEP_ONLY_CONST))
4520 TREE_CONSTANT (expr) = 0;
4522 return expr;
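
/* Editorial sketch, not part of utils.c: the LSHIFT/RSHIFT pair used above
   to sign-extend a value whose RM size is smaller than its mode size,
   assuming a hypothetical 8-bit container holding a 5-bit signed value.
   For example, the 5-bit pattern 10001 comes back as -15. */

static signed char
extend_rm_value_sketch (signed char container, int rm_size)
{
  int shift = 8 - rm_size;   /* bits beyond the RM size */

  /* Shift the value bits up to the most significant end, then shift them
     back down arithmetically so the sign bit is replicated. */
  return (signed char) ((signed char) (container << shift) >> shift);
}
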
4525 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
4526 the latter being a record type as predicated by Is_Record_Type. */
4528 enum tree_code
4529 tree_code_for_record_type (Entity_Id gnat_type)
4531 Node_Id component_list
4532 = Component_List (Type_Definition
4533 (Declaration_Node
4534 (Implementation_Base_Type (gnat_type))));
4535 Node_Id component;
4537 /* Make this a UNION_TYPE unless it's either not an Unchecked_Union or
4538 we have a non-discriminant field outside a variant. In either case,
4539 it's a RECORD_TYPE. */
4541 if (!Is_Unchecked_Union (gnat_type))
4542 return RECORD_TYPE;
4544 for (component = First_Non_Pragma (Component_Items (component_list));
4545 Present (component);
4546 component = Next_Non_Pragma (component))
4547 if (Ekind (Defining_Entity (component)) == E_Component)
4548 return RECORD_TYPE;
4550 return UNION_TYPE;
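
/* Editorial sketch, not part of utils.c: the two C-level layouts the
   routine above chooses between, with hypothetical component types. An
   Unchecked_Union whose components all live in variants overlays the
   alternatives, hence UNION_TYPE; any non-discriminant component outside
   the variant part forces RECORD_TYPE. */

union unchecked_union_sketch
{
  int alt_integer;    /* one variant alternative */
  float alt_float;    /* another variant alternative */
};

struct plain_record_sketch
{
  int common_component;               /* component outside the variants */
  union unchecked_union_sketch parts; /* the variant part */
};
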
4553 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
4554 component of an aggregate type. */
4556 bool
4557 type_for_nonaliased_component_p (tree gnu_type)
4559 /* If the type is passed by reference, we may have pointers to the
4560 component so it cannot be made non-aliased. */
4561 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
4562 return false;
4564 /* We used to say that any component of aggregate type is aliased
4565 because the front-end may take 'Reference of it. The front-end
4566 has been enhanced in the meantime so as to use a renaming instead
4567 in most cases, but the back-end can probably take the address of
4568 such a component too so we go for the conservative stance.
4570 For instance, we might need the address of any array type, even
4571 if normally passed by copy, to construct a fat pointer if the
4572 component is used as an actual for an unconstrained formal.
4574 Likewise for record types: even if a specific record subtype is
4575 passed by copy, the parent type might be passed by ref (e.g. if
4576 it's of variable size) and we might take the address of a child
4577 component to pass to a parent formal. We have no way to check
4578 for such conditions here. */
4579 if (AGGREGATE_TYPE_P (gnu_type))
4580 return false;
4582 return true;
4585 /* Perform final processing on global variables. */
4587 void
4588 gnat_write_global_declarations (void)
4590 /* Proceed to optimize and emit assembly.
4591 FIXME: shouldn't be the front end's responsibility to call this. */
4592 cgraph_optimize ();
4594 /* Emit debug info for all global declarations. */
4595 emit_debug_global_declarations (VEC_address (tree, global_decls),
4596 VEC_length (tree, global_decls));
4599 /* ************************************************************************
4600 * * GCC builtins support *
4601 * ************************************************************************ */
4603 /* The general scheme is fairly simple:
4605 For each builtin function/type to be declared, gnat_install_builtins calls
4606 internal facilities which eventually get to gnat_push_decl, which in turn
4607 tracks the builtin function decls so declared in the 'builtin_decls' global
4608 data structure. When an Intrinsic subprogram declaration is processed, we
4609 search this global data structure to retrieve the associated BUILT_IN DECL
4610 node. */
4612 /* Search the chain of currently available builtin declarations for a node
4613 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
4614 found, if any, or NULL_TREE otherwise. */
4615 tree
4616 builtin_decl_for (tree name)
4618 unsigned i;
4619 tree decl;
4621 for (i = 0; VEC_iterate(tree, builtin_decls, i, decl); i++)
4622 if (DECL_NAME (decl) == name)
4623 return decl;
4625 return NULL_TREE;
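
/* Editorial sketch, not part of utils.c: a typical lookup through the
   routine above, as might be done when binding an Intrinsic subprogram to
   a GCC builtin. The builtin name is only an example. */

static tree
memcpy_builtin_decl_sketch (void)
{
  /* Yields NULL_TREE if no builtin with that name has been installed. */
  return builtin_decl_for (get_identifier ("__builtin_memcpy"));
}
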
4628 /* The code below eventually exposes gnat_install_builtins, which declares
4629 the builtin types and functions we might need, either internally or as
4630 user accessible facilities.
4632 ??? This is a first implementation shot, still in rough shape. It is
4633 heavily inspired by the "C" family implementation, with chunks copied
4634 verbatim from there.
4636 Two obvious TODO candidates are
4637 o Use a more efficient name/decl mapping scheme
4638 o Devise a middle-end infrastructure to avoid having to copy
4639 pieces between front-ends. */
4641 /* ----------------------------------------------------------------------- *
4642 * BUILTIN ELEMENTARY TYPES *
4643 * ----------------------------------------------------------------------- */
4645 /* Standard data types to be used in builtin argument declarations. */
4647 enum c_tree_index
4649 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
4650 CTI_STRING_TYPE,
4651 CTI_CONST_STRING_TYPE,
4653 CTI_MAX
4656 static tree c_global_trees[CTI_MAX];
4658 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
4659 #define string_type_node c_global_trees[CTI_STRING_TYPE]
4660 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
4662 /* ??? In addition to some attribute handlers, we currently don't support a
4663 (small) number of builtin types, which in turn inhibits support for a
4664 number of builtin functions. */
4665 #define wint_type_node void_type_node
4666 #define intmax_type_node void_type_node
4667 #define uintmax_type_node void_type_node
4669 /* Build the void_list_node (void_type_node having been created). */
4671 static tree
4672 build_void_list_node (void)
4674 tree t = build_tree_list (NULL_TREE, void_type_node);
4675 return t;
4678 /* Used to help initialize the builtin-types.def table. When a type of
4679 the correct size doesn't exist, use error_mark_node instead of NULL.
4680 The latter results in segfaults even when a decl using the type doesn't
4681 get invoked. */
4683 static tree
4684 builtin_type_for_size (int size, bool unsignedp)
4686 tree type = lang_hooks.types.type_for_size (size, unsignedp);
4687 return type ? type : error_mark_node;
4690 /* Build/push the elementary type decls that builtin functions/types
4691 will need. */
4693 static void
4694 install_builtin_elementary_types (void)
4696 signed_size_type_node = size_type_node;
4697 pid_type_node = integer_type_node;
4698 void_list_node = build_void_list_node ();
4700 string_type_node = build_pointer_type (char_type_node);
4701 const_string_type_node
4702 = build_pointer_type (build_qualified_type
4703 (char_type_node, TYPE_QUAL_CONST));
4706 /* ----------------------------------------------------------------------- *
4707 * BUILTIN FUNCTION TYPES *
4708 * ----------------------------------------------------------------------- */
4710 /* Now, builtin function types per se. */
4712 enum c_builtin_type
4714 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
4715 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
4716 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
4717 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
4718 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4719 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4720 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
4721 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
4722 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
4723 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
4724 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
4725 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
4726 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4727 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4728 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
4729 NAME,
4730 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
4731 #include "builtin-types.def"
4732 #undef DEF_PRIMITIVE_TYPE
4733 #undef DEF_FUNCTION_TYPE_0
4734 #undef DEF_FUNCTION_TYPE_1
4735 #undef DEF_FUNCTION_TYPE_2
4736 #undef DEF_FUNCTION_TYPE_3
4737 #undef DEF_FUNCTION_TYPE_4
4738 #undef DEF_FUNCTION_TYPE_5
4739 #undef DEF_FUNCTION_TYPE_6
4740 #undef DEF_FUNCTION_TYPE_7
4741 #undef DEF_FUNCTION_TYPE_VAR_0
4742 #undef DEF_FUNCTION_TYPE_VAR_1
4743 #undef DEF_FUNCTION_TYPE_VAR_2
4744 #undef DEF_FUNCTION_TYPE_VAR_3
4745 #undef DEF_FUNCTION_TYPE_VAR_4
4746 #undef DEF_FUNCTION_TYPE_VAR_5
4747 #undef DEF_POINTER_TYPE
4748 BT_LAST
4751 typedef enum c_builtin_type builtin_type;
4753 /* A temporary array used in communication with def_fn_type. */
4754 static GTY(()) tree builtin_types[(int) BT_LAST + 1];
4756 /* A helper function for install_builtin_function_types. Build function type
4757 for DEF with return type RET and N arguments. If VAR is true, then the
4758 function should be variadic after those N arguments.
4760 Takes special care not to ICE if any of the types involved are
4761 error_mark_node, which indicates that said type is not in fact available
4762 (see builtin_type_for_size), in which case the function type as a whole
4763 should be error_mark_node as well. */
4765 static void
4766 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
4768 tree args = NULL, t;
4769 va_list list;
4770 int i;
4772 va_start (list, n);
4773 for (i = 0; i < n; ++i)
4775 builtin_type a = va_arg (list, builtin_type);
4776 t = builtin_types[a];
4777 if (t == error_mark_node)
4778 goto egress;
4779 args = tree_cons (NULL_TREE, t, args);
4781 va_end (list);
4783 args = nreverse (args);
4784 if (!var)
4785 args = chainon (args, void_list_node);
4787 t = builtin_types[ret];
4788 if (t == error_mark_node)
4789 goto egress;
4790 t = build_function_type (t, args);
4792 egress:
4793 builtin_types[def] = t;
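
/* Editorial sketch, not part of utils.c: how one hypothetical line of
   builtin-types.def flows through the DEF_FUNCTION_TYPE_2 macro defined in
   install_builtin_function_types below and ends up as a call to def_fn_type
   above:

     DEF_FUNCTION_TYPE_2 (BT_FN_EXAMPLE, BT_PTR, BT_PTR, BT_INT)

   expands to

     def_fn_type (BT_FN_EXAMPLE, BT_PTR, 0, 2, BT_PTR, BT_INT);

   i.e. "non-variadic, two arguments", with the argument type codes passed
   through the varargs. */
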
4796 /* Build the builtin function types and install them in the builtin_types
4797 array for later use in builtin function decls. */
4799 static void
4800 install_builtin_function_types (void)
4802 tree va_list_ref_type_node;
4803 tree va_list_arg_type_node;
4805 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4807 va_list_arg_type_node = va_list_ref_type_node =
4808 build_pointer_type (TREE_TYPE (va_list_type_node));
4810 else
4812 va_list_arg_type_node = va_list_type_node;
4813 va_list_ref_type_node = build_reference_type (va_list_type_node);
4816 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
4817 builtin_types[ENUM] = VALUE;
4818 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
4819 def_fn_type (ENUM, RETURN, 0, 0);
4820 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
4821 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
4822 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
4823 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
4824 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
4825 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
4826 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
4827 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
4828 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
4829 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
4830 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4831 ARG6) \
4832 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
4833 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4834 ARG6, ARG7) \
4835 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
4836 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
4837 def_fn_type (ENUM, RETURN, 1, 0);
4838 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
4839 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
4840 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
4841 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
4842 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
4843 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
4844 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
4845 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
4846 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
4847 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
4848 #define DEF_POINTER_TYPE(ENUM, TYPE) \
4849 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
4851 #include "builtin-types.def"
4853 #undef DEF_PRIMITIVE_TYPE
4854 #undef DEF_FUNCTION_TYPE_1
4855 #undef DEF_FUNCTION_TYPE_2
4856 #undef DEF_FUNCTION_TYPE_3
4857 #undef DEF_FUNCTION_TYPE_4
4858 #undef DEF_FUNCTION_TYPE_5
4859 #undef DEF_FUNCTION_TYPE_6
4860 #undef DEF_FUNCTION_TYPE_VAR_0
4861 #undef DEF_FUNCTION_TYPE_VAR_1
4862 #undef DEF_FUNCTION_TYPE_VAR_2
4863 #undef DEF_FUNCTION_TYPE_VAR_3
4864 #undef DEF_FUNCTION_TYPE_VAR_4
4865 #undef DEF_FUNCTION_TYPE_VAR_5
4866 #undef DEF_POINTER_TYPE
4867 builtin_types[(int) BT_LAST] = NULL_TREE;
4870 /* ----------------------------------------------------------------------- *
4871 * BUILTIN ATTRIBUTES *
4872 * ----------------------------------------------------------------------- */
4874 enum built_in_attribute
4876 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
4877 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
4878 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
4879 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
4880 #include "builtin-attrs.def"
4881 #undef DEF_ATTR_NULL_TREE
4882 #undef DEF_ATTR_INT
4883 #undef DEF_ATTR_IDENT
4884 #undef DEF_ATTR_TREE_LIST
4885 ATTR_LAST
4888 static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
4890 static void
4891 install_builtin_attributes (void)
4893 /* Fill in the built_in_attributes array. */
4894 #define DEF_ATTR_NULL_TREE(ENUM) \
4895 built_in_attributes[(int) ENUM] = NULL_TREE;
4896 #define DEF_ATTR_INT(ENUM, VALUE) \
4897 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
4898 #define DEF_ATTR_IDENT(ENUM, STRING) \
4899 built_in_attributes[(int) ENUM] = get_identifier (STRING);
4900 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
4901 built_in_attributes[(int) ENUM] \
4902 = tree_cons (built_in_attributes[(int) PURPOSE], \
4903 built_in_attributes[(int) VALUE], \
4904 built_in_attributes[(int) CHAIN]);
4905 #include "builtin-attrs.def"
4906 #undef DEF_ATTR_NULL_TREE
4907 #undef DEF_ATTR_INT
4908 #undef DEF_ATTR_IDENT
4909 #undef DEF_ATTR_TREE_LIST
4910 }
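/* Editor's illustration (not part of the original source): a sketch of how a
   DEF_ATTR_TREE_LIST entry from builtin-attrs.def is expected to expand here.
   The entry name below is an assumption used for the example.

     DEF_ATTR_TREE_LIST (ATTR_NOTHROW_LIST, ATTR_NOTHROW, ATTR_NULL, ATTR_NULL)

   would become

     built_in_attributes[(int) ATTR_NOTHROW_LIST]
       = tree_cons (built_in_attributes[(int) ATTR_NOTHROW],
                    built_in_attributes[(int) ATTR_NULL],
                    built_in_attributes[(int) ATTR_NULL]);

   i.e. a one-element attribute list whose purpose slot is the "nothrow"
   identifier.  */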
4912 /* Handle a "const" attribute; arguments as in
4913 struct attribute_spec.handler. */
4915 static tree
4916 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
4917 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
4918 bool *no_add_attrs)
4919 {
4920 if (TREE_CODE (*node) == FUNCTION_DECL)
4921 TREE_READONLY (*node) = 1;
4922 else
4923 *no_add_attrs = true;
4925 return NULL_TREE;
4926 }
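/* Editor's illustration (not part of the original source): in C syntax the
   attribute handled above would appear as

     extern int square (int) __attribute__ ((const));

   and the handler marks the FUNCTION_DECL TREE_READONLY, which is how the
   middle end represents "const" functions.  "square" is a made-up name; in
   GNAT these handlers are typically reached through the attribute lists
   attached to builtins or through pragma Machine_Attribute.  */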
4928 /* Handle a "nothrow" attribute; arguments as in
4929 struct attribute_spec.handler. */
4931 static tree
4932 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
4933 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
4934 bool *no_add_attrs)
4935 {
4936 if (TREE_CODE (*node) == FUNCTION_DECL)
4937 TREE_NOTHROW (*node) = 1;
4938 else
4939 *no_add_attrs = true;
4941 return NULL_TREE;
4942 }
4944 /* Handle a "pure" attribute; arguments as in
4945 struct attribute_spec.handler. */
4947 static tree
4948 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
4949 int ARG_UNUSED (flags), bool *no_add_attrs)
4950 {
4951 if (TREE_CODE (*node) == FUNCTION_DECL)
4952 DECL_PURE_P (*node) = 1;
4953 /* ??? TODO: Support types. */
4954 else
4955 {
4956 warning (OPT_Wattributes, "%qE attribute ignored", name);
4957 *no_add_attrs = true;
4958 }
4960 return NULL_TREE;
4961 }
4963 /* Handle a "no vops" attribute; arguments as in
4964 struct attribute_spec.handler. */
4966 static tree
4967 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
4968 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
4969 bool *ARG_UNUSED (no_add_attrs))
4970 {
4971 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
4972 DECL_IS_NOVOPS (*node) = 1;
4973 return NULL_TREE;
4974 }
4976 /* Helper for nonnull attribute handling; fetch the operand number
4977 from the attribute argument list. */
4979 static bool
4980 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
4981 {
4982 /* Verify the arg number is a constant. */
4983 if (TREE_CODE (arg_num_expr) != INTEGER_CST
4984 || TREE_INT_CST_HIGH (arg_num_expr) != 0)
4985 return false;
4987 *valp = TREE_INT_CST_LOW (arg_num_expr);
4988 return true;
4989 }
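/* Editor's note (not part of the original source), as a small illustration of
   the helper above: for a C-style attribute nonnull (2), arg_num_expr is the
   INTEGER_CST 2, so *valp is set to 2 and the function returns true; the
   TREE_INT_CST_HIGH check rejects values that do not fit in the low part.  */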
4991 /* Handle the "nonnull" attribute. */
4992 static tree
4993 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
4994 tree args, int ARG_UNUSED (flags),
4995 bool *no_add_attrs)
4996 {
4997 tree type = *node;
4998 unsigned HOST_WIDE_INT attr_arg_num;
5000 /* If no arguments are specified, all pointer arguments should be
5001 non-null. Verify a full prototype is given so that the arguments
5002 will have the correct types when we actually check them later. */
5003 if (!args)
5004 {
5005 if (!TYPE_ARG_TYPES (type))
5006 {
5007 error ("nonnull attribute without arguments on a non-prototype");
5008 *no_add_attrs = true;
5009 }
5010 return NULL_TREE;
5011 }
5013 /* Argument list specified. Verify that each argument number references
5014 a pointer argument. */
5015 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
5016 {
5017 tree argument;
5018 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
5020 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
5021 {
5022 error ("nonnull argument has invalid operand number (argument %lu)",
5023 (unsigned long) attr_arg_num);
5024 *no_add_attrs = true;
5025 return NULL_TREE;
5026 }
5028 argument = TYPE_ARG_TYPES (type);
5029 if (argument)
5030 {
5031 for (ck_num = 1; ; ck_num++)
5032 {
5033 if (!argument || ck_num == arg_num)
5034 break;
5035 argument = TREE_CHAIN (argument);
5036 }
5037 }
5038 if (!argument
5039 || TREE_CODE (TREE_VALUE (argument)) == VOID_TYPE)
5040 {
5041 error ("nonnull argument with out-of-range operand number (argument %lu, operand %lu)",
5042 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5043 *no_add_attrs = true;
5044 return NULL_TREE;
5045 }
5047 if (TREE_CODE (TREE_VALUE (argument)) != POINTER_TYPE)
5048 {
5049 error ("nonnull argument references non-pointer operand (argument %lu, operand %lu)",
5050 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5051 *no_add_attrs = true;
5052 return NULL_TREE;
5053 }
5054 }
5057 return NULL_TREE;
5058 }
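/* Editor's illustration (not part of the original source): in C terms the
   attribute being validated above looks like

     extern void copy (void *dst, const void *src, int n)
       __attribute__ ((nonnull (1, 2)));

   Each operand number (1 and 2 here) must name an existing parameter of
   pointer type; with no operand list, every pointer parameter is treated as
   non-null, which is why a full prototype is required in that case.  The
   declaration above is a made-up example.  */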
5060 /* Handle a "sentinel" attribute. */
5062 static tree
5063 handle_sentinel_attribute (tree *node, tree name, tree args,
5064 int ARG_UNUSED (flags), bool *no_add_attrs)
5065 {
5066 tree params = TYPE_ARG_TYPES (*node);
5068 if (!params)
5069 {
5070 warning (OPT_Wattributes,
5071 "%qE attribute requires prototypes with named arguments", name);
5072 *no_add_attrs = true;
5073 }
5074 else
5075 {
5076 while (TREE_CHAIN (params))
5077 params = TREE_CHAIN (params);
5079 if (VOID_TYPE_P (TREE_VALUE (params)))
5080 {
5081 warning (OPT_Wattributes,
5082 "%qE attribute only applies to variadic functions", name);
5083 *no_add_attrs = true;
5084 }
5085 }
5087 if (args)
5088 {
5089 tree position = TREE_VALUE (args);
5091 if (TREE_CODE (position) != INTEGER_CST)
5092 {
5093 warning (0, "requested position is not an integer constant");
5094 *no_add_attrs = true;
5095 }
5096 else
5097 {
5098 if (tree_int_cst_lt (position, integer_zero_node))
5099 {
5100 warning (0, "requested position is less than zero");
5101 *no_add_attrs = true;
5102 }
5103 }
5104 }
5106 return NULL_TREE;
5107 }
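/* Editor's illustration (not part of the original source): the C counterpart
   of what this handler accepts is a variadic prototype such as

     extern void build_list (const char *first, ...)
       __attribute__ ((sentinel));

   i.e. a prototype with at least one named parameter followed by "...", and
   the optional integer argument, when given, must be a non-negative constant
   position counted from the end of the call's argument list.  The declaration
   above is a made-up example.  */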
5109 /* Handle a "noreturn" attribute; arguments as in
5110 struct attribute_spec.handler. */
5112 static tree
5113 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5114 int ARG_UNUSED (flags), bool *no_add_attrs)
5115 {
5116 tree type = TREE_TYPE (*node);
5118 /* See FIXME comment in c_common_attribute_table. */
5119 if (TREE_CODE (*node) == FUNCTION_DECL)
5120 TREE_THIS_VOLATILE (*node) = 1;
5121 else if (TREE_CODE (type) == POINTER_TYPE
5122 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
5123 TREE_TYPE (*node)
5124 = build_pointer_type
5125 (build_type_variant (TREE_TYPE (type),
5126 TYPE_READONLY (TREE_TYPE (type)), 1));
5127 else
5128 {
5129 warning (OPT_Wattributes, "%qE attribute ignored", name);
5130 *no_add_attrs = true;
5131 }
5133 return NULL_TREE;
5134 }
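/* Editor's illustration (not part of the original source): both of the
   following C forms are covered by the handler above; the names are made up.

     extern void fatal_error (const char *) __attribute__ ((noreturn));
     void (*abort_handler) (void) __attribute__ ((noreturn));

   The first sets TREE_THIS_VOLATILE on the FUNCTION_DECL; the second goes
   through the POINTER_TYPE branch, which rebuilds the pointed-to function
   type as a volatile-qualified variant.  */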
5136 /* Handle a "malloc" attribute; arguments as in
5137 struct attribute_spec.handler. */
5139 static tree
5140 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5141 int ARG_UNUSED (flags), bool *no_add_attrs)
5142 {
5143 if (TREE_CODE (*node) == FUNCTION_DECL
5144 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
5145 DECL_IS_MALLOC (*node) = 1;
5146 else
5147 {
5148 warning (OPT_Wattributes, "%qE attribute ignored", name);
5149 *no_add_attrs = true;
5150 }
5152 return NULL_TREE;
5153 }
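/* Editor's illustration (not part of the original source): the guard above
   means that a declaration like

     extern void *alloc_block (unsigned long) __attribute__ ((malloc));

   gets DECL_IS_MALLOC set, while the same attribute on a function that does
   not return a pointer is ignored with a warning.  "alloc_block" is a
   made-up name.  */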
5155 /* Fake handler for attributes we don't properly support. */
5157 tree
5158 fake_attribute_handler (tree * ARG_UNUSED (node),
5159 tree ARG_UNUSED (name),
5160 tree ARG_UNUSED (args),
5161 int ARG_UNUSED (flags),
5162 bool * ARG_UNUSED (no_add_attrs))
5163 {
5164 return NULL_TREE;
5165 }
5167 /* Handle a "type_generic" attribute. */
5169 static tree
5170 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
5171 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5172 bool * ARG_UNUSED (no_add_attrs))
5173 {
5174 tree params;
5176 /* Ensure we have a function type. */
5177 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
5179 params = TYPE_ARG_TYPES (*node);
5180 while (params && ! VOID_TYPE_P (TREE_VALUE (params)))
5181 params = TREE_CHAIN (params);
5183 /* Ensure we have a variadic function. */
5184 gcc_assert (!params);
5186 return NULL_TREE;
5187 }
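/* Editor's note (not part of the original source): the assertions above mean
   "type_generic" is only ever placed on variadic function types, i.e. ones
   whose TYPE_ARG_TYPES chain does not end with the void sentinel.  A plausible
   C rendering would be something like

     int __builtin_isgreater (...);

   the builtin named here is only an assumed example of such a type-generic
   function.  */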
5189 /* ----------------------------------------------------------------------- *
5190 * BUILTIN FUNCTIONS *
5191 * ----------------------------------------------------------------------- */
5193 /* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
5194 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
5195 if nonansi_p and flag_no_nonansi_builtin. */
5197 static void
5198 def_builtin_1 (enum built_in_function fncode,
5199 const char *name,
5200 enum built_in_class fnclass,
5201 tree fntype, tree libtype,
5202 bool both_p, bool fallback_p,
5203 bool nonansi_p ATTRIBUTE_UNUSED,
5204 tree fnattrs, bool implicit_p)
5205 {
5206 tree decl;
5207 const char *libname;
5209 /* Preserve an already installed decl. It most likely was set up in advance
5210 (e.g. as part of the internal builtins) for specific reasons. */
5211 if (built_in_decls[(int) fncode] != NULL_TREE)
5212 return;
5214 gcc_assert ((!both_p && !fallback_p)
5215 || !strncmp (name, "__builtin_",
5216 strlen ("__builtin_")));
5218 libname = name + strlen ("__builtin_");
5219 decl = add_builtin_function (name, fntype, fncode, fnclass,
5220 (fallback_p ? libname : NULL),
5221 fnattrs);
5222 if (both_p)
5223 /* ??? This is normally further controlled by command-line options
5224 like -fno-builtin, but we don't have them for Ada. */
5225 add_builtin_function (libname, libtype, fncode, fnclass,
5226 NULL, fnattrs);
5228 built_in_decls[(int) fncode] = decl;
5229 if (implicit_p)
5230 implicit_built_in_decls[(int) fncode] = decl;
5231 }
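/* Editor's illustration (not part of the original source): with both_p set, a
   builtin such as memcpy is expected to end up with two declarations, the
   __builtin_ one and the plain library one.  A sketch of such a call, using
   type and attribute enumerators assumed from the usual builtins.def and
   builtin-attrs.def naming:

     def_builtin_1 (BUILT_IN_MEMCPY, "__builtin_memcpy", BUILT_IN_NORMAL,
                    builtin_types[(int) BT_FN_PTR_PTR_CONST_PTR_SIZE],
                    builtin_types[(int) BT_FN_PTR_PTR_CONST_PTR_SIZE],
                    true, true, false,
                    built_in_attributes[(int) ATTR_NOTHROW_NONNULL], true);

   This declares both "__builtin_memcpy" and "memcpy", records the decl in
   built_in_decls and, because implicit_p is true, in implicit_built_in_decls
   as well.  */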
5233 static int flag_isoc94 = 0;
5234 static int flag_isoc99 = 0;
5236 /* Install what the common builtins.def offers. */
5238 static void
5239 install_builtin_functions (void)
5240 {
5241 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
5242 NONANSI_P, ATTRS, IMPLICIT, COND) \
5243 if (NAME && COND) \
5244 def_builtin_1 (ENUM, NAME, CLASS, \
5245 builtin_types[(int) TYPE], \
5246 builtin_types[(int) LIBTYPE], \
5247 BOTH_P, FALLBACK_P, NONANSI_P, \
5248 built_in_attributes[(int) ATTRS], IMPLICIT);
5249 #include "builtins.def"
5250 #undef DEF_BUILTIN
5251 }
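/* Editor's note (not part of the original source): builtins.def entries are
   normally written with wrapper macros (e.g. DEF_LIB_BUILTIN, DEF_GCC_BUILTIN)
   that expand to the eleven-argument DEF_BUILTIN form consumed above, so each
   included entry is expected to turn into one guarded call along the lines of

     if ("__builtin_memcpy" && true)
       def_builtin_1 (BUILT_IN_MEMCPY, "__builtin_memcpy", BUILT_IN_NORMAL,
                      ...);

   The exact NAME and COND arguments depend on the entry; this is a sketch
   only.  */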
5253 /* ----------------------------------------------------------------------- *
5254 * BUILTIN FUNCTIONS *
5255 * ----------------------------------------------------------------------- */
5257 /* Install the builtin functions we might need. */
5259 void
5260 gnat_install_builtins (void)
5261 {
5262 install_builtin_elementary_types ();
5263 install_builtin_function_types ();
5264 install_builtin_attributes ();
5266 /* Install builtins used by generic middle-end pieces first. Some of these
5267 know about internal specifics and control the attributes accordingly, for
5268 instance __builtin_alloca vs no-throw and -fstack-check. Their generic
5269 definitions from builtins.def will then be ignored. */
5270 build_common_builtin_nodes ();
5272 /* Now, install the target specific builtins, such as the AltiVec family on
5273 ppc, and the common set as exposed by builtins.def. */
5274 targetm.init_builtins ();
5275 install_builtin_functions ();
5276 }
5278 #include "gt-ada-utils.h"
5279 #include "gtype-ada.h"