ada: Fix issue with indefinite vector of overaligned unconstrained array
[official-gcc.git] / gcc / ada / gcc-interface / utils.cc
blobe7b5c7783b1f1c702130c8879c79b7e329764b09
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2023, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "target.h"
30 #include "function.h"
31 #include "tree.h"
32 #include "stringpool.h"
33 #include "cgraph.h"
34 #include "diagnostic.h"
35 #include "alias.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "attribs.h"
39 #include "varasm.h"
40 #include "toplev.h"
41 #include "opts.h"
42 #include "output.h"
43 #include "debug.h"
44 #include "convert.h"
45 #include "common/common-target.h"
46 #include "langhooks.h"
47 #include "tree-dump.h"
48 #include "tree-inline.h"
50 #include "ada.h"
51 #include "types.h"
52 #include "atree.h"
53 #include "nlists.h"
54 #include "snames.h"
55 #include "uintp.h"
56 #include "fe.h"
57 #include "sinfo.h"
58 #include "einfo.h"
59 #include "ada-tree.h"
60 #include "gigi.h"
62 /* If nonzero, pretend we are allocating at global level. */
63 int force_global;
65 /* The default alignment of "double" floating-point types, i.e. floating
66 point types whose size is equal to 64 bits, or 0 if this alignment is
67 not specifically capped. */
68 int double_float_alignment;
70 /* The default alignment of "double" or larger scalar types, i.e. scalar
71 types whose size is greater or equal to 64 bits, or 0 if this alignment
72 is not specifically capped. */
73 int double_scalar_alignment;
75 /* True if floating-point arithmetics may use wider intermediate results. */
76 bool fp_arith_may_widen = true;
78 /* Tree nodes for the various types and decls we create. */
79 tree gnat_std_decls[(int) ADT_LAST];
81 /* Functions to call for each of the possible raise reasons. */
82 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
84 /* Likewise, but with extra info for each of the possible raise reasons. */
85 tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];
87 /* Forward declarations for handlers of attributes. */
88 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
89 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
90 static tree handle_expected_throw_attribute (tree *, tree, tree, int, bool *);
91 static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
92 static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
93 static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
94 static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
95 static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
96 static tree handle_stack_protect_attribute (tree *, tree, tree, int, bool *);
97 static tree handle_no_stack_protector_attribute (tree *, tree, tree, int, bool *);
98 static tree handle_strub_attribute (tree *, tree, tree, int, bool *);
99 static tree handle_noinline_attribute (tree *, tree, tree, int, bool *);
100 static tree handle_noclone_attribute (tree *, tree, tree, int, bool *);
101 static tree handle_noicf_attribute (tree *, tree, tree, int, bool *);
102 static tree handle_noipa_attribute (tree *, tree, tree, int, bool *);
103 static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
104 static tree handle_always_inline_attribute (tree *, tree, tree, int, bool *);
105 static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
106 static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
107 static tree handle_flatten_attribute (tree *, tree, tree, int, bool *);
108 static tree handle_used_attribute (tree *, tree, tree, int, bool *);
109 static tree handle_cold_attribute (tree *, tree, tree, int, bool *);
110 static tree handle_hot_attribute (tree *, tree, tree, int, bool *);
111 static tree handle_simd_attribute (tree *, tree, tree, int, bool *);
112 static tree handle_target_attribute (tree *, tree, tree, int, bool *);
113 static tree handle_target_clones_attribute (tree *, tree, tree, int, bool *);
114 static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
115 static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);
116 static tree handle_zero_call_used_regs_attribute (tree *, tree, tree, int,
117 bool *);
119 static const struct attribute_spec::exclusions attr_cold_hot_exclusions[] =
121 { "cold", true, true, true },
122 { "hot" , true, true, true },
123 { NULL , false, false, false }
126 static const struct attribute_spec::exclusions attr_stack_protect_exclusions[] =
128 { "stack_protect", true, false, false },
129 { "no_stack_protector", true, false, false },
130 { NULL, false, false, false },
133 /* Fake handler for attributes we don't properly support, typically because
134 they'd require dragging a lot of the common-c front-end circuitry. */
135 static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
137 /* Table of machine-independent internal attributes for Ada. We support
138 this minimal set of attributes to accommodate the needs of builtins. */
139 const struct attribute_spec gnat_internal_attribute_table[] =
141 /* { name, min_len, max_len, decl_req, type_req, fn_type_req,
142 affects_type_identity, handler, exclude } */
143 { "const", 0, 0, true, false, false, false,
144 handle_const_attribute, NULL },
145 { "nothrow", 0, 0, true, false, false, false,
146 handle_nothrow_attribute, NULL },
147 { "expected_throw", 0, 0, true, false, false, false,
148 handle_expected_throw_attribute, NULL },
149 { "pure", 0, 0, true, false, false, false,
150 handle_pure_attribute, NULL },
151 { "no vops", 0, 0, true, false, false, false,
152 handle_novops_attribute, NULL },
153 { "nonnull", 0, -1, false, true, true, false,
154 handle_nonnull_attribute, NULL },
155 { "sentinel", 0, 1, false, true, true, false,
156 handle_sentinel_attribute, NULL },
157 { "noreturn", 0, 0, true, false, false, false,
158 handle_noreturn_attribute, NULL },
159 { "stack_protect",0, 0, true, false, false, false,
160 handle_stack_protect_attribute,
161 attr_stack_protect_exclusions },
162 { "no_stack_protector",0, 0, true, false, false, false,
163 handle_no_stack_protector_attribute,
164 attr_stack_protect_exclusions },
165 { "strub", 0, 1, false, true, false, true,
166 handle_strub_attribute, NULL },
167 { "noinline", 0, 0, true, false, false, false,
168 handle_noinline_attribute, NULL },
169 { "noclone", 0, 0, true, false, false, false,
170 handle_noclone_attribute, NULL },
171 { "no_icf", 0, 0, true, false, false, false,
172 handle_noicf_attribute, NULL },
173 { "noipa", 0, 0, true, false, false, false,
174 handle_noipa_attribute, NULL },
175 { "leaf", 0, 0, true, false, false, false,
176 handle_leaf_attribute, NULL },
177 { "always_inline",0, 0, true, false, false, false,
178 handle_always_inline_attribute, NULL },
179 { "malloc", 0, 0, true, false, false, false,
180 handle_malloc_attribute, NULL },
181 { "type generic", 0, 0, false, true, true, false,
182 handle_type_generic_attribute, NULL },
184 { "flatten", 0, 0, true, false, false, false,
185 handle_flatten_attribute, NULL },
186 { "used", 0, 0, true, false, false, false,
187 handle_used_attribute, NULL },
188 { "cold", 0, 0, true, false, false, false,
189 handle_cold_attribute, attr_cold_hot_exclusions },
190 { "hot", 0, 0, true, false, false, false,
191 handle_hot_attribute, attr_cold_hot_exclusions },
192 { "simd", 0, 1, true, false, false, false,
193 handle_simd_attribute, NULL },
194 { "target", 1, -1, true, false, false, false,
195 handle_target_attribute, NULL },
196 { "target_clones",1, -1, true, false, false, false,
197 handle_target_clones_attribute, NULL },
199 { "vector_size", 1, 1, false, true, false, false,
200 handle_vector_size_attribute, NULL },
201 { "vector_type", 0, 0, false, true, false, false,
202 handle_vector_type_attribute, NULL },
203 { "may_alias", 0, 0, false, true, false, false,
204 NULL, NULL },
206 { "zero_call_used_regs", 1, 1, true, false, false, false,
207 handle_zero_call_used_regs_attribute, NULL },
209 /* ??? format and format_arg are heavy and not supported, which actually
210 prevents support for stdio builtins, which we however declare as part
211 of the common builtins.def contents. */
212 { "format", 3, 3, false, true, true, false,
213 fake_attribute_handler, NULL },
214 { "format_arg", 1, 1, false, true, true, false,
215 fake_attribute_handler, NULL },
217 /* This is handled entirely in the front end. */
218 { "hardbool", 0, 0, false, true, false, true,
219 fake_attribute_handler, NULL },
221 { NULL, 0, 0, false, false, false, false,
222 NULL, NULL }
225 /* Associates a GNAT tree node to a GCC tree node. It is used in
226 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
227 of `save_gnu_tree' for more info. */
228 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
230 #define GET_GNU_TREE(GNAT_ENTITY) \
231 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
233 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
234 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
236 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
237 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
239 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
240 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
242 #define GET_DUMMY_NODE(GNAT_ENTITY) \
243 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
245 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
246 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
248 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
249 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
251 /* This variable keeps a table for types for each precision so that we only
252 allocate each of them once. Signed and unsigned types are kept separate.
254 Note that these types are only used when fold-const requests something
255 special. Perhaps we should NOT share these types; we'll see how it
256 goes later. */
257 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
259 /* Likewise for float types, but record these by mode. */
260 static GTY(()) tree float_types[NUM_MACHINE_MODES];
262 /* For each binding contour we allocate a binding_level structure to indicate
263 the binding depth. */
// NOTE(review): the closing "};" of this struct was lost in extraction;
// the member tokens below are kept verbatim.
265 struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
266 /* The binding level containing this one (the enclosing binding level). */
267 struct gnat_binding_level *chain;
268 /* The BLOCK node for this level. */
269 tree block;
272 /* The binding level currently in effect. */
273 static GTY(()) struct gnat_binding_level *current_binding_level;
275 /* A chain of gnat_binding_level structures awaiting reuse. */
276 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
278 /* The context to be used for global declarations. */
279 static GTY(()) tree global_context;
281 /* An array of global declarations. */
282 static GTY(()) vec<tree, va_gc> *global_decls;
284 /* An array of builtin function declarations. */
285 static GTY(()) vec<tree, va_gc> *builtin_decls;
287 /* A chain of unused BLOCK nodes. */
288 static GTY((deletable)) tree free_block_chain;
290 /* A hash table of packable types. It is modelled on the generic type
291 hash table in tree.cc, which must thus be used as a reference. */
293 struct GTY((for_user)) packable_type_hash
295 hashval_t hash;
296 tree type;
299 struct packable_type_hasher : ggc_cache_ptr_hash<packable_type_hash>
301 static inline hashval_t hash (packable_type_hash *t) { return t->hash; }
302 static bool equal (packable_type_hash *a, packable_type_hash *b);
304 static int
305 keep_cache_entry (packable_type_hash *&t)
307 return ggc_marked_p (t->type);
311 static GTY ((cache)) hash_table<packable_type_hasher> *packable_type_hash_table;
313 /* A hash table of padded types. It is modelled on the generic type
314 hash table in tree.cc, which must thus be used as a reference. */
316 struct GTY((for_user)) pad_type_hash
318 hashval_t hash;
319 tree type;
322 struct pad_type_hasher : ggc_cache_ptr_hash<pad_type_hash>
324 static inline hashval_t hash (pad_type_hash *t) { return t->hash; }
325 static bool equal (pad_type_hash *a, pad_type_hash *b);
327 static int
328 keep_cache_entry (pad_type_hash *&t)
330 return ggc_marked_p (t->type);
334 static GTY ((cache)) hash_table<pad_type_hasher> *pad_type_hash_table;
336 static tree merge_sizes (tree, tree, tree, bool, bool);
337 static tree fold_bit_position (const_tree);
338 static tree compute_related_constant (tree, tree);
339 static tree split_plus (tree, tree *);
340 static tree float_type_for_precision (int, machine_mode);
341 static tree convert_to_fat_pointer (tree, tree);
342 static unsigned int scale_by_factor_of (tree, unsigned int);
344 /* Linked list used as a queue to defer the initialization of the DECL_CONTEXT
345 of ..._DECL nodes and of the TYPE_CONTEXT of ..._TYPE nodes. */
346 struct deferred_decl_context_node
348 /* The ..._DECL node to work on. */
349 tree decl;
351 /* The corresponding entity's Scope. */
352 Entity_Id gnat_scope;
354 /* The value of force_global when DECL was pushed. */
355 int force_global;
357 /* The list of ..._TYPE nodes to propagate the context to. */
358 vec<tree> types;
360 /* The next queue item. */
361 struct deferred_decl_context_node *next;
364 static struct deferred_decl_context_node *deferred_decl_context_queue = NULL;
366 /* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
367 feed it with the elaboration of GNAT_SCOPE. */
368 static struct deferred_decl_context_node *
369 add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global);
371 /* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
372 feed it with the DECL_CONTEXT computed as part of N as soon as it is
373 computed. */
374 static void add_deferred_type_context (struct deferred_decl_context_node *n,
375 tree type);
377 /* Initialize data structures of the utils.cc module. */
379 void
380 init_gnat_utils (void)
382 /* Initialize the association of GNAT nodes to GCC trees. */
383 associate_gnat_to_gnu = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
385 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
386 dummy_node_table = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
388 /* Initialize the hash table of packable types. */
389 packable_type_hash_table = hash_table<packable_type_hasher>::create_ggc (512);
391 /* Initialize the hash table of padded types. */
392 pad_type_hash_table = hash_table<pad_type_hasher>::create_ggc (512);
395 /* Destroy data structures of the utils.cc module. */
397 void
398 destroy_gnat_utils (void)
400 /* Destroy the association of GNAT nodes to GCC trees. */
401 ggc_free (associate_gnat_to_gnu);
402 associate_gnat_to_gnu = NULL;
404 /* Destroy the association of GNAT nodes to GCC trees as dummies. */
405 ggc_free (dummy_node_table);
406 dummy_node_table = NULL;
408 /* Destroy the hash table of packable types. */
409 packable_type_hash_table->empty ();
410 packable_type_hash_table = NULL;
412 /* Destroy the hash table of padded types. */
413 pad_type_hash_table->empty ();
414 pad_type_hash_table = NULL;
417 /* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
418 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
419 If NO_CHECK is true, the latter check is suppressed.
421 If GNU_DECL is zero, reset a previous association. */
423 void
424 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
426 /* Check that GNAT_ENTITY is not already defined and that it is being set
427 to something which is a decl. If that is not the case, this usually
428 means GNAT_ENTITY is defined twice, but occasionally is due to some
429 Gigi problem. */
430 gcc_assert (!(gnu_decl
431 && (PRESENT_GNU_TREE (gnat_entity)
432 || (!no_check && !DECL_P (gnu_decl)))));
434 SET_GNU_TREE (gnat_entity, gnu_decl);
437 /* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
438 that was associated with it. If there is no such tree node, abort.
440 In some cases, such as delayed elaboration or expressions that need to
441 be elaborated only once, GNAT_ENTITY is really not an entity. */
443 tree
444 get_gnu_tree (Entity_Id gnat_entity)
446 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
447 return GET_GNU_TREE (gnat_entity);
450 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
452 bool
453 present_gnu_tree (Entity_Id gnat_entity)
455 return PRESENT_GNU_TREE (gnat_entity);
// NOTE(review): this chunk comes from an extraction that dropped the
// brace-only lines of the original; the code tokens below are verbatim.
458 /* Make a dummy type corresponding to GNAT_TYPE. */
460 tree
461 make_dummy_type (Entity_Id gnat_type)
463 Entity_Id gnat_equiv = Gigi_Equivalent_Type (Underlying_Type (gnat_type));
464 tree gnu_type, debug_type;
466 /* If there was no equivalent type (can only happen when just annotating
467 types) or underlying type, go back to the original type. */
468 if (No (gnat_equiv))
469 gnat_equiv = gnat_type;
471 /* If there is already a dummy type, use that one. Else make one. */
472 if (PRESENT_DUMMY_NODE (gnat_equiv))
473 return GET_DUMMY_NODE (gnat_equiv);
475 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
476 an ENUMERAL_TYPE. */
477 gnu_type = make_node (Is_Record_Type (gnat_equiv)
478 ? tree_code_for_record_type (gnat_equiv)
479 : ENUMERAL_TYPE);
480 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
481 TYPE_DUMMY_P (gnu_type) = 1;
482 TYPE_STUB_DECL (gnu_type)
483 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
484 if (Is_By_Reference_Type (gnat_equiv))
485 TYPE_BY_REFERENCE_P (gnu_type) = 1;
// "may_alias" is applied in place to discriminated types — presumably to
// give them alias-anything semantics; see the GCC attribute docs to confirm.
486 if (Has_Discriminants (gnat_equiv))
487 decl_attributes (&gnu_type,
488 tree_cons (get_identifier ("may_alias"), NULL_TREE,
489 NULL_TREE),
490 ATTR_FLAG_TYPE_IN_PLACE);
// Record the dummy before building debug info so later lookups hit the cache.
492 SET_DUMMY_NODE (gnat_equiv, gnu_type);
494 /* Create a debug type so that debuggers only see an unspecified type. */
495 if (Needs_Debug_Info (gnat_type))
497 debug_type = make_node (LANG_TYPE);
498 TYPE_NAME (debug_type) = TYPE_NAME (gnu_type);
499 TYPE_ARTIFICIAL (debug_type) = TYPE_ARTIFICIAL (gnu_type);
500 SET_TYPE_DEBUG_TYPE (gnu_type, debug_type);
503 return gnu_type;
506 /* Return the dummy type that was made for GNAT_TYPE, if any. */
508 tree
509 get_dummy_type (Entity_Id gnat_type)
511 return GET_DUMMY_NODE (gnat_type);
// NOTE(review): brace-only lines of the original were dropped by the
// extraction; code tokens below are verbatim.  The XUB/XUA/XUP/XUT suffixes
// are the standard GNAT debug-encoding names (bounds template, array,
// fat pointer, object record) — see exp_dbug.ads in the GNAT sources.
514 /* Build dummy fat and thin pointer types whose designated type is specified
515 by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter. */
517 void
518 build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
520 tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
521 tree gnu_fat_type, fields, gnu_object_type;
// Dummy record for the bounds template and a pointer to it.
523 gnu_template_type = make_node (RECORD_TYPE);
524 TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
525 TYPE_DUMMY_P (gnu_template_type) = 1;
526 gnu_ptr_template = build_pointer_type (gnu_template_type);
// Dummy placeholder for the array type and a pointer to it.
528 gnu_array_type = make_node (ENUMERAL_TYPE);
529 TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
530 TYPE_DUMMY_P (gnu_array_type) = 1;
531 gnu_ptr_array = build_pointer_type (gnu_array_type);
// The fat pointer: a record with P_ARRAY and P_BOUNDS fields.
533 gnu_fat_type = make_node (RECORD_TYPE);
534 /* Build a stub DECL to trigger the special processing for fat pointer types
535 in gnat_pushdecl. */
536 TYPE_NAME (gnu_fat_type)
537 = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
538 gnu_fat_type);
539 fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
540 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 1);
541 DECL_CHAIN (fields)
542 = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
543 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 1);
544 finish_fat_pointer_type (gnu_fat_type, fields);
545 SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
546 /* Suppress debug info until after the type is completed. */
547 TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;
// Dummy record for the object type underlying thin pointers.
549 gnu_object_type = make_node (RECORD_TYPE);
550 TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
551 TYPE_DUMMY_P (gnu_object_type) = 1;
// Attach everything to the designated type.
553 TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
554 TYPE_REFERENCE_TO (gnu_desig_type) = gnu_fat_type;
555 TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
558 /* Return true if we are in the global binding level. */
560 bool
561 global_bindings_p (void)
563 return force_global || !current_function_decl;
566 /* Enter a new binding level. */
568 void
569 gnat_pushlevel (void)
571 struct gnat_binding_level *newlevel = NULL;
573 /* Reuse a struct for this binding level, if there is one. */
574 if (free_binding_level)
576 newlevel = free_binding_level;
577 free_binding_level = free_binding_level->chain;
579 else
580 newlevel = ggc_alloc<gnat_binding_level> ();
582 /* Use a free BLOCK, if any; otherwise, allocate one. */
583 if (free_block_chain)
585 newlevel->block = free_block_chain;
586 free_block_chain = BLOCK_CHAIN (free_block_chain);
587 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
589 else
590 newlevel->block = make_node (BLOCK);
592 /* Point the BLOCK we just made to its parent. */
593 if (current_binding_level)
594 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
596 BLOCK_VARS (newlevel->block) = NULL_TREE;
597 BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
598 TREE_USED (newlevel->block) = 1;
600 /* Add this level to the front of the chain (stack) of active levels. */
601 newlevel->chain = current_binding_level;
602 current_binding_level = newlevel;
605 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
606 and point FNDECL to this BLOCK. */
608 void
609 set_current_block_context (tree fndecl)
611 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
612 DECL_INITIAL (fndecl) = current_binding_level->block;
613 set_block_for_group (current_binding_level->block);
// NOTE(review): brace-only lines were dropped by the extraction; in
// particular the lone ';' body of the first 'if' (original line 631) and
// the braces of the 'else if'/'else' arms are missing.  Code tokens below
// are verbatim.
616 /* Exit a binding level. Set any BLOCK into the current code group. */
618 void
619 gnat_poplevel (void)
621 struct gnat_binding_level *level = current_binding_level;
622 tree block = level->block;
// Restore source order: the chains were built by prepending.
624 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
625 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
627 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
628 are no variables free the block and merge its subblocks into those of its
629 parent block. Otherwise, add it to the list of its parent. */
630 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
632 else if (!BLOCK_VARS (block))
634 BLOCK_SUBBLOCKS (level->chain->block)
635 = block_chainon (BLOCK_SUBBLOCKS (block),
636 BLOCK_SUBBLOCKS (level->chain->block))
637 BLOCK_CHAIN (block) = free_block_chain;
638 free_block_chain = block;
640 else
642 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
643 BLOCK_SUBBLOCKS (level->chain->block) = block;
644 TREE_USED (block) = 1;
645 set_block_for_group (block);
648 /* Free this binding structure. */
649 current_binding_level = level->chain;
650 level->chain = free_binding_level;
651 free_binding_level = level;
654 /* Exit a binding level and discard the associated BLOCK. */
656 void
657 gnat_zaplevel (void)
659 struct gnat_binding_level *level = current_binding_level;
660 tree block = level->block;
662 BLOCK_CHAIN (block) = free_block_chain;
663 free_block_chain = block;
665 /* Free this binding structure. */
666 current_binding_level = level->chain;
667 level->chain = free_binding_level;
668 free_binding_level = level;
// NOTE(review): brace-only lines of the original (the function body and
// the while/if blocks) were dropped by the extraction; code tokens below
// are verbatim.
671 /* Set the context of TYPE and its parallel types (if any) to CONTEXT. */
673 static void
674 gnat_set_type_context (tree type, tree context)
676 tree decl = TYPE_STUB_DECL (type);
678 TYPE_CONTEXT (type) = context;
// Walk the chain of parallel types hanging off successive stub decls.
680 while (decl && DECL_PARALLEL_TYPE (decl))
682 tree parallel_type = DECL_PARALLEL_TYPE (decl);
684 /* Give a context to the parallel types and their stub decl, if any.
685 Some parallel types seem to be present in multiple parallel type
686 chains, so don't mess with their context if they already have one. */
687 if (!TYPE_CONTEXT (parallel_type))
689 if (TYPE_STUB_DECL (parallel_type))
690 DECL_CONTEXT (TYPE_STUB_DECL (parallel_type)) = context;
691 TYPE_CONTEXT (parallel_type) = context;
694 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
// NOTE(review): brace-only lines (the while/switch bodies) were dropped by
// the extraction; code tokens below are verbatim.
698 /* Return the innermost scope, starting at GNAT_NODE, we are interested in
699 the debug info, or Empty if there is no such scope. If not NULL, set
700 IS_SUBPROGRAM to whether the returned entity is a subprogram. */
702 Entity_Id
703 get_debug_scope (Node_Id gnat_node, bool *is_subprogram)
705 Entity_Id gnat_entity;
// IS_SUBPROGRAM is an optional out-parameter; initialize it defensively.
707 if (is_subprogram)
708 *is_subprogram = false;
// Only defining identifiers/operator symbols carry a usable Scope.
710 if (Nkind (gnat_node) == N_Defining_Identifier
711 || Nkind (gnat_node) == N_Defining_Operator_Symbol)
712 gnat_entity = Scope (gnat_node);
713 else
714 return Empty;
// Climb the scope chain until a subprogram or record scope is found.
716 while (Present (gnat_entity))
718 switch (Ekind (gnat_entity))
720 case E_Function:
721 case E_Procedure:
722 if (Present (Protected_Body_Subprogram (gnat_entity)))
723 gnat_entity = Protected_Body_Subprogram (gnat_entity);
725 /* If the scope is a subprogram, then just rely on
726 current_function_decl, so that we don't have to defer
727 anything. This is needed because other places rely on the
728 validity of the DECL_CONTEXT attribute of FUNCTION_DECL nodes. */
729 if (is_subprogram)
730 *is_subprogram = true;
731 return gnat_entity;
733 case E_Record_Type:
734 case E_Record_Subtype:
735 return gnat_entity;
737 default:
738 /* By default, we are not interested in this particular scope: go to
739 the outer one. */
740 break;
743 gnat_entity = Scope (gnat_entity);
746 return Empty;
749 /* If N is NULL, set TYPE's context to CONTEXT. Defer this to the processing
750 of N otherwise. */
752 static void
753 defer_or_set_type_context (tree type, tree context,
754 struct deferred_decl_context_node *n)
756 if (n)
757 add_deferred_type_context (n, type);
758 else
759 gnat_set_type_context (type, context);
762 /* Return global_context, but create it first if need be. */
764 static tree
765 get_global_context (void)
767 if (!global_context)
769 global_context
770 = build_translation_unit_decl (get_identifier (main_input_filename));
771 debug_hooks->register_main_translation_unit (global_context);
774 return global_context;
// NOTE(review): this function's brace-only lines and lone-';' empty
// statements were dropped by the extraction (e.g. original lines 894 and
// 919, the empty bodies of two 'if'/'else if' arms below); code tokens are
// kept verbatim.
777 /* Record DECL as belonging to the current lexical scope and use GNAT_NODE
778 for location information and flag propagation. */
780 void
781 gnat_pushdecl (tree decl, Node_Id gnat_node)
783 tree context = NULL_TREE;
784 struct deferred_decl_context_node *deferred_decl_context = NULL;
786 /* If explicitly asked to make DECL global or if it's an imported nested
787 object, short-circuit the regular Scope-based context computation. */
788 if (!((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || force_global == 1))
790 /* Rely on the GNAT scope, or fallback to the current_function_decl if
791 the GNAT scope reached the global scope, if it reached a subprogram
792 or the declaration is a subprogram or a variable (for them we skip
793 intermediate context types because the subprogram body elaboration
794 machinery and the inliner both expect a subprogram context).
796 Falling back to current_function_decl is necessary for implicit
797 subprograms created by gigi, such as the elaboration subprograms. */
798 bool context_is_subprogram = false;
799 const Entity_Id gnat_scope
800 = get_debug_scope (gnat_node, &context_is_subprogram);
802 if (Present (gnat_scope)
803 && !context_is_subprogram
804 && TREE_CODE (decl) != FUNCTION_DECL
805 && TREE_CODE (decl) != VAR_DECL)
806 /* Always assume the scope has not been elaborated, thus defer the
807 context propagation to the time its elaboration will be
808 available. */
809 deferred_decl_context
810 = add_deferred_decl_context (decl, gnat_scope, force_global);
812 /* External declarations (when force_global > 0) may not be in a
813 local context. */
814 else if (current_function_decl && force_global == 0)
815 context = current_function_decl;
818 /* If either we are forced to be in global mode or if both the GNAT scope and
819 the current_function_decl did not help in determining the context, use the
820 global scope. */
821 if (!deferred_decl_context && !context)
822 context = get_global_context ();
824 /* Functions imported in another function are not really nested.
825 For really nested functions mark them initially as needing
826 a static chain for uses of that flag before unnesting;
827 lower_nested_functions will then recompute it. */
828 if (TREE_CODE (decl) == FUNCTION_DECL
829 && !TREE_PUBLIC (decl)
830 && context
831 && (TREE_CODE (context) == FUNCTION_DECL
832 || decl_function_context (context)))
833 DECL_STATIC_CHAIN (decl) = 1;
835 if (!deferred_decl_context)
836 DECL_CONTEXT (decl) = context;
838 /* Disable warnings for compiler-generated entities or explicit request. */
839 if (No (gnat_node)
840 || !Comes_From_Source (gnat_node)
841 || Warnings_Off (gnat_node))
842 suppress_warning (decl);
844 /* Set the location of DECL and emit a declaration for it. */
845 if (Present (gnat_node) && !renaming_from_instantiation_p (gnat_node))
846 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
848 add_decl_expr (decl, gnat_node);
850 /* Put the declaration on the list. The list of declarations is in reverse
851 order. The list will be reversed later. Put global declarations in the
852 globals list and local ones in the current block. But skip TYPE_DECLs
853 for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
854 with the debugger and aren't needed anyway. */
855 if (!(TREE_CODE (decl) == TYPE_DECL
856 && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
858 /* External declarations must go to the binding level they belong to.
859 This will make the corresponding imported entities available in the
860 debugger at the proper time. */
861 if (DECL_EXTERNAL (decl)
862 && TREE_CODE (decl) == FUNCTION_DECL
863 && fndecl_built_in_p (decl))
864 vec_safe_push (builtin_decls, decl);
865 else if (global_bindings_p ())
866 vec_safe_push (global_decls, decl);
867 else
869 DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
870 BLOCK_VARS (current_binding_level->block) = decl;
874 /* Pointer types aren't named types in the C sense so we need to generate a
875 typedef in DWARF for them. Also do that for fat pointer types because,
876 even though they are named types in the C sense, they are still the XUP
877 types created for the base array type at this point. */
878 #define TYPE_IS_POINTER_P(NODE) \
879 (TREE_CODE (NODE) == POINTER_TYPE || TYPE_IS_FAT_POINTER_P (NODE))
881 /* For the declaration of a type, set its name either if it isn't already
882 set or if the previous type name was not derived from a source name.
883 We'd rather have the type named with a real name and all the pointer
884 types to the same object have the same node, except when the names are
885 both derived from source names. */
886 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
888 tree t = TREE_TYPE (decl);
890 /* For pointer types, make sure the typedef is generated and preserved
891 in DWARF, unless the type is artificial. */
// (empty statement body here in the original — intentionally do nothing)
892 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
893 && (!TYPE_IS_POINTER_P (t) || DECL_ARTIFICIAL (decl)))
895 /* For pointer types, create the DECL_ORIGINAL_TYPE that will generate
896 the typedef in DWARF. */
897 else if (TYPE_IS_POINTER_P (t) && !DECL_ARTIFICIAL (decl))
899 tree tt = build_variant_type_copy (t);
900 TYPE_NAME (tt) = decl;
901 defer_or_set_type_context (tt,
902 DECL_CONTEXT (decl),
903 deferred_decl_context);
904 TREE_TYPE (decl) = tt;
905 if (TYPE_NAME (t)
906 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
907 && DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
908 DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
909 else
910 DECL_ORIGINAL_TYPE (decl) = t;
911 /* Remark the canonical fat pointer type as artificial. */
912 if (TYPE_IS_FAT_POINTER_P (t))
913 TYPE_ARTIFICIAL (t) = 1;
// t cleared so the variant-propagation loop below is skipped.
914 t = NULL_TREE;
// (another empty statement body in the original — do nothing)
916 else if (TYPE_NAME (t)
917 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
918 && DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
920 else
921 t = NULL_TREE;
923 /* Propagate the name to all the variants, this is needed for the type
924 qualifiers machinery to work properly (see check_qualified_type).
925 Also propagate the context to them. Note that it will be propagated
926 to all parallel types too thanks to gnat_set_type_context. */
927 if (t)
928 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
929 /* Skip it for pointer types to preserve the typedef. */
930 if (!(TYPE_IS_POINTER_P (t)
931 && TYPE_NAME (t)
932 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
934 TYPE_NAME (t) = decl;
935 defer_or_set_type_context (t,
936 DECL_CONTEXT (decl),
937 deferred_decl_context);
941 #undef TYPE_IS_POINTER_P
944 /* Create a record type that contains a SIZE bytes long field of TYPE with a
945 starting bit position so that it is aligned to ALIGN bits, and leaving at
946 least ROOM bytes free before the field. BASE_ALIGN is the alignment the
947 record is guaranteed to get. GNAT_NODE is used for the position of the
948 associated TYPE_DECL. */
950 tree
951 make_aligning_type (tree type, unsigned int align, tree size,
952 unsigned int base_align, int room, Node_Id gnat_node)
954 /* We will be crafting a record type with one field at a position set to be
955 the next multiple of ALIGN past record'address + room bytes. We use a
956 record placeholder to express record'address. */
957 tree record_type = make_node (RECORD_TYPE);
958 tree record = build0 (PLACEHOLDER_EXPR, record_type);
960 tree record_addr_st
961 = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));
963 /* The diagram below summarizes the shape of what we manipulate:
965 <--------- pos ---------->
966 { +------------+-------------+-----------------+
967 record =>{ |############| ... | field (type) |
968 { +------------+-------------+-----------------+
969 |<-- room -->|<- voffset ->|<---- size ----->|
972 record_addr vblock_addr
974 Every length is in sizetype bytes there, except "pos" which has to be
975 set as a bit position in the GCC tree for the record. */
976 tree room_st = size_int (room);
977 tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
978 tree voffset_st, pos, field;
980 tree name = TYPE_IDENTIFIER (type);
982 name = concat_name (name, "ALIGN");
983 TYPE_NAME (record_type) = name;
985 /* Compute VOFFSET and then POS. The next byte position multiple of some
986 alignment after some address is obtained by "and"ing the alignment minus
987 1 with the two's complement of the address. */
988 voffset_st = size_binop (BIT_AND_EXPR,
989 fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
990 size_int ((align / BITS_PER_UNIT) - 1));
992 /* POS = (ROOM + VOFFSET) * BIT_PER_UNIT, in bitsizetype. */
993 pos = size_binop (MULT_EXPR,
994 convert (bitsizetype,
995 size_binop (PLUS_EXPR, room_st, voffset_st)),
996 bitsize_unit_node);
998 /* Craft the GCC record representation. We exceptionally do everything
999 manually here because 1) our generic circuitry is not quite ready to
1000 handle the complex position/size expressions we are setting up, 2) we
1001 have a strong simplifying factor at hand: we know the maximum possible
1002 value of voffset, and 3) we have to set/reset at least the sizes in
1003 accordance with this maximum value anyway, as we need them to convey
1004 what should be "alloc"ated for this type.
1006 Use -1 as the 'addressable' indication for the field to prevent the
1007 creation of a bitfield. We don't need one, it would have damaging
1008 consequences on the alignment computation, and create_field_decl would
1009 make one without this special argument, for instance because of the
1010 complex position expression. */
1011 field = create_field_decl (get_identifier ("F"), type, record_type, size,
1012 pos, 1, -1);
1013 TYPE_FIELDS (record_type) = field;
1015 SET_TYPE_ALIGN (record_type, base_align);
1016 TYPE_USER_ALIGN (record_type) = 1;
1018 TYPE_SIZE (record_type)
1019 = size_binop (PLUS_EXPR,
1020 size_binop (MULT_EXPR, convert (bitsizetype, size),
1021 bitsize_unit_node),
1022 bitsize_int (align + room * BITS_PER_UNIT));
1023 TYPE_SIZE_UNIT (record_type)
1024 = size_binop (PLUS_EXPR, size,
1025 size_int (room + align / BITS_PER_UNIT));
1027 SET_TYPE_MODE (record_type, BLKmode);
1028 relate_alias_sets (record_type, type, ALIAS_SET_COPY);
1030 /* Declare it now since it will never be declared otherwise. This is
1031 necessary to ensure that its subtrees are properly marked. */
1032 create_type_decl (name, record_type, true, false, gnat_node);
1034 return record_type;
1037 /* Return true iff the packable types are equivalent. */
1039 bool
1040 packable_type_hasher::equal (packable_type_hash *t1, packable_type_hash *t2)
1042 tree type1, type2;
1044 if (t1->hash != t2->hash)
1045 return 0;
1047 type1 = t1->type;
1048 type2 = t2->type;
1050 /* We consider that packable types are equivalent if they have the same name,
1051 size, alignment, RM size and storage order. Taking the mode into account
1052 is redundant since it is determined by the others. */
1053 return
1054 TYPE_NAME (type1) == TYPE_NAME (type2)
1055 && TYPE_SIZE (type1) == TYPE_SIZE (type2)
1056 && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
1057 && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2)
1058 && TYPE_REVERSE_STORAGE_ORDER (type1) == TYPE_REVERSE_STORAGE_ORDER (type2);
1061 /* Compute the hash value for the packable TYPE. */
1063 static hashval_t
1064 hash_packable_type (tree type)
1066 hashval_t hashcode;
1068 hashcode = iterative_hash_expr (TYPE_NAME (type), 0);
1069 hashcode = iterative_hash_expr (TYPE_SIZE (type), hashcode);
1070 hashcode = iterative_hash_hashval_t (TYPE_ALIGN (type), hashcode);
1071 hashcode = iterative_hash_expr (TYPE_ADA_SIZE (type), hashcode);
1072 hashcode
1073 = iterative_hash_hashval_t (TYPE_REVERSE_STORAGE_ORDER (type), hashcode);
1075 return hashcode;
1078 /* Look up the packable TYPE in the hash table and return its canonical version
1079 if it exists; otherwise, insert it into the hash table. */
1081 static tree
1082 canonicalize_packable_type (tree type)
1084 const hashval_t hashcode = hash_packable_type (type);
1085 struct packable_type_hash in, *h, **slot;
1087 in.hash = hashcode;
1088 in.type = type;
1089 slot = packable_type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
1090 h = *slot;
1091 if (!h)
1093 h = ggc_alloc<packable_type_hash> ();
1094 h->hash = hashcode;
1095 h->type = type;
1096 *slot = h;
1099 return h->type;
1102 /* TYPE is an ARRAY_TYPE that is being used as the type of a field in a packed
1103 record. See if we can rewrite it as a type that has non-BLKmode, which we
1104 can pack tighter in the packed record. If so, return the new type; if not,
1105 return the original type. */
1107 static tree
1108 make_packable_array_type (tree type)
1110 const unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
1111 unsigned HOST_WIDE_INT new_size;
1112 unsigned int new_align;
1114 /* No point in doing anything if the size is either zero or too large for an
1115 integral mode, or if the type already has non-BLKmode. */
1116 if (size == 0 || size > MAX_FIXED_MODE_SIZE || TYPE_MODE (type) != BLKmode)
1117 return type;
1119 /* Punt if the component type is an aggregate type for now. */
1120 if (AGGREGATE_TYPE_P (TREE_TYPE (type)))
1121 return type;
1123 tree new_type = copy_type (type);
1125 new_size = ceil_pow2 (size);
1126 new_align = MIN (new_size, BIGGEST_ALIGNMENT);
1127 SET_TYPE_ALIGN (new_type, new_align);
1129 TYPE_SIZE (new_type) = bitsize_int (new_size);
1130 TYPE_SIZE_UNIT (new_type) = size_int (new_size / BITS_PER_UNIT);
1132 SET_TYPE_MODE (new_type, mode_for_size (new_size, MODE_INT, 1).else_blk ());
1134 return new_type;
1137 /* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
1138 as the type of a field in a packed record if IN_RECORD is true, or as
1139 the component type of a packed array if IN_RECORD is false. See if we
1140 can rewrite it either as a type that has non-BLKmode, which we can pack
1141 tighter in the packed record case, or as a smaller type with at most
1142 MAX_ALIGN alignment if the value is non-zero. If so, return the new
1143 type; if not, return the original type. */
1145 tree
1146 make_packable_type (tree type, bool in_record, unsigned int max_align)
1148 const unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
1149 const unsigned int align = TYPE_ALIGN (type);
1150 unsigned HOST_WIDE_INT new_size;
1151 unsigned int new_align;
1153 /* No point in doing anything if the size is zero. */
1154 if (size == 0)
1155 return type;
1157 tree new_type = make_node (TREE_CODE (type));
1159 /* Copy the name and flags from the old type to that of the new.
1160 Note that we rely on the pointer equality created here for
1161 TYPE_NAME to look through conversions in various places. */
1162 TYPE_NAME (new_type) = TYPE_NAME (type);
1163 TYPE_PACKED (new_type) = 1;
1164 TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
1165 TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
1166 TYPE_REVERSE_STORAGE_ORDER (new_type) = TYPE_REVERSE_STORAGE_ORDER (type);
1167 if (TREE_CODE (type) == RECORD_TYPE)
1168 TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);
1170 /* If we are in a record and have a small size, set the alignment to
1171 try for an integral mode. Otherwise set it to try for a smaller
1172 type with BLKmode. */
1173 if (in_record && size <= MAX_FIXED_MODE_SIZE)
1175 new_size = ceil_pow2 (size);
1176 new_align = MIN (new_size, BIGGEST_ALIGNMENT);
1177 SET_TYPE_ALIGN (new_type, new_align);
1179 else
1181 tree ada_size = TYPE_ADA_SIZE (type);
1183 /* Do not try to shrink the size if the RM size is not constant. */
1184 if (TYPE_CONTAINS_TEMPLATE_P (type) || !tree_fits_uhwi_p (ada_size))
1185 return type;
1187 /* Round the RM size up to a unit boundary to get the minimal size
1188 for a BLKmode record. Give up if it's already the size and we
1189 don't need to lower the alignment. */
1190 new_size = tree_to_uhwi (ada_size);
1191 new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
1192 if (new_size == size && (max_align == 0 || align <= max_align))
1193 return type;
1195 new_align = MIN (new_size & -new_size, BIGGEST_ALIGNMENT);
1196 if (max_align > 0 && new_align > max_align)
1197 new_align = max_align;
1198 SET_TYPE_ALIGN (new_type, MIN (align, new_align));
1201 TYPE_USER_ALIGN (new_type) = 1;
1203 /* Now copy the fields, keeping the position and size as we don't want
1204 to change the layout by propagating the packedness downwards. */
1205 tree new_field_list = NULL_TREE;
1206 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
1208 tree new_field_type = TREE_TYPE (field);
1209 tree new_field, new_field_size;
1211 if (AGGREGATE_TYPE_P (new_field_type)
1212 && tree_fits_uhwi_p (TYPE_SIZE (new_field_type)))
1214 if (RECORD_OR_UNION_TYPE_P (new_field_type)
1215 && !TYPE_FAT_POINTER_P (new_field_type))
1216 new_field_type
1217 = make_packable_type (new_field_type, true, max_align);
1218 else if (in_record
1219 && max_align > 0
1220 && max_align < BITS_PER_UNIT
1221 && TREE_CODE (new_field_type) == ARRAY_TYPE)
1222 new_field_type = make_packable_array_type (new_field_type);
1225 /* However, for the last field in a not already packed record type
1226 that is of an aggregate type, we need to use the RM size in the
1227 packable version of the record type, see finish_record_type. */
1228 if (!DECL_CHAIN (field)
1229 && !TYPE_PACKED (type)
1230 && RECORD_OR_UNION_TYPE_P (new_field_type)
1231 && !TYPE_FAT_POINTER_P (new_field_type)
1232 && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
1233 && TYPE_ADA_SIZE (new_field_type))
1234 new_field_size = TYPE_ADA_SIZE (new_field_type);
1235 else
1237 new_field_size = DECL_SIZE (field);
1239 /* Make sure not to use too small a type for the size. */
1240 if (TYPE_MODE (new_field_type) == BLKmode)
1241 new_field_type = TREE_TYPE (field);
1244 /* This is a layout with full representation, alignment and size clauses
1245 so we simply pass 0 as PACKED like gnat_to_gnu_field in this case. */
1246 new_field
1247 = create_field_decl (DECL_NAME (field), new_field_type, new_type,
1248 new_field_size, bit_position (field), 0,
1249 !DECL_NONADDRESSABLE_P (field));
1251 DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (field);
1252 SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, field);
1253 if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
1254 DECL_QUALIFIER (new_field) = DECL_QUALIFIER (field);
1256 DECL_CHAIN (new_field) = new_field_list;
1257 new_field_list = new_field;
1260 /* If this is a padding record, we never want to make the size smaller
1261 than what was specified. For QUAL_UNION_TYPE, also copy the size. */
1262 if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
1264 TYPE_SIZE (new_type) = TYPE_SIZE (type);
1265 TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
1266 new_size = size;
1268 else
1270 TYPE_SIZE (new_type) = bitsize_int (new_size);
1271 TYPE_SIZE_UNIT (new_type) = size_int (new_size / BITS_PER_UNIT);
1274 if (!TYPE_CONTAINS_TEMPLATE_P (type))
1275 SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));
1277 finish_record_type (new_type, nreverse (new_field_list), 2, false);
1278 relate_alias_sets (new_type, type, ALIAS_SET_COPY);
1279 if (gnat_encodings != DWARF_GNAT_ENCODINGS_ALL)
1280 SET_TYPE_DEBUG_TYPE (new_type, TYPE_DEBUG_TYPE (type));
1281 else if (TYPE_STUB_DECL (type))
1282 SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
1283 DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));
1285 /* Try harder to get a packable type if necessary, for example in case
1286 the record itself contains a BLKmode field. */
1287 if (in_record && TYPE_MODE (new_type) == BLKmode)
1288 SET_TYPE_MODE (new_type,
1289 mode_for_size_tree (TYPE_SIZE (new_type),
1290 MODE_INT, 1).else_blk ());
1292 /* If neither mode nor size nor alignment shrunk, return the old type. */
1293 if (TYPE_MODE (new_type) == BLKmode && new_size >= size && max_align == 0)
1294 return type;
1296 /* If the packable type is named, we canonicalize it by means of the hash
1297 table. This is consistent with the language semantics and ensures that
1298 gigi and the middle-end have a common view of these packable types. */
1299 return
1300 TYPE_NAME (new_type) ? canonicalize_packable_type (new_type) : new_type;
1303 /* Return true if TYPE has an unsigned representation. This needs to be used
1304 when the representation of types whose precision is not equal to their size
1305 is manipulated based on the RM size. */
1307 static inline bool
1308 type_unsigned_for_rm (tree type)
1310 /* This is the common case. */
1311 if (TYPE_UNSIGNED (type))
1312 return true;
1314 /* See the E_Signed_Integer_Subtype case of gnat_to_gnu_entity. */
1315 if (TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST
1316 && tree_int_cst_sgn (TYPE_MIN_VALUE (type)) >= 0)
1317 return true;
1319 return false;
1322 /* Given a type TYPE, return a new type whose size is appropriate for SIZE.
1323 If TYPE is the best type, return it. Otherwise, make a new type. We
1324 only support new integral and pointer types. FOR_BIASED is true if
1325 we are making a biased type. */
1327 tree
1328 make_type_from_size (tree type, tree size_tree, bool for_biased)
1330 unsigned HOST_WIDE_INT size;
1331 bool biased_p;
1332 tree new_type;
1334 /* If size indicates an error, just return TYPE to avoid propagating
1335 the error. Likewise if it's too large to represent. */
1336 if (!size_tree || !tree_fits_uhwi_p (size_tree))
1337 return type;
1339 size = tree_to_uhwi (size_tree);
1341 switch (TREE_CODE (type))
1343 case BOOLEAN_TYPE:
1344 /* Do not mess with boolean types that have foreign convention. */
1345 if (TYPE_PRECISION (type) == 1 && TYPE_SIZE (type) == size_tree)
1346 break;
1348 /* ... fall through ... */
1350 case INTEGER_TYPE:
1351 case ENUMERAL_TYPE:
1352 biased_p = (TREE_CODE (type) == INTEGER_TYPE
1353 && TYPE_BIASED_REPRESENTATION_P (type));
1355 /* Integer types with precision 0 are forbidden. */
1356 if (size == 0)
1357 size = 1;
1359 /* Only do something if the type is not a bit-packed array type and does
1360 not already have the proper size and the size is not too large. */
1361 if (BIT_PACKED_ARRAY_TYPE_P (type)
1362 || (TYPE_PRECISION (type) == size && biased_p == for_biased)
1363 || size > (Enable_128bit_Types ? 128 : LONG_LONG_TYPE_SIZE))
1364 break;
1366 biased_p |= for_biased;
1368 /* The type should be an unsigned type if the original type is unsigned
1369 or if the lower bound is constant and non-negative or if the type is
1370 biased, see E_Signed_Integer_Subtype case of gnat_to_gnu_entity. */
1371 if (type_unsigned_for_rm (type) || biased_p)
1372 new_type = make_unsigned_type (size);
1373 else
1374 new_type = make_signed_type (size);
1375 TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
1376 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_MIN_VALUE (type));
1377 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_MAX_VALUE (type));
1378 /* Copy the name to show that it's essentially the same type and
1379 not a subrange type. */
1380 TYPE_NAME (new_type) = TYPE_NAME (type);
1381 TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
1382 SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
1383 return new_type;
1385 case RECORD_TYPE:
1386 /* Do something if this is a fat pointer, in which case we
1387 may need to return the thin pointer. */
1388 if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
1390 scalar_int_mode p_mode;
1391 if (!int_mode_for_size (size, 0).exists (&p_mode)
1392 || !targetm.valid_pointer_mode (p_mode))
1393 p_mode = ptr_mode;
1394 return
1395 build_pointer_type_for_mode
1396 (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
1397 p_mode, 0);
1399 break;
1401 case POINTER_TYPE:
1402 /* Only do something if this is a thin pointer, in which case we
1403 may need to return the fat pointer. */
1404 if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
1405 return
1406 build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
1407 break;
1409 default:
1410 break;
1413 return type;
1416 /* Return true iff the padded types are equivalent. */
1418 bool
1419 pad_type_hasher::equal (pad_type_hash *t1, pad_type_hash *t2)
1421 tree type1, type2;
1423 if (t1->hash != t2->hash)
1424 return 0;
1426 type1 = t1->type;
1427 type2 = t2->type;
1429 /* We consider that padded types are equivalent if they pad the same type
1430 and have the same size, alignment, RM size and storage order. Taking the
1431 mode into account is redundant since it is determined by the others. */
1432 return
1433 TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
1434 && TYPE_SIZE (type1) == TYPE_SIZE (type2)
1435 && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
1436 && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2)
1437 && TYPE_REVERSE_STORAGE_ORDER (type1) == TYPE_REVERSE_STORAGE_ORDER (type2);
1440 /* Compute the hash value for the padded TYPE. */
1442 static hashval_t
1443 hash_pad_type (tree type)
1445 hashval_t hashcode;
1447 hashcode
1448 = iterative_hash_object (TYPE_HASH (TREE_TYPE (TYPE_FIELDS (type))), 0);
1449 hashcode = iterative_hash_expr (TYPE_SIZE (type), hashcode);
1450 hashcode = iterative_hash_hashval_t (TYPE_ALIGN (type), hashcode);
1451 hashcode = iterative_hash_expr (TYPE_ADA_SIZE (type), hashcode);
1452 hashcode
1453 = iterative_hash_hashval_t (TYPE_REVERSE_STORAGE_ORDER (type), hashcode);
1455 return hashcode;
1458 /* Look up the padded TYPE in the hash table and return its canonical version
1459 if it exists; otherwise, insert it into the hash table. */
1461 static tree
1462 canonicalize_pad_type (tree type)
1464 const hashval_t hashcode = hash_pad_type (type);
1465 struct pad_type_hash in, *h, **slot;
1467 in.hash = hashcode;
1468 in.type = type;
1469 slot = pad_type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
1470 h = *slot;
1471 if (!h)
1473 h = ggc_alloc<pad_type_hash> ();
1474 h->hash = hashcode;
1475 h->type = type;
1476 *slot = h;
1479 return h->type;
1482 /* Ensure that TYPE has SIZE and ALIGN. Make and return a new padded type
1483 if needed. We have already verified that SIZE and ALIGN are large enough.
1484 GNAT_ENTITY is used to name the resulting record and to issue a warning.
1485 IS_COMPONENT_TYPE is true if this is being done for the component type of
1486 an array. DEFINITION is true if this type is being defined. SET_RM_SIZE
1487 is true if the RM size of the resulting type is to be set to SIZE too; in
1488 this case, the padded type is canonicalized before being returned.
1490 Note that, if TYPE is an array, then we pad it even if it has already got
1491 an alignment of ALIGN, provided that it's larger than the alignment of the
1492 element type. This ensures that the size of the type is a multiple of its
1493 alignment as required by the GCC type system, and alleviates the oddity of
1494 the larger alignment, which is used to implement alignment clauses present
1495 on unconstrained array types. */
1497 tree
1498 maybe_pad_type (tree type, tree size, unsigned int align,
1499 Entity_Id gnat_entity, bool is_component_type,
1500 bool definition, bool set_rm_size)
1502 tree orig_size = TYPE_SIZE (type);
1503 unsigned int orig_align
1504 = TREE_CODE (type) == ARRAY_TYPE
1505 ? TYPE_ALIGN (TREE_TYPE (type))
1506 : TYPE_ALIGN (type);
1507 tree record, field;
1509 /* If TYPE is a padded type, see if it agrees with any size and alignment
1510 we were given. If so, return the original type. Otherwise, strip
1511 off the padding, since we will either be returning the inner type
1512 or repadding it. If no size or alignment is specified, use that of
1513 the original padded type. */
1514 if (TYPE_IS_PADDING_P (type))
1516 if ((!size
1517 || operand_equal_p (round_up (size, orig_align), orig_size, 0))
1518 && (align == 0 || align == orig_align))
1519 return type;
1521 if (!size)
1522 size = orig_size;
1523 if (align == 0)
1524 align = orig_align;
1526 type = TREE_TYPE (TYPE_FIELDS (type));
1527 orig_size = TYPE_SIZE (type);
1528 orig_align
1529 = TREE_CODE (type) == ARRAY_TYPE
1530 ? TYPE_ALIGN (TREE_TYPE (type))
1531 : TYPE_ALIGN (type);
1534 /* If the size is either not being changed or is being made smaller (which
1535 is not done here and is only valid for bitfields anyway), show the size
1536 isn't changing. Likewise, clear the alignment if it isn't being
1537 changed. Then return if we aren't doing anything. */
1538 if (size
1539 && (operand_equal_p (size, orig_size, 0)
1540 || (TREE_CODE (orig_size) == INTEGER_CST
1541 && tree_int_cst_lt (size, orig_size))))
1542 size = NULL_TREE;
1544 if (align == orig_align)
1545 align = 0;
1547 if (align == 0 && !size)
1548 return type;
1550 /* We used to modify the record in place in some cases, but that could
1551 generate incorrect debugging information. So make a new record
1552 type and name. */
1553 record = make_node (RECORD_TYPE);
1554 TYPE_PADDING_P (record) = 1;
1556 if (Present (gnat_entity))
1557 TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");
1559 SET_TYPE_ALIGN (record, align ? align : orig_align);
1560 TYPE_SIZE (record) = size ? size : orig_size;
1561 TYPE_SIZE_UNIT (record)
1562 = convert (sizetype,
1563 size_binop (EXACT_DIV_EXPR, TYPE_SIZE (record),
1564 bitsize_unit_node));
1566 /* If we are changing the alignment and the input type is a record with
1567 BLKmode and a small constant size, try to make a form that has an
1568 integral mode. This might allow the padding record to also have an
1569 integral mode, which will be much more efficient. There is no point
1570 in doing so if a size is specified unless it is also a small constant
1571 size and it is incorrect to do so if we cannot guarantee that the mode
1572 will be naturally aligned since the field must always be addressable.
1574 ??? This might not always be a win when done for a stand-alone object:
1575 since the nominal and the effective type of the object will now have
1576 different modes, a VIEW_CONVERT_EXPR will be required for converting
1577 between them and it might be hard to overcome afterwards, including
1578 at the RTL level when the stand-alone object is accessed as a whole. */
1579 if (align > 0
1580 && RECORD_OR_UNION_TYPE_P (type)
1581 && !TYPE_IS_FAT_POINTER_P (type)
1582 && TYPE_MODE (type) == BLKmode
1583 && !TYPE_BY_REFERENCE_P (type)
1584 && TREE_CODE (orig_size) == INTEGER_CST
1585 && !TREE_OVERFLOW (orig_size)
1586 && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
1587 && (!size
1588 || (TREE_CODE (size) == INTEGER_CST
1589 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
1591 tree packable_type = make_packable_type (type, true, align);
1592 if (TYPE_MODE (packable_type) != BLKmode
1593 && compare_tree_int (TYPE_SIZE (packable_type), align) <= 0)
1594 type = packable_type;
1597 /* Now create the field with the original size. */
1598 field = create_field_decl (get_identifier ("F"), type, record, orig_size,
1599 bitsize_zero_node, 0, 1);
1600 DECL_INTERNAL_P (field) = 1;
1602 /* We will output additional debug info manually below. */
1603 finish_record_type (record, field, 1, false);
1605 /* Set the RM size if requested. */
1606 if (set_rm_size)
1608 SET_TYPE_ADA_SIZE (record, size ? size : orig_size);
1610 /* If the padded type is complete and has constant size, we canonicalize
1611 it by means of the hash table. This is consistent with the language
1612 semantics and ensures that gigi and the middle-end have a common view
1613 of these padded types. */
1614 if (TREE_CONSTANT (TYPE_SIZE (record)))
1616 tree canonical = canonicalize_pad_type (record);
1617 if (canonical != record)
1619 record = canonical;
1620 goto built;
1625 /* Make the inner type the debug type of the padded type. */
1626 if (gnat_encodings != DWARF_GNAT_ENCODINGS_ALL)
1627 SET_TYPE_DEBUG_TYPE (record, maybe_debug_type (type));
1629 /* Unless debugging information isn't being written for the input type,
1630 write a record that shows what we are a subtype of and also make a
1631 variable that indicates our size, if still variable. */
1632 if (TREE_CODE (orig_size) != INTEGER_CST
1633 && TYPE_NAME (record)
1634 && TYPE_NAME (type)
1635 && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1636 && DECL_IGNORED_P (TYPE_NAME (type))))
1638 tree name = TYPE_IDENTIFIER (record);
1639 tree size_unit = TYPE_SIZE_UNIT (record);
1641 /* A variable that holds the size is required even with no encoding since
1642 it will be referenced by debugging information attributes. At global
1643 level, we need a single variable across all translation units. */
1644 if (size
1645 && TREE_CODE (size) != INTEGER_CST
1646 && (definition || global_bindings_p ()))
1648 /* Whether or not gnat_entity comes from source, this XVZ variable is
1649 is a compilation artifact. */
1650 size_unit
1651 = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
1652 size_unit, true, global_bindings_p (),
1653 !definition && global_bindings_p (), false,
1654 false, true, true, NULL, gnat_entity, false);
1655 TYPE_SIZE_UNIT (record) = size_unit;
1658 /* There is no need to show what we are a subtype of when outputting as
1659 few encodings as possible: regular debugging infomation makes this
1660 redundant. */
1661 if (gnat_encodings == DWARF_GNAT_ENCODINGS_ALL)
1663 tree marker = make_node (RECORD_TYPE);
1664 tree orig_name = TYPE_IDENTIFIER (type);
1666 TYPE_NAME (marker) = concat_name (name, "XVS");
1667 finish_record_type (marker,
1668 create_field_decl (orig_name,
1669 build_reference_type (type),
1670 marker, NULL_TREE, NULL_TREE,
1671 0, 0),
1672 0, true);
1673 TYPE_SIZE_UNIT (marker) = size_unit;
1675 add_parallel_type (record, marker);
1679 built:
1680 /* If a simple size was explicitly given, maybe issue a warning. */
1681 if (!size
1682 || TREE_CODE (size) == COND_EXPR
1683 || TREE_CODE (size) == MAX_EXPR
1684 || No (gnat_entity))
1685 return record;
1687 /* But don't do it if we are just annotating types and the type is tagged or
1688 concurrent, since these types aren't fully laid out in this mode. */
1689 if (type_annotate_only)
1691 Entity_Id gnat_type
1692 = is_component_type
1693 ? Component_Type (gnat_entity) : Etype (gnat_entity);
1695 if (Is_Tagged_Type (gnat_type) || Is_Concurrent_Type (gnat_type))
1696 return record;
1699 /* Take the original size as the maximum size of the input if there was an
1700 unconstrained record involved and round it up to the specified alignment,
1701 if one was specified, but only for aggregate types. */
1702 if (CONTAINS_PLACEHOLDER_P (orig_size))
1703 orig_size = max_size (orig_size, true);
1705 if (align && AGGREGATE_TYPE_P (type))
1706 orig_size = round_up (orig_size, align);
1708 if (!operand_equal_p (size, orig_size, 0)
1709 && !(TREE_CODE (size) == INTEGER_CST
1710 && TREE_CODE (orig_size) == INTEGER_CST
1711 && (TREE_OVERFLOW (size)
1712 || TREE_OVERFLOW (orig_size)
1713 || tree_int_cst_lt (size, orig_size))))
1715 Node_Id gnat_error_node;
1717 /* For a packed array, post the message on the original array type. */
1718 if (Is_Packed_Array_Impl_Type (gnat_entity))
1719 gnat_entity = Original_Array_Type (gnat_entity);
1721 if ((Ekind (gnat_entity) == E_Component
1722 || Ekind (gnat_entity) == E_Discriminant)
1723 && Present (Component_Clause (gnat_entity)))
1724 gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
1725 else if (Has_Size_Clause (gnat_entity))
1726 gnat_error_node = Expression (Size_Clause (gnat_entity));
1727 else if (Has_Object_Size_Clause (gnat_entity))
1728 gnat_error_node = Expression (Object_Size_Clause (gnat_entity));
1729 else
1730 gnat_error_node = Empty;
1732 /* Generate message only for entities that come from source, since
1733 if we have an entity created by expansion, the message will be
1734 generated for some other corresponding source entity. */
1735 if (Comes_From_Source (gnat_entity))
1737 if (is_component_type)
1738 post_error_ne_tree ("component of& padded{ by ^ bits}??",
1739 gnat_entity, gnat_entity,
1740 size_diffop (size, orig_size));
1741 else if (Present (gnat_error_node))
1742 post_error_ne_tree ("{^ }bits of & unused??",
1743 gnat_error_node, gnat_entity,
1744 size_diffop (size, orig_size));
1748 return record;
1751 /* Return true if padded TYPE was built with an RM size. */
1753 bool
1754 pad_type_has_rm_size (tree type)
1756 /* This is required for the lookup. */
1757 if (!TREE_CONSTANT (TYPE_SIZE (type)))
1758 return false;
1760 const hashval_t hashcode = hash_pad_type (type);
1761 struct pad_type_hash in, *h;
1763 in.hash = hashcode;
1764 in.type = type;
1765 h = pad_type_hash_table->find_with_hash (&in, hashcode);
1767 /* The types built with an RM size are the canonicalized ones. */
1768 return h && h->type == type;
1771 /* Return a copy of the padded TYPE but with reverse storage order. */
1773 tree
1774 set_reverse_storage_order_on_pad_type (tree type)
1776 if (flag_checking)
1778 /* If the inner type is not scalar then the function does nothing. */
1779 tree inner_type = TREE_TYPE (TYPE_FIELDS (type));
1780 gcc_assert (!AGGREGATE_TYPE_P (inner_type)
1781 && !VECTOR_TYPE_P (inner_type));
1784 /* This is required for the canonicalization. */
1785 gcc_assert (TREE_CONSTANT (TYPE_SIZE (type)));
1787 tree field = copy_node (TYPE_FIELDS (type));
1788 type = copy_type (type);
1789 DECL_CONTEXT (field) = type;
1790 TYPE_FIELDS (type) = field;
1791 TYPE_REVERSE_STORAGE_ORDER (type) = 1;
1792 return canonicalize_pad_type (type);
/* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
   If this is a multi-dimensional array type, do this recursively.

   OP may be
   - ALIAS_SET_COPY:     the new set is made a copy of the old one.
   - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
   - ALIAS_SET_SUBSET:   the new set is made a subset of the old one.  */

void
relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
{
  /* Remove any padding from GNU_OLD_TYPE.  It doesn't matter in the case
     of a one-dimensional array, since the padding has the same alias set
     as the field type, but if it's a multi-dimensional array, we need to
     see the inner types.  */
  while (TREE_CODE (gnu_old_type) == RECORD_TYPE
	 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
	     || TYPE_PADDING_P (gnu_old_type)))
    gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));

  /* Unconstrained array types are deemed incomplete and would thus be given
     alias set 0.  Retrieve the underlying array type.  */
  if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
    gnu_old_type
      = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
  if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
    gnu_new_type
      = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));

  /* For a multi-dimensional array, relate the alias sets of the inner
     dimensions recursively before dealing with the outermost one.  */
  if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
      && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
      && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
    relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);

  switch (op)
    {
    case ALIAS_SET_COPY:
      /* The alias set shouldn't be copied between array types with different
	 aliasing settings because this can break the aliasing relationship
	 between the array type and its element type.  */
      if (flag_checking || flag_strict_aliasing)
	gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
		      && TREE_CODE (gnu_old_type) == ARRAY_TYPE
		      && TYPE_NONALIASED_COMPONENT (gnu_new_type)
			 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));

      TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
      break;

    case ALIAS_SET_SUBSET:
    case ALIAS_SET_SUPERSET:
      {
	alias_set_type old_set = get_alias_set (gnu_old_type);
	alias_set_type new_set = get_alias_set (gnu_new_type);

	/* Do nothing if the alias sets conflict.  This ensures that we
	   never call record_alias_subset several times for the same pair
	   or at all for alias set 0.  */
	if (!alias_sets_conflict_p (old_set, new_set))
	  {
	    if (op == ALIAS_SET_SUBSET)
	      record_alias_subset (old_set, new_set);
	    else
	      record_alias_subset (new_set, old_set);
	  }
      }
      break;

    default:
      gcc_unreachable ();
    }

  record_component_aliases (gnu_new_type);
}
1870 /* Record TYPE as a builtin type for Ada. NAME is the name of the type.
1871 ARTIFICIAL_P is true if the type was generated by the compiler. */
1873 void
1874 record_builtin_type (const char *name, tree type, bool artificial_p)
1876 tree type_decl = build_decl (input_location,
1877 TYPE_DECL, get_identifier (name), type);
1878 DECL_ARTIFICIAL (type_decl) = artificial_p;
1879 TYPE_ARTIFICIAL (type) = artificial_p;
1880 gnat_pushdecl (type_decl, Empty);
1882 if (debug_hooks->type_decl)
1883 debug_hooks->type_decl (type_decl, false);
/* Finish constructing the character type CHAR_TYPE.

  In Ada character types are enumeration types and, as a consequence, are
  represented in the front-end by integral types holding the positions of
  the enumeration values as defined by the language, which means that the
  integral types are unsigned.

  Unfortunately the signedness of 'char' in C is implementation-defined
  and GCC even has the option -f[un]signed-char to toggle it at run time.
  Since GNAT's philosophy is to be compatible with C by default, to wit
  Interfaces.C.char is defined as a mere copy of Character, we may need
  to declare character types as signed types in GENERIC and generate the
  necessary adjustments to make them behave as unsigned types.

  The overall strategy is as follows: if 'char' is unsigned, do nothing;
  if 'char' is signed, translate character types of CHAR_TYPE_SIZE and
  character subtypes with RM_Size = Esize = CHAR_TYPE_SIZE into signed
  types.  The idea is to ensure that the bit pattern contained in the
  Esize'd objects is not changed, even though the numerical value will
  be interpreted differently depending on the signedness.  */

void
finish_character_type (tree char_type)
{
  /* Nothing to do if 'char' is unsigned: the front-end view matches.  */
  if (TYPE_UNSIGNED (char_type))
    return;

  /* Make a copy of a generic unsigned version since we'll modify it.  */
  tree unsigned_char_type
    = (char_type == char_type_node
       ? unsigned_char_type_node
       : copy_type (gnat_unsigned_type_for (char_type)));

  /* Create an unsigned version of the type and set it as debug type.  */
  TYPE_NAME (unsigned_char_type) = TYPE_NAME (char_type);
  TYPE_STRING_FLAG (unsigned_char_type) = TYPE_STRING_FLAG (char_type);
  TYPE_ARTIFICIAL (unsigned_char_type) = TYPE_ARTIFICIAL (char_type);
  SET_TYPE_DEBUG_TYPE (char_type, unsigned_char_type);

  /* If this is a subtype, make the debug type a subtype of the debug type
     of the base type and convert literal RM bounds to unsigned.  */
  if (TREE_TYPE (char_type))
    {
      tree base_unsigned_char_type = TYPE_DEBUG_TYPE (TREE_TYPE (char_type));
      tree min_value = TYPE_RM_MIN_VALUE (char_type);
      tree max_value = TYPE_RM_MAX_VALUE (char_type);

      /* Only literal bounds can be converted here; non-constant bounds
	 are left untouched.  */
      if (TREE_CODE (min_value) == INTEGER_CST)
	min_value = fold_convert (base_unsigned_char_type, min_value);
      if (TREE_CODE (max_value) == INTEGER_CST)
	max_value = fold_convert (base_unsigned_char_type, max_value);

      TREE_TYPE (unsigned_char_type) = base_unsigned_char_type;
      SET_TYPE_RM_MIN_VALUE (unsigned_char_type, min_value);
      SET_TYPE_RM_MAX_VALUE (unsigned_char_type, max_value);
    }

  /* Adjust the RM bounds of the original type to unsigned; that's especially
     important for types since they are implicit in this case.  */
  SET_TYPE_RM_MIN_VALUE (char_type, TYPE_MIN_VALUE (unsigned_char_type));
  SET_TYPE_RM_MAX_VALUE (char_type, TYPE_MAX_VALUE (unsigned_char_type));
}
/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
   finish constructing the record type as a fat pointer type.  */

void
finish_fat_pointer_type (tree record_type, tree field_list)
{
  /* Make sure we can put it into a register.  */
  if (STRICT_ALIGNMENT)
    SET_TYPE_ALIGN (record_type, MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE));

  /* Show what it really is.  */
  TYPE_FAT_POINTER_P (record_type) = 1;

  /* Do not emit debug info for it since the types of its fields may still be
     incomplete at this point.  */
  finish_record_type (record_type, field_list, 0, false);

  /* Force type_contains_placeholder_p to return true on it.  Although the
     PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
     type but the representation of the unconstrained array.
     NOTE(review): the value 2 appears to encode a cached "true" result for
     the placeholder check — confirm against the macro's definition.  */
  TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
}
/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
   finish constructing the record or union type.  If REP_LEVEL is zero, this
   record has no representation clause and so will be entirely laid out here.
   If REP_LEVEL is one, this record has a representation clause and has been
   laid out already; only set the sizes and alignment.  If REP_LEVEL is two,
   this record is derived from a parent record and thus inherits its layout;
   only make a pass on the fields to finalize them.  DEBUG_INFO_P is true if
   additional debug info needs to be output for this type.  */

void
finish_record_type (tree record_type, tree field_list, int rep_level,
		    bool debug_info_p)
{
  const enum tree_code orig_code = TREE_CODE (record_type);
  const bool had_size = TYPE_SIZE (record_type) != NULL_TREE;
  const bool had_align = TYPE_ALIGN (record_type) > 0;
  /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
     out just like a UNION_TYPE, since the size will be fixed.  */
  const enum tree_code code
    = (orig_code == QUAL_UNION_TYPE && rep_level > 0 && had_size
       ? UNION_TYPE : orig_code);
  tree name = TYPE_IDENTIFIER (record_type);
  tree ada_size = bitsize_zero_node;
  tree size = bitsize_zero_node;
  tree field;

  TYPE_FIELDS (record_type) = field_list;

  /* Always attach the TYPE_STUB_DECL for a record type.  It is required to
     generate debug info and have a parallel type.  */
  TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);

  /* Globally initialize the record first.  If this is a rep'ed record,
     that just means some initializations; otherwise, layout the record.  */
  if (rep_level > 0)
    {
      if (TYPE_ALIGN (record_type) < BITS_PER_UNIT)
	SET_TYPE_ALIGN (record_type, BITS_PER_UNIT);

      if (!had_size)
	TYPE_SIZE (record_type) = bitsize_zero_node;
    }
  else
    {
      /* Ensure there isn't a size already set.  There can be in an error
	 case where there is a rep clause but all fields have errors and
	 no longer have a position.  */
      TYPE_SIZE (record_type) = NULL_TREE;

      /* Ensure we use the traditional GCC layout for bitfields when we need
	 to pack the record type or have a representation clause.  The other
	 possible layout (Microsoft C compiler), if available, would prevent
	 efficient packing in almost all cases.  */
#ifdef TARGET_MS_BITFIELD_LAYOUT
      if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
	decl_attributes (&record_type,
			 tree_cons (get_identifier ("gcc_struct"),
				    NULL_TREE, NULL_TREE),
			 ATTR_FLAG_TYPE_IN_PLACE);
#endif

      layout_type (record_type);
    }

  /* At this point, the position and size of each field is known.  It was
     either set before entry by a rep clause, or by laying out the type above.

     We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
     to compute the Ada size; the GCC size and alignment (for rep'ed records
     that are not padding types); and the mode (for rep'ed records).  We also
     clear the DECL_BIT_FIELD indication for the cases we know have not been
     handled yet, and adjust DECL_NONADDRESSABLE_P accordingly.  */

  if (code == QUAL_UNION_TYPE)
    field_list = nreverse (field_list);

  for (field = field_list; field; field = DECL_CHAIN (field))
    {
      tree type = TREE_TYPE (field);
      tree pos = bit_position (field);
      tree this_size = DECL_SIZE (field);
      tree this_ada_size;

      /* Use the inner Ada size of a record field when available, so that
	 padding inside composite fields isn't counted twice.  */
      if (RECORD_OR_UNION_TYPE_P (type)
	  && !TYPE_FAT_POINTER_P (type)
	  && !TYPE_CONTAINS_TEMPLATE_P (type)
	  && TYPE_ADA_SIZE (type))
	this_ada_size = TYPE_ADA_SIZE (type);
      else
	this_ada_size = this_size;

      const bool variant_part = (TREE_CODE (type) == QUAL_UNION_TYPE);

      /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle.  */
      if (DECL_BIT_FIELD (field)
	  && operand_equal_p (this_size, TYPE_SIZE (type), 0))
	{
	  const unsigned int align = TYPE_ALIGN (type);

	  /* In the general case, type alignment is required.  */
	  if (value_factor_p (pos, align))
	    {
	      /* The enclosing record type must be sufficiently aligned.
		 Otherwise, if no alignment was specified for it and it
		 has been laid out already, bump its alignment to the
		 desired one if this is compatible with its size and
		 maximum alignment, if any.  */
	      if (TYPE_ALIGN (record_type) >= align)
		{
		  SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
		  DECL_BIT_FIELD (field) = 0;
		}
	      else if (!had_align
		       && rep_level == 0
		       && value_factor_p (TYPE_SIZE (record_type), align)
		       && (!TYPE_MAX_ALIGN (record_type)
			   || TYPE_MAX_ALIGN (record_type) >= align))
		{
		  SET_TYPE_ALIGN (record_type, align);
		  SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
		  DECL_BIT_FIELD (field) = 0;
		}
	    }

	  /* In the non-strict alignment case, only byte alignment is
	     required.  */
	  if (!STRICT_ALIGNMENT
	      && DECL_BIT_FIELD (field)
	      && value_factor_p (pos, BITS_PER_UNIT))
	    DECL_BIT_FIELD (field) = 0;
	}

      /* Clear DECL_BIT_FIELD_TYPE for a variant part at offset 0, it's simply
	 not supported by the DECL_BIT_FIELD_REPRESENTATIVE machinery because
	 the variant part is always the last field in the list.  */
      if (variant_part && integer_zerop (pos))
	DECL_BIT_FIELD_TYPE (field) = NULL_TREE;

      /* If we still have DECL_BIT_FIELD set at this point, we know that the
	 field is technically not addressable.  Except that it can actually
	 be addressed if it is BLKmode and happens to be properly aligned.  */
      if (DECL_BIT_FIELD (field)
	  && !(DECL_MODE (field) == BLKmode
	       && value_factor_p (pos, BITS_PER_UNIT)))
	DECL_NONADDRESSABLE_P (field) = 1;

      /* A type must be as aligned as its most aligned field that is not
	 a bit-field.  But this is already enforced by layout_type.  */
      if (rep_level > 0 && !DECL_BIT_FIELD (field))
	SET_TYPE_ALIGN (record_type,
			MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field)));

      /* Accumulate the Ada size and GCC size of the record from this
	 field, according to the kind of aggregate.  */
      switch (code)
	{
	case UNION_TYPE:
	  ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
	  size = size_binop (MAX_EXPR, size, this_size);
	  break;

	case QUAL_UNION_TYPE:
	  ada_size
	    = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			   this_ada_size, ada_size);
	  size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			      this_size, size);
	  break;

	case RECORD_TYPE:
	  /* Since we know here that all fields are sorted in order of
	     increasing bit position, the size of the record is one
	     higher than the ending bit of the last field processed
	     unless we have a rep clause, because we might be processing
	     the REP part of a record with a variant part for which the
	     variant part has a rep clause but not the fixed part, in
	     which case this REP part may contain overlapping fields
	     and thus needs to be treated like a union type above, so
	     use a MAX in that case.  Also, if this field is a variant
	     part, we need to take into account the previous size in
	     the case of empty variants.  */
	  ada_size
	    = merge_sizes (ada_size, pos, this_ada_size, rep_level > 0,
			   variant_part);
	  size
	    = merge_sizes (size, pos, this_size, rep_level > 0, variant_part);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  if (code == QUAL_UNION_TYPE)
    nreverse (field_list);

  /* We need to set the regular sizes if REP_LEVEL is one.  */
  if (rep_level == 1)
    {
      /* We round TYPE_SIZE and TYPE_SIZE_UNIT up to TYPE_ALIGN separately
	 to avoid having very large masking constants in TYPE_SIZE_UNIT.  */
      const unsigned int align = TYPE_ALIGN (record_type);

      /* If this is a padding record, we never want to make the size smaller
	 than what was specified in it, if any.  */
      if (TYPE_IS_PADDING_P (record_type) && had_size)
	size = round_up (TYPE_SIZE (record_type), BITS_PER_UNIT);
      else
	size = round_up (size, BITS_PER_UNIT);

      TYPE_SIZE (record_type) = variable_size (round_up (size, align));

      tree size_unit
	= convert (sizetype,
		   size_binop (EXACT_DIV_EXPR, size, bitsize_unit_node));
      TYPE_SIZE_UNIT (record_type)
	= variable_size (round_up (size_unit, align / BITS_PER_UNIT));
    }

  /* We need to set the Ada size if REP_LEVEL is zero or one.  */
  if (rep_level < 2)
    {
      /* Now set any of the values we've just computed that apply.  */
      if (!TYPE_FAT_POINTER_P (record_type)
	  && !TYPE_CONTAINS_TEMPLATE_P (record_type))
	SET_TYPE_ADA_SIZE (record_type, ada_size);
    }

  /* We need to set the mode if REP_LEVEL is one or two.  */
  if (rep_level > 0)
    {
      compute_record_mode (record_type);
      finish_bitfield_layout (record_type);
    }

  /* Reset the TYPE_MAX_ALIGN field since it's private to gigi.  */
  TYPE_MAX_ALIGN (record_type) = 0;

  if (debug_info_p)
    rest_of_record_type_compilation (record_type);
}
2211 /* Append PARALLEL_TYPE on the chain of parallel types of TYPE. If
2212 PARRALEL_TYPE has no context and its computation is not deferred yet, also
2213 propagate TYPE's context to PARALLEL_TYPE's or defer its propagation to the
2214 moment TYPE will get a context. */
2216 void
2217 add_parallel_type (tree type, tree parallel_type)
2219 tree decl = TYPE_STUB_DECL (type);
2221 while (DECL_PARALLEL_TYPE (decl))
2222 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
2224 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
2226 /* If PARALLEL_TYPE already has a context, we are done. */
2227 if (TYPE_CONTEXT (parallel_type))
2228 return;
2230 /* Otherwise, try to get one from TYPE's context. If so, simply propagate
2231 it to PARALLEL_TYPE. */
2232 if (TYPE_CONTEXT (type))
2233 gnat_set_type_context (parallel_type, TYPE_CONTEXT (type));
2235 /* Otherwise TYPE has not context yet. We know it will have one thanks to
2236 gnat_pushdecl and then its context will be propagated to PARALLEL_TYPE,
2237 so we have nothing to do in this case. */
2240 /* Return true if TYPE has a parallel type. */
2242 static bool
2243 has_parallel_type (tree type)
2245 tree decl = TYPE_STUB_DECL (type);
2247 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
/* Wrap up compilation of RECORD_TYPE, i.e. output additional debug info
   associated with it.  It need not be invoked directly in most cases as
   finish_record_type takes care of doing so.  */

void
rest_of_record_type_compilation (tree record_type)
{
  bool var_size = false;
  tree field;

  /* If this is a padded type, the bulk of the debug info has already been
     generated for the field's type.  */
  if (TYPE_IS_PADDING_P (record_type))
    return;

  /* If the type already has a parallel type (XVS type), then we're done.  */
  if (has_parallel_type (record_type))
    return;

  for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
    {
      /* We need to make an XVE/XVU record if any field has variable size,
	 whether or not the record does.  For example, if we have a union,
	 it may be that all fields, rounded up to the alignment, have the
	 same size, in which case we'll use that size.  But the debug
	 output routines (except Dwarf2) won't be able to output the fields,
	 so we need to make the special record.  */
      if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
	  /* If a field has a non-constant qualifier, the record will have
	     variable size too.  */
	  || (TREE_CODE (record_type) == QUAL_UNION_TYPE
	      && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
	{
	  var_size = true;
	  break;
	}
    }

  /* If this record type is of variable size, make a parallel record type that
     will tell the debugger how the former is laid out (see exp_dbug.ads).  */
  if (var_size && gnat_encodings == DWARF_GNAT_ENCODINGS_ALL)
    {
      tree new_record_type
	= make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
		     ? UNION_TYPE : TREE_CODE (record_type));
      tree orig_name = TYPE_IDENTIFIER (record_type), new_name;
      tree last_pos = bitsize_zero_node;

      new_name
	= concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
		       ? "XVU" : "XVE");
      TYPE_NAME (new_record_type) = new_name;
      SET_TYPE_ALIGN (new_record_type, BIGGEST_ALIGNMENT);
      TYPE_STUB_DECL (new_record_type)
	= create_type_stub_decl (new_name, new_record_type);
      DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
	= DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
      gnat_pushdecl (TYPE_STUB_DECL (new_record_type), Empty);
      TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
      TYPE_SIZE_UNIT (new_record_type)
	= size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);

      /* Now scan all the fields, replacing each field with a new field
	 corresponding to the new encoding.  */
      for (tree old_field = TYPE_FIELDS (record_type);
	   old_field;
	   old_field = DECL_CHAIN (old_field))
	{
	  tree field_type = TREE_TYPE (old_field);
	  tree field_name = DECL_NAME (old_field);
	  tree curpos = fold_bit_position (old_field);
	  tree pos, new_field;
	  bool var = false;
	  unsigned int align = 0;

	  /* See how the position was modified from the last position.

	     There are two basic cases we support: a value was added
	     to the last position or the last position was rounded to
	     a boundary and then something was added.  Check for the
	     first case first.  If not, see if there is any evidence
	     of rounding.  If so, round the last position and retry.

	     If this is a union, the position can be taken as zero.  */
	  if (TREE_CODE (new_record_type) == UNION_TYPE)
	    pos = bitsize_zero_node;
	  else
	    pos = compute_related_constant (curpos, last_pos);

	  if (pos)
	    ;
	  /* CURPOS is a product by a constant: round LAST_POS up to the
	     alignment implied by the product and retry.  */
	  else if (TREE_CODE (curpos) == MULT_EXPR
		   && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
	    {
	      tree offset = TREE_OPERAND (curpos, 0);
	      align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
	      align = scale_by_factor_of (offset, align);
	      last_pos = round_up (last_pos, align);
	      pos = compute_related_constant (curpos, last_pos);
	    }
	  /* CURPOS is a product by a constant plus a constant addend:
	     the usable alignment is further limited by the addend.  */
	  else if (TREE_CODE (curpos) == PLUS_EXPR
		   && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))
		   && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
		   && tree_fits_uhwi_p
		      (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1)))
	    {
	      tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
	      unsigned HOST_WIDE_INT addend
		= tree_to_uhwi (TREE_OPERAND (curpos, 1));
	      align
		= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
	      align = scale_by_factor_of (offset, align);
	      align = MIN (align, addend & -addend);
	      last_pos = round_up (last_pos, align);
	      pos = compute_related_constant (curpos, last_pos);
	    }
	  else
	    {
	      align = DECL_ALIGN (old_field);
	      last_pos = round_up (last_pos, align);
	      pos = compute_related_constant (curpos, last_pos);
	    }

	  /* See if this type is variable-sized and make a pointer type
	     and indicate the indirection if so.  Beware that the debug
	     back-end may adjust the position computed above according
	     to the alignment of the field type, i.e. the pointer type
	     in this case, if we don't preventively counter that.  */
	  if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
	    {
	      field_type = copy_type (build_pointer_type (field_type));
	      SET_TYPE_ALIGN (field_type, BITS_PER_UNIT);
	      var = true;

	      /* ??? Kludge to work around a bug in Workbench's debugger.  */
	      if (align == 0)
		{
		  align = DECL_ALIGN (old_field);
		  last_pos = round_up (last_pos, align);
		  pos = compute_related_constant (curpos, last_pos);
		}
	    }

	  /* If we can't compute a position, set it to zero.

	     ??? We really should abort here, but it's too much work
	     to get this correct for all cases.  */
	  if (!pos)
	    pos = bitsize_zero_node;

	  /* Make a new field name, if necessary.  */
	  if (var || align != 0)
	    {
	      char suffix[16];

	      if (align != 0)
		sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
			 align / BITS_PER_UNIT);
	      else
		strcpy (suffix, "XVL");

	      field_name = concat_name (field_name, suffix);
	    }

	  new_field
	    = create_field_decl (field_name, field_type, new_record_type,
				 DECL_SIZE (old_field), pos, 0, 0);

	  /* The specified position is not the actual position of the field
	     but the gap with the previous field, so the computation of the
	     bit-field status may be incorrect.  We adjust it manually to
	     avoid generating useless attributes for the field in DWARF.  */
	  if (DECL_SIZE (old_field) == TYPE_SIZE (field_type)
	      && value_factor_p (pos, BITS_PER_UNIT))
	    {
	      DECL_BIT_FIELD (new_field) = 0;
	      DECL_BIT_FIELD_TYPE (new_field) = NULL_TREE;
	    }

	  DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
	  TYPE_FIELDS (new_record_type) = new_field;

	  /* If old_field is a QUAL_UNION_TYPE, take its size as being
	     zero.  The only time it's not the last field of the record
	     is when there are other components at fixed positions after
	     it (meaning there was a rep clause for every field) and we
	     want to be able to encode them.  */
	  last_pos = size_binop (PLUS_EXPR, curpos,
				 (TREE_CODE (TREE_TYPE (old_field))
				  == QUAL_UNION_TYPE)
				 ? bitsize_zero_node
				 : DECL_SIZE (old_field));
	}

      /* Fields were chained in reverse order above; restore source order.  */
      TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));

      add_parallel_type (record_type, new_record_type);
    }
}
/* Utility function of above to merge LAST_SIZE, the previous size of a record
   with FIRST_BIT and SIZE that describe a field.  If MAX is true, we take the
   MAX of the end position of this field with LAST_SIZE.  In all other cases,
   we use FIRST_BIT plus SIZE.  SPECIAL is true if it's for a QUAL_UNION_TYPE,
   in which case we must look for COND_EXPRs and replace a value of zero with
   the old size.  Return an expression for the size.  */

static tree
merge_sizes (tree last_size, tree first_bit, tree size, bool max, bool special)
{
  tree type = TREE_TYPE (last_size);
  tree new_size;

  if (!special || TREE_CODE (size) != COND_EXPR)
    {
      new_size = size_binop (PLUS_EXPR, first_bit, size);
      if (max)
	new_size = size_binop (MAX_EXPR, last_size, new_size);
    }
  else
    /* For a COND_EXPR coming from a QUAL_UNION_TYPE, merge each arm
       recursively, keeping LAST_SIZE wherever an arm's size is zero.  */
    new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
			    integer_zerop (TREE_OPERAND (size, 1))
			    ? last_size : merge_sizes (last_size, first_bit,
						       TREE_OPERAND (size, 1),
						       max, special),
			    integer_zerop (TREE_OPERAND (size, 2))
			    ? last_size : merge_sizes (last_size, first_bit,
						       TREE_OPERAND (size, 2),
						       max, special));

  /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
     when fed through SUBSTITUTE_IN_EXPR) into thinking that a constant
     size is not constant.  */
  while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
    new_size = TREE_OPERAND (new_size, 0);

  return new_size;
}
2488 /* Convert the size expression EXPR to TYPE and fold the result. */
2490 static tree
2491 fold_convert_size (tree type, tree expr)
2493 /* We assume that size expressions do not wrap around. */
2494 if (TREE_CODE (expr) == MULT_EXPR || TREE_CODE (expr) == PLUS_EXPR)
2495 return size_binop (TREE_CODE (expr),
2496 fold_convert_size (type, TREE_OPERAND (expr, 0)),
2497 fold_convert_size (type, TREE_OPERAND (expr, 1)));
2499 return fold_convert (type, expr);
2502 /* Return the bit position of FIELD, in bits from the start of the record,
2503 and fold it as much as possible. This is a tree of type bitsizetype. */
2505 static tree
2506 fold_bit_position (const_tree field)
2508 tree offset = fold_convert_size (bitsizetype, DECL_FIELD_OFFSET (field));
2509 return size_binop (PLUS_EXPR, DECL_FIELD_BIT_OFFSET (field),
2510 size_binop (MULT_EXPR, offset, bitsize_unit_node));
2513 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
2514 related by the addition of a constant. Return that constant if so. */
2516 static tree
2517 compute_related_constant (tree op0, tree op1)
2519 tree factor, op0_var, op1_var, op0_cst, op1_cst, result;
2521 if (TREE_CODE (op0) == MULT_EXPR
2522 && TREE_CODE (op1) == MULT_EXPR
2523 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST
2524 && TREE_OPERAND (op1, 1) == TREE_OPERAND (op0, 1))
2526 factor = TREE_OPERAND (op0, 1);
2527 op0 = TREE_OPERAND (op0, 0);
2528 op1 = TREE_OPERAND (op1, 0);
2530 else
2531 factor = NULL_TREE;
2533 op0_cst = split_plus (op0, &op0_var);
2534 op1_cst = split_plus (op1, &op1_var);
2535 result = size_binop (MINUS_EXPR, op0_cst, op1_cst);
2537 if (operand_equal_p (op0_var, op1_var, 0))
2538 return factor ? size_binop (MULT_EXPR, factor, result) : result;
2540 return NULL_TREE;
2543 /* Utility function of above to split a tree OP which may be a sum, into a
2544 constant part, which is returned, and a variable part, which is stored
2545 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
2546 bitsizetype. */
2548 static tree
2549 split_plus (tree in, tree *pvar)
2551 /* Strip conversions in order to ease the tree traversal and maximize the
2552 potential for constant or plus/minus discovery. We need to be careful
2553 to always return and set *pvar to bitsizetype trees, but it's worth
2554 the effort. */
2555 in = remove_conversions (in, false);
2557 *pvar = convert (bitsizetype, in);
2559 if (TREE_CODE (in) == INTEGER_CST)
2561 *pvar = bitsize_zero_node;
2562 return convert (bitsizetype, in);
2564 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
2566 tree lhs_var, rhs_var;
2567 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
2568 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
2570 if (lhs_var == TREE_OPERAND (in, 0)
2571 && rhs_var == TREE_OPERAND (in, 1))
2572 return bitsize_zero_node;
2574 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
2575 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
2577 else
2578 return bitsize_zero_node;
2581 /* Return a copy of TYPE but safe to modify in any way. */
2583 tree
2584 copy_type (tree type)
2586 tree new_type = copy_node (type);
2588 /* Unshare the language-specific data. */
2589 if (TYPE_LANG_SPECIFIC (type))
2591 TYPE_LANG_SPECIFIC (new_type) = NULL;
2592 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
2595 /* And the contents of the language-specific slot if needed. */
2596 if ((INTEGRAL_TYPE_P (type) || SCALAR_FLOAT_TYPE_P (type))
2597 && TYPE_RM_VALUES (type))
2599 TYPE_RM_VALUES (new_type) = NULL_TREE;
2600 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
2601 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
2602 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
2605 /* copy_node clears this field instead of copying it, because it is
2606 aliased with TREE_CHAIN. */
2607 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
2609 TYPE_POINTER_TO (new_type) = NULL_TREE;
2610 TYPE_REFERENCE_TO (new_type) = NULL_TREE;
2611 TYPE_MAIN_VARIANT (new_type) = new_type;
2612 TYPE_NEXT_VARIANT (new_type) = NULL_TREE;
2613 TYPE_CANONICAL (new_type) = new_type;
2615 return new_type;
2618 /* Return a subtype of sizetype with range MIN to MAX and whose
2619 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2620 of the associated TYPE_DECL. */
2622 tree
2623 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2625 /* First build a type for the desired range. */
2626 tree type = build_nonshared_range_type (sizetype, min, max);
2628 /* Then set the index type. */
2629 SET_TYPE_INDEX_TYPE (type, index);
2630 create_type_decl (NULL_TREE, type, true, false, gnat_node);
2632 return type;
2635 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2636 sizetype is used. */
2638 tree
2639 create_range_type (tree type, tree min, tree max)
2641 tree range_type;
2643 if (!type)
2644 type = sizetype;
2646 /* First build a type with the base range. */
2647 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2648 TYPE_MAX_VALUE (type));
2650 /* Then set the actual range. */
2651 SET_TYPE_RM_MIN_VALUE (range_type, min);
2652 SET_TYPE_RM_MAX_VALUE (range_type, max);
2654 return range_type;
2657 /* Return an extra subtype of TYPE with range MIN to MAX. */
2659 tree
2660 create_extra_subtype (tree type, tree min, tree max)
2662 const bool uns = TYPE_UNSIGNED (type);
2663 const unsigned prec = TYPE_PRECISION (type);
2664 tree subtype = uns ? make_unsigned_type (prec) : make_signed_type (prec);
2666 TREE_TYPE (subtype) = type;
2667 TYPE_EXTRA_SUBTYPE_P (subtype) = 1;
2669 SET_TYPE_RM_MIN_VALUE (subtype, min);
2670 SET_TYPE_RM_MAX_VALUE (subtype, max);
2672 return subtype;
2675 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of TYPE.
2676 NAME gives the name of the type to be used in the declaration. */
2678 tree
2679 create_type_stub_decl (tree name, tree type)
2681 tree type_decl = build_decl (input_location, TYPE_DECL, name, type);
2682 DECL_ARTIFICIAL (type_decl) = 1;
2683 TYPE_ARTIFICIAL (type) = 1;
2684 return type_decl;
2687 /* Return a TYPE_DECL node for TYPE. NAME gives the name of the type to be
2688 used in the declaration. ARTIFICIAL_P is true if the declaration was
2689 generated by the compiler. DEBUG_INFO_P is true if we need to write
2690 debug information about this type. GNAT_NODE is used for the position
2691 of the decl. */
2693 tree
2694 create_type_decl (tree name, tree type, bool artificial_p, bool debug_info_p,
2695 Node_Id gnat_node)
2697 enum tree_code code = TREE_CODE (type);
/* Whether TYPE already carries a proper name via a TYPE_DECL; if so, we
   are not naming it here and must leave its TYPE_STUB_DECL alone.  */
2698 bool is_named
2699 = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
2700 tree type_decl;
2702 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
2703 gcc_assert (!TYPE_IS_DUMMY_P (type));
2705 /* If the type hasn't been named yet, we're naming it; preserve an existing
2706 TYPE_STUB_DECL that has been attached to it for some purpose. */
2707 if (!is_named && TYPE_STUB_DECL (type))
2709 type_decl = TYPE_STUB_DECL (type);
2710 DECL_NAME (type_decl) = name;
2712 else
2713 type_decl = build_decl (input_location, TYPE_DECL, name, type);
2715 DECL_ARTIFICIAL (type_decl) = artificial_p;
2716 TYPE_ARTIFICIAL (type) = artificial_p;
2718 /* Add this decl to the current binding level. */
2719 gnat_pushdecl (type_decl, gnat_node);
2721 /* If we're naming the type, equate the TYPE_STUB_DECL to the name. This
2722 causes the name to be also viewed as a "tag" by the debug back-end, with
2723 the advantage that no DW_TAG_typedef is emitted for artificial "tagged"
2724 types in DWARF.
2726 Note that if "type" is used as a DECL_ORIGINAL_TYPE, it may be referenced
2727 from multiple contexts, and "type_decl" references a copy of it: in such a
2728 case, do not mess TYPE_STUB_DECL: we do not want to re-use the TYPE_DECL
2729 with the mechanism above. */
2730 if (!is_named && type != DECL_ORIGINAL_TYPE (type_decl))
2731 TYPE_STUB_DECL (type) = type_decl;
2733 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2734 back-end doesn't support, and for others if we don't need to. */
2735 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
2736 DECL_IGNORED_P (type_decl) = 1;
2738 return type_decl;
2741 /* Return a VAR_DECL or CONST_DECL node.
2743 NAME gives the name of the variable. ASM_NAME is its assembler name
2744 (if provided). TYPE is its data type (a GCC ..._TYPE node). INIT is
2745 the GCC tree for an optional initial expression; NULL_TREE if none.
2747 CONST_FLAG is true if this variable is constant, in which case we might
2748 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2750 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2751 definition to be made visible outside of the current compilation unit, for
2752 instance variable definitions in a package specification.
2754 EXTERN_FLAG is true when processing an external variable declaration (as
2755 opposed to a definition: no storage is to be allocated for the variable).
2757 STATIC_FLAG is only relevant when not at top level and indicates whether
2758 to always allocate storage to the variable.
2760 VOLATILE_FLAG is true if this variable is declared as volatile.
2762 ARTIFICIAL_P is true if the variable was generated by the compiler.
2764 DEBUG_INFO_P is true if we need to write debug information for it.
2766 ATTR_LIST is the list of attributes to be attached to the variable.
2768 GNAT_NODE is used for the position of the decl. */
2770 tree
2771 create_var_decl (tree name, tree asm_name, tree type, tree init,
2772 bool const_flag, bool public_flag, bool extern_flag,
2773 bool static_flag, bool volatile_flag, bool artificial_p,
2774 bool debug_info_p, struct attrib *attr_list,
2775 Node_Id gnat_node, bool const_decl_allowed_p)
2777 /* Whether the object has static storage duration, either explicitly or by
2778 virtue of being declared at the global level. */
2779 const bool static_storage = static_flag || global_bindings_p ();
2781 /* Whether the initializer is constant: for an external object or an object
2782 with static storage duration, we check that the initializer is a valid
2783 constant expression for initializing a static variable; otherwise, we
2784 only check that it is constant. */
2785 const bool init_const
2786 = (init
2787 && gnat_types_compatible_p (type, TREE_TYPE (init))
2788 && (extern_flag || static_storage
2789 ? initializer_constant_valid_p (init, TREE_TYPE (init))
2790 != NULL_TREE
2791 : TREE_CONSTANT (init)));
2793 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
2794 case the initializer may be used in lieu of the DECL node (as done in
2795 Identifier_to_gnu). This is useful to prevent the need of elaboration
2796 code when an identifier for which such a DECL is made is in turn used
2797 as an initializer. We used to rely on CONST_DECL vs VAR_DECL for this,
2798 but extra constraints apply to this choice (see below) and they are not
2799 relevant to the distinction we wish to make. */
2800 const bool constant_p = const_flag && init_const;
2802 /* The actual DECL node. CONST_DECL was initially intended for enumerals
2803 and may be used for scalars in general but not for aggregates. */
2804 tree var_decl
2805 = build_decl (input_location,
2806 (constant_p
2807 && const_decl_allowed_p
2808 && !AGGREGATE_TYPE_P (type) ? CONST_DECL : VAR_DECL),
2809 name, type);
2811 /* Detect constants created by the front-end to hold 'reference to function
2812 calls for stabilization purposes. This is needed for renaming. */
2813 if (const_flag && init && POINTER_TYPE_P (type))
2815 tree inner = init;
2816 if (TREE_CODE (inner) == COMPOUND_EXPR)
2817 inner = TREE_OPERAND (inner, 1);
2818 inner = remove_conversions (inner, true);
2819 if (TREE_CODE (inner) == ADDR_EXPR
2820 && ((TREE_CODE (TREE_OPERAND (inner, 0)) == CALL_EXPR
2821 && !call_is_atomic_load (TREE_OPERAND (inner, 0)))
2822 || (VAR_P (TREE_OPERAND (inner, 0))
2823 && DECL_RETURN_VALUE_P (TREE_OPERAND (inner, 0)))))
2824 DECL_RETURN_VALUE_P (var_decl) = 1;
2827 /* If this is external, throw away any initializations (they will be done
2828 elsewhere) unless this is a constant for which we would like to remain
2829 able to get the initializer. If we are defining a global here, leave a
2830 constant initialization and save any variable elaborations for the
2831 elaboration routine. If we are just annotating types, throw away the
2832 initialization if it isn't a constant. */
2833 if ((extern_flag && !constant_p)
2834 || (type_annotate_only && init && !TREE_CONSTANT (init)))
2835 init = NULL_TREE;
2837 /* At the global level, a non-constant initializer generates elaboration
2838 statements. Check that such statements are allowed, that is to say,
2839 not violating a No_Elaboration_Code restriction. */
2840 if (init && !init_const && global_bindings_p ())
2841 Check_Elaboration_Code_Allowed (gnat_node);
2843 /* Attach the initializer, if any. */
2844 DECL_INITIAL (var_decl) = init;
2846 /* Directly set some flags. */
2847 DECL_ARTIFICIAL (var_decl) = artificial_p;
2848 DECL_EXTERNAL (var_decl) = extern_flag;
2850 TREE_CONSTANT (var_decl) = constant_p;
2851 TREE_READONLY (var_decl) = const_flag;
2853 /* The object is public if it is external or if it is declared public
2854 and has static storage duration. */
2855 TREE_PUBLIC (var_decl) = extern_flag || (public_flag && static_storage);
2857 /* We need to allocate static storage for an object with static storage
2858 duration if it isn't external. */
2859 TREE_STATIC (var_decl) = !extern_flag && static_storage;
2861 TREE_SIDE_EFFECTS (var_decl)
2862 = TREE_THIS_VOLATILE (var_decl)
2863 = TYPE_VOLATILE (type) | volatile_flag;
/* A volatile object must be kept in memory, hence made addressable.  */
2865 if (TREE_SIDE_EFFECTS (var_decl))
2866 TREE_ADDRESSABLE (var_decl) = 1;
2868 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
2869 try to fiddle with DECL_COMMON. However, on platforms that don't
2870 support global BSS sections, uninitialized global variables would
2871 go in DATA instead, thus increasing the size of the executable. */
2872 if (!flag_no_common
2873 && VAR_P (var_decl)
2874 && TREE_PUBLIC (var_decl)
2875 && !have_global_bss_p ())
2876 DECL_COMMON (var_decl) = 1;
2878 /* Do not emit debug info if not requested, or for an external constant whose
2879 initializer is not absolute because this would require a global relocation
2880 in a read-only section which runs afoul of the PE-COFF run-time relocation
2881 mechanism. */
2882 if (!debug_info_p
2883 || (extern_flag
2884 && constant_p
2885 && init
2886 && initializer_constant_valid_p (init, TREE_TYPE (init))
2887 != null_pointer_node))
2888 DECL_IGNORED_P (var_decl) = 1;
2890 /* ??? Some attributes cannot be applied to CONST_DECLs. */
2891 if (VAR_P (var_decl))
2892 process_attributes (&var_decl, &attr_list, true, gnat_node);
2894 /* Add this decl to the current binding level. */
2895 gnat_pushdecl (var_decl, gnat_node)
2897 if (VAR_P (var_decl) && asm_name)
2899 /* Let the target mangle the name if this isn't a verbatim asm. */
2900 if (*IDENTIFIER_POINTER (asm_name) != '*')
2901 asm_name = targetm.mangle_decl_assembler_name (var_decl, asm_name);
2903 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
2906 return var_decl;
2909 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
2911 static bool
2912 aggregate_type_contains_array_p (tree type)
2914 switch (TREE_CODE (type))
2916 case RECORD_TYPE:
2917 case UNION_TYPE:
2918 case QUAL_UNION_TYPE:
2920 tree field;
2921 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2922 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2923 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2924 return true;
2925 return false;
2928 case ARRAY_TYPE:
2929 return true;
2931 default:
2932 gcc_unreachable ();
2936 /* Return true if TYPE is a type with variable size or a padding type with a
2937 field of variable size or a record that has a field with such a type. */
2939 bool
2940 type_has_variable_size (tree type)
2942 tree field;
2944 if (!TREE_CONSTANT (TYPE_SIZE (type)))
2945 return true;
2947 if (TYPE_IS_PADDING_P (type)
2948 && !TREE_CONSTANT (DECL_SIZE (TYPE_FIELDS (type))))
2949 return true;
2951 if (!RECORD_OR_UNION_TYPE_P (type))
2952 return false;
2954 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2955 if (type_has_variable_size (TREE_TYPE (field)))
2956 return true;
2958 return false;
2961 /* Return a FIELD_DECL node. NAME is the field's name, TYPE is its type and
2962 RECORD_TYPE is the type of the enclosing record. If SIZE is nonzero, it
2963 is the specified size of the field. If POS is nonzero, it is the bit
2964 position. PACKED is 1 if the enclosing record is packed, -1 if it has
2965 Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
2966 means we are allowed to take the address of the field; if it is negative,
2967 we should not make a bitfield, which is used by make_aligning_type. */
2969 tree
2970 create_field_decl (tree name, tree type, tree record_type, tree size, tree pos,
2971 int packed, int addressable)
2973 tree field_decl = build_decl (input_location, FIELD_DECL, name, type);
2975 DECL_CONTEXT (field_decl) = record_type;
2976 TREE_READONLY (field_decl) = TYPE_READONLY (type);
2978 /* If a size is specified, use it. Otherwise, if the record type is packed
2979 compute a size to use, which may differ from the object's natural size.
2980 We always set a size in this case to trigger the checks for bitfield
2981 creation below, which is typically required when no position has been
2982 specified. */
2983 if (size)
2984 size = convert (bitsizetype, size);
2985 else if (packed == 1)
2987 size = rm_size (type);
2988 if (TYPE_MODE (type) == BLKmode)
2989 size = round_up (size, BITS_PER_UNIT);
2992 /* If we may, according to ADDRESSABLE, then make a bitfield when the size
2993 is specified for two reasons: first, when it differs from the natural
2994 size; second, when the alignment is insufficient.
2996 We never make a bitfield if the type of the field has a nonconstant size,
2997 because no such entity requiring bitfield operations should reach here.
2999 We do *preventively* make a bitfield when there might be the need for it
3000 but we don't have all the necessary information to decide, as is the case
3001 of a field in a packed record.
3003 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
3004 in layout_decl or finish_record_type to clear the bit_field indication if
3005 it is in fact not needed. */
3006 if (addressable >= 0
3007 && size
3008 && TREE_CODE (size) == INTEGER_CST
3009 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
3010 && (packed
3011 || !tree_int_cst_equal (size, TYPE_SIZE (type))
3012 || (pos && !value_factor_p (pos, TYPE_ALIGN (type)))
3013 || (TYPE_ALIGN (record_type)
3014 && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))))
3016 DECL_BIT_FIELD (field_decl) = 1;
3017 DECL_SIZE (field_decl) = size;
3018 if (!packed && !pos)
3020 if (TYPE_ALIGN (record_type)
3021 && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))
3022 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (record_type));
3023 else
3024 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
3028 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
3030 /* If FIELD_TYPE has BLKmode, we must ensure this is aligned to at least
3031 a byte boundary since GCC cannot handle less aligned BLKmode bitfields.
3032 Likewise if it has a variable size and no specified position because
3033 variable-sized objects need to be aligned to at least a byte boundary.
3034 Likewise for an aggregate without specified position that contains an
3035 array because, in this case, slices of variable length of this array
3036 must be handled by GCC and have variable size. */
3037 if (packed && (TYPE_MODE (type) == BLKmode
3038 || (!pos && type_has_variable_size (type))
3039 || (!pos
3040 && AGGREGATE_TYPE_P (type)
3041 && aggregate_type_contains_array_p (type))))
3042 SET_DECL_ALIGN (field_decl, BITS_PER_UNIT);
3044 /* Bump the alignment if need be, either for bitfield/packing purposes or
3045 to satisfy the type requirements if no such considerations apply. When
3046 we get the alignment from the type, indicate if this is from an explicit
3047 user request, which prevents stor-layout from lowering it later on. */
3048 else
3050 const unsigned int field_align
3051 = DECL_BIT_FIELD (field_decl)
3053 : packed
3054 ? BITS_PER_UNIT
3055 : 0;
3057 if (field_align > DECL_ALIGN (field_decl))
3058 SET_DECL_ALIGN (field_decl, field_align);
3059 else if (!field_align && TYPE_ALIGN (type) > DECL_ALIGN (field_decl))
3061 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
3062 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (type);
3066 if (pos)
3068 /* We need to pass in the alignment the DECL is known to have.
3069 This is the lowest-order bit set in POS, but no more than
3070 the alignment of the record, if one is specified. Note
3071 that an alignment of 0 is taken as infinite. */
3072 unsigned int known_align;
3074 if (tree_fits_uhwi_p (pos))
3075 known_align = tree_to_uhwi (pos) & -tree_to_uhwi (pos);
3076 else
3077 known_align = BITS_PER_UNIT;
3079 if (TYPE_ALIGN (record_type)
3080 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
3081 known_align = TYPE_ALIGN (record_type);
3083 layout_decl (field_decl, known_align);
3084 SET_DECL_OFFSET_ALIGN (field_decl,
3085 tree_fits_uhwi_p (pos)
3086 ? BIGGEST_ALIGNMENT : BITS_PER_UNIT);
3087 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
3088 &DECL_FIELD_BIT_OFFSET (field_decl),
3089 DECL_OFFSET_ALIGN (field_decl), pos);
3092 /* In addition to what our caller says, claim the field is addressable if we
3093 know that its type is not suitable.
3095 The field may also be "technically" nonaddressable, meaning that even if
3096 we attempt to take the field's address we will actually get the address
3097 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
3098 value we have at this point is not accurate enough, so we don't account
3099 for this here and let finish_record_type decide. */
3100 if (!addressable && !type_for_nonaliased_component_p (type))
3101 addressable = 1;
3103 /* Note that there is a trade-off in making a field nonaddressable because
3104 this will cause type-based alias analysis to use the same alias set for
3105 accesses to the field as for accesses to the whole record: while doing
3106 so will make it more likely to disambiguate accesses to other objects
3107 and accesses to the field, it will make it less likely to disambiguate
3108 accesses to the other fields of the record and accesses to the field.
3109 If the record is fully static, then the trade-off is irrelevant since
3110 the fields of the record can always be disambiguated by their offsets
3111 but, if the record is dynamic, then it can become problematic. */
3112 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
3114 return field_decl;
3117 /* Return a PARM_DECL node with NAME and TYPE. */
3119 tree
3120 create_param_decl (tree name, tree type)
3122 tree param_decl = build_decl (input_location, PARM_DECL, name, type);
3124 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
3125 can lead to various ABI violations. */
3126 if (targetm.calls.promote_prototypes (NULL_TREE)
3127 && INTEGRAL_TYPE_P (type)
3128 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node))
3130 /* We have to be careful about biased types here. Make a subtype
3131 of integer_type_node with the proper biasing. */
3132 if (TREE_CODE (type) == INTEGER_TYPE
3133 && TYPE_BIASED_REPRESENTATION_P (type))
3135 tree subtype
3136 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
3137 TREE_TYPE (subtype) = integer_type_node;
3138 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
3139 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (type));
3140 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (type));
3141 type = subtype;
3143 else
3144 type = integer_type_node;
3147 DECL_ARG_TYPE (param_decl) = type;
3148 return param_decl;
3151 /* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
3152 a TYPE. If IN_PLACE is true, the tree pointed to by NODE should not be
3153 changed. GNAT_NODE is used for the position of error messages. */
3155 void
3156 process_attributes (tree *node, struct attrib **attr_list, bool in_place,
3157 Node_Id gnat_node)
3159 struct attrib *attr;
3161 for (attr = *attr_list; attr; attr = attr->next)
3162 switch (attr->type)
3164 case ATTR_MACHINE_ATTRIBUTE:
/* Point input_location at GNAT_NODE so that diagnostics issued by
   decl_attributes are properly located.  */
3165 Sloc_to_locus (Sloc (gnat_node), &input_location);
3166 decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
3167 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
3168 break;
3170 case ATTR_LINK_ALIAS:
3171 if (!DECL_EXTERNAL (*node))
3173 TREE_STATIC (*node) = 1;
3174 assemble_alias (*node, attr->name);
3176 break;
3178 case ATTR_WEAK_EXTERNAL:
3179 if (SUPPORTS_WEAK)
3180 declare_weak (*node);
3181 else
3182 post_error ("?weak declarations not supported on this target",
3183 attr->error_point);
3184 break;
3186 case ATTR_LINK_SECTION:
3187 if (targetm_common.have_named_sections)
3189 set_decl_section_name (*node, IDENTIFIER_POINTER (attr->name));
/* A decl placed in an explicit section cannot be a common block.  */
3190 DECL_COMMON (*node) = 0;
3192 else
3193 post_error ("?section attributes are not supported for this target",
3194 attr->error_point);
3195 break;
3197 case ATTR_LINK_CONSTRUCTOR:
3198 DECL_STATIC_CONSTRUCTOR (*node) = 1;
3199 TREE_USED (*node) = 1;
3200 break;
3202 case ATTR_LINK_DESTRUCTOR:
3203 DECL_STATIC_DESTRUCTOR (*node) = 1;
3204 TREE_USED (*node) = 1;
3205 break;
3207 case ATTR_THREAD_LOCAL_STORAGE:
3208 set_decl_tls_model (*node, decl_default_tls_model (*node));
3209 DECL_COMMON (*node) = 0;
3210 break;
/* The list has been consumed: clear it so that the attributes are not
   applied a second time by a later caller.  */
3213 *attr_list = NULL;
3216 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
3217 a power of 2. */
3219 bool
3220 value_factor_p (tree value, unsigned HOST_WIDE_INT factor)
3222 gcc_checking_assert (pow2p_hwi (factor));
3224 if (tree_fits_uhwi_p (value))
3225 return (tree_to_uhwi (value) & (factor - 1)) == 0;
3227 if (TREE_CODE (value) == MULT_EXPR)
3228 return (value_factor_p (TREE_OPERAND (value, 0), factor)
3229 || value_factor_p (TREE_OPERAND (value, 1), factor));
3231 return false;
3234 /* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
3235 feed it with the elaboration of GNAT_SCOPE. */
3237 static struct deferred_decl_context_node *
3238 add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global)
3240 struct deferred_decl_context_node *new_node;
3242 new_node
3243 = (struct deferred_decl_context_node * ) xmalloc (sizeof (*new_node));
3244 new_node->decl = decl;
3245 new_node->gnat_scope = gnat_scope;
3246 new_node->force_global = force_global;
3247 new_node->types.create (1);
3248 new_node->next = deferred_decl_context_queue;
3249 deferred_decl_context_queue = new_node;
3250 return new_node;
3253 /* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
3254 feed it with the DECL_CONTEXT computed as part of N as soon as it is
3255 computed. */
3257 static void
3258 add_deferred_type_context (struct deferred_decl_context_node *n, tree type)
3260 n->types.safe_push (type);
3263 /* Get the GENERIC node corresponding to GNAT_SCOPE, if available. Return
3264 NULL_TREE if it is not available. */
3266 static tree
3267 compute_deferred_decl_context (Entity_Id gnat_scope)
3269 tree context;
3271 if (present_gnu_tree (gnat_scope))
3272 context = get_gnu_tree (gnat_scope);
3273 else
3274 return NULL_TREE;
3276 if (TREE_CODE (context) == TYPE_DECL)
3278 tree context_type = TREE_TYPE (context);
3280 /* Skip dummy types: only the final ones can appear in the context
3281 chain. */
3282 if (TYPE_DUMMY_P (context_type))
3283 return NULL_TREE;
3285 /* ..._TYPE nodes are more useful than TYPE_DECL nodes in the context
3286 chain. */
3287 else
3288 context = context_type;
3291 return context;
3294 /* Try to process all deferred nodes in the queue. Keep in the queue the ones
3295 that cannot be processed yet, remove the other ones. If FORCE is true,
3296 force the processing for all nodes, use the global context when nodes don't
3297 have a GNU translation. */
3299 void
3300 process_deferred_decl_context (bool force)
/* IT walks the singly-linked queue by address of the link so that removal
   of the current node is O(1) and does not need a "previous" pointer.  */
3302 struct deferred_decl_context_node **it = &deferred_decl_context_queue;
3303 struct deferred_decl_context_node *node;
3305 while (*it)
3307 bool processed = false;
3308 tree context = NULL_TREE;
3309 Entity_Id gnat_scope;
3311 node = *it;
3313 /* If FORCE, get the innermost elaborated scope. Otherwise, just try to
3314 get the first scope. */
3315 gnat_scope = node->gnat_scope;
3316 while (Present (gnat_scope))
3318 context = compute_deferred_decl_context (gnat_scope);
3319 if (!force || context)
3320 break;
3321 gnat_scope = get_debug_scope (gnat_scope, NULL);
3324 /* Imported declarations must not be in a local context (i.e. not inside
3325 a function). */
3326 if (context && node->force_global > 0)
3328 tree ctx = context;
3330 while (ctx)
3332 gcc_assert (TREE_CODE (ctx) != FUNCTION_DECL);
3333 ctx = DECL_P (ctx) ? DECL_CONTEXT (ctx) : TYPE_CONTEXT (ctx);
3337 /* If FORCE, we want to get rid of all nodes in the queue: in case there
3338 was no elaborated scope, use the global context. */
3339 if (force && !context)
3340 context = get_global_context ();
3342 if (context)
3344 tree t;
3345 int i;
3347 DECL_CONTEXT (node->decl) = context;
3349 /* Propagate it to the TYPE_CONTEXT attributes of the requested
3350 ..._TYPE nodes. */
3351 FOR_EACH_VEC_ELT (node->types, i, t)
3353 gnat_set_type_context (t, context);
3355 processed = true;
3358 /* If this node has been successfully processed, remove it from the
3359 queue. Then move to the next node. */
3360 if (processed)
3362 *it = node->next;
3363 node->types.release ();
3364 free (node);
3366 else
3367 it = &node->next;
3371 /* Return VALUE scaled by the biggest power-of-2 factor of EXPR. */
3373 static unsigned int
3374 scale_by_factor_of (tree expr, unsigned int value)
3376 unsigned HOST_WIDE_INT addend = 0;
3377 unsigned HOST_WIDE_INT factor = 1;
3379 /* Peel conversions around EXPR and try to extract bodies from function
3380 calls: it is possible to get the scale factor from size functions. */
3381 expr = remove_conversions (expr, true);
3382 if (TREE_CODE (expr) == CALL_EXPR)
3383 expr = maybe_inline_call_in_expr (expr);
3385 /* Sometimes we get PLUS_EXPR (BIT_AND_EXPR (..., X), Y), where Y is a
3386 multiple of the scale factor we are looking for. */
3387 if (TREE_CODE (expr) == PLUS_EXPR
3388 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
3389 && tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
3391 addend = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
3392 expr = TREE_OPERAND (expr, 0);
3395 /* An expression which is a bitwise AND with a mask has a power-of-2 factor
3396 corresponding to the number of trailing zeros of the mask. */
3397 if (TREE_CODE (expr) == BIT_AND_EXPR
3398 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
3400 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
3401 unsigned int i = 0;
3403 while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)
3405 mask >>= 1;
3406 factor *= 2;
3407 i++;
3411 /* If the addend is not a multiple of the factor we found, give up. In
3412 theory we could find a smaller common factor but it's useless for our
3413 needs. This situation arises when dealing with a field F1 with no
3414 alignment requirement but that is following a field F2 with such
3415 requirements. As long as we have F2's offset, we don't need alignment
3416 information to compute F1's. */
3417 if (addend % factor != 0)
3418 factor = 1;
3420 return factor * value;
3423 /* Return a LABEL_DECL with NAME. GNAT_NODE is used for the position of
3424 the decl. */
3426 tree
3427 create_label_decl (tree name, Node_Id gnat_node)
3429 tree label_decl
3430 = build_decl (input_location, LABEL_DECL, name, void_type_node);
3432 SET_DECL_MODE (label_decl, VOIDmode);
3434 /* Add this decl to the current binding level. */
3435 gnat_pushdecl (label_decl, gnat_node);
3437 return label_decl;
3440 /* Return a FUNCTION_DECL node. NAME is the name of the subprogram, ASM_NAME
3441 its assembler name, TYPE its type (a FUNCTION_TYPE or METHOD_TYPE node),
3442 PARAM_DECL_LIST the list of its parameters (a list of PARM_DECL nodes
3443 chained through the DECL_CHAIN field).
3445 INLINE_STATUS describes the inline flags to be set on the FUNCTION_DECL.
3447 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
3448 definition to be made visible outside of the current compilation unit.
3450 EXTERN_FLAG is true when processing an external subprogram declaration.
3452 ARTIFICIAL_P is true if the subprogram was generated by the compiler.
3454 DEBUG_INFO_P is true if we need to write debug information for it.
3456 DEFINITION is true if the subprogram is to be considered as a definition.
3458 ATTR_LIST is the list of attributes to be attached to the subprogram.
3460 GNAT_NODE is used for the position of the decl. */
3462 tree
3463 create_subprog_decl (tree name, tree asm_name, tree type, tree param_decl_list,
3464 enum inline_status_t inline_status, bool public_flag,
3465 bool extern_flag, bool artificial_p, bool debug_info_p,
3466 bool definition, struct attrib *attr_list,
3467 Node_Id gnat_node)
3469 tree subprog_decl = build_decl (input_location, FUNCTION_DECL, name, type);
3470 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
3472 DECL_ARTIFICIAL (subprog_decl) = artificial_p;
3473 DECL_EXTERNAL (subprog_decl) = extern_flag;
3474 DECL_FUNCTION_IS_DEF (subprog_decl) = definition;
3475 DECL_IGNORED_P (subprog_decl) = !debug_info_p;
3476 TREE_PUBLIC (subprog_decl) = public_flag;
/* Translate INLINE_STATUS into the relevant DECL flags; note the two
   deliberate fallthroughs from stronger to weaker inlining requests.  */
3478 switch (inline_status)
3480 case is_suppressed:
3481 DECL_UNINLINABLE (subprog_decl) = 1;
3482 break;
3484 case is_default:
3485 break;
3487 case is_required:
3488 if (Back_End_Inlining)
3490 decl_attributes (&subprog_decl,
3491 tree_cons (get_identifier ("always_inline"),
3492 NULL_TREE, NULL_TREE),
3493 ATTR_FLAG_TYPE_IN_PLACE);
3495 /* Inline_Always guarantees that every direct call is inlined and
3496 that there is no indirect reference to the subprogram, so the
3497 instance in the original package (as well as its clones in the
3498 client packages created for inter-unit inlining) can be made
3499 private, which causes the out-of-line body to be eliminated. */
3500 TREE_PUBLIC (subprog_decl) = 0;
3503 /* ... fall through ... */
3505 case is_prescribed:
3506 DECL_DISREGARD_INLINE_LIMITS (subprog_decl) = 1;
3508 /* ... fall through ... */
3510 case is_requested:
3511 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
3512 if (!Debug_Generated_Code)
3513 DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_p;
3514 break;
3516 default:
3517 gcc_unreachable ();
3520 process_attributes (&subprog_decl, &attr_list, true, gnat_node);
3522 /* Once everything is processed, finish the subprogram declaration. */
3523 finish_subprog_decl (subprog_decl, asm_name, type);
3525 /* Add this decl to the current binding level. */
3526 gnat_pushdecl (subprog_decl, gnat_node);
3528 /* Output the assembler code and/or RTL for the declaration. */
3529 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
3531 return subprog_decl;
3534 /* Given a subprogram declaration DECL, its assembler name and its type,
3535 finish constructing the subprogram declaration from ASM_NAME and TYPE. */
3537 void
3538 finish_subprog_decl (tree decl, tree asm_name, tree type)
3540 /* DECL_ARGUMENTS is set by the caller, but not its context. */
3541 for (tree param_decl = DECL_ARGUMENTS (decl);
3542 param_decl;
3543 param_decl = DECL_CHAIN (param_decl))
3544 DECL_CONTEXT (param_decl) = decl;
/* Build the RESULT_DECL that will hold the function's return value.  */
3546 tree result_decl
3547 = build_decl (DECL_SOURCE_LOCATION (decl), RESULT_DECL, NULL_TREE,
3548 TREE_TYPE (type));
3550 DECL_ARTIFICIAL (result_decl) = 1;
3551 DECL_IGNORED_P (result_decl) = 1;
3552 DECL_CONTEXT (result_decl) = decl;
3553 DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (type);
3554 DECL_RESULT (decl) = result_decl;
3556 /* Propagate the "pure" property. */
3557 DECL_PURE_P (decl) = TYPE_RESTRICT (type);
3559 /* Propagate the "noreturn" property. */
3560 TREE_THIS_VOLATILE (decl) = TYPE_VOLATILE (type);
3562 if (asm_name)
3564 /* Let the target mangle the name if this isn't a verbatim asm. */
3565 if (*IDENTIFIER_POINTER (asm_name) != '*')
3566 asm_name = targetm.mangle_decl_assembler_name (decl, asm_name);
3568 SET_DECL_ASSEMBLER_NAME (decl, asm_name);
3570 /* The expand_main_function circuitry expects "main_identifier_node" to
3571 designate the DECL_NAME of the 'main' entry point, in turn expected
3572 to be declared as the "main" function literally by default. Ada
3573 program entry points are typically declared with a different name
3574 within the binder generated file, exported as 'main' to satisfy the
3575 system expectations. Force main_identifier_node in this case. */
3576 if (asm_name == main_identifier_node)
3577 DECL_NAME (decl) = main_identifier_node;
3581 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
3582 body. This routine needs to be invoked before processing the declarations
3583 appearing in the subprogram. */
3585 void
3586 begin_subprog_body (tree subprog_decl)
3588 announce_function (subprog_decl);
3590 /* This function is being defined. */
3591 TREE_STATIC (subprog_decl) = 1;
3593 /* The failure of this assertion will likely come from a wrong context for
3594 the subprogram body, e.g. another procedure for a procedure declared at
3595 library level. */
3596 gcc_assert (current_function_decl == decl_function_context (subprog_decl));
/* Make SUBPROG_DECL the function being compiled from now on.  */
3598 current_function_decl = subprog_decl;
3600 /* Enter a new binding level and show that all the parameters belong to
3601 this function. */
3602 gnat_pushlevel ();
/* Finish translating the current subprogram and set its BODY.  */

void
end_subprog_body (tree body)
{
  tree fndecl = current_function_decl;

  /* Attach the BLOCK for this level to the function and pop the level.  */
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  gnat_poplevel ();

  /* The body should be a BIND_EXPR whose BLOCK is the top-level one.  In that
     case, make the BIND_EXPR's block the function's DECL_INITIAL instead of
     the binding level's block set just above.  */
  if (TREE_CODE (body) == BIND_EXPR)
    {
      BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
      DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
    }

  DECL_SAVED_TREE (fndecl) = body;

  /* Pop back to the enclosing function, if any (NULL_TREE at library level),
     undoing what begin_subprog_body did.  */
  current_function_decl = decl_function_context (fndecl);
}
3629 /* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
3631 void
3632 rest_of_subprog_body_compilation (tree subprog_decl)
3634 /* We cannot track the location of errors past this point. */
3635 Current_Error_Node = Empty;
3637 /* If we're only annotating types, don't actually compile this function. */
3638 if (type_annotate_only)
3639 return;
3641 /* Dump functions before gimplification. */
3642 dump_function (TDI_original, subprog_decl);
3644 if (!decl_function_context (subprog_decl))
3645 cgraph_node::finalize_function (subprog_decl, false);
3646 else
3647 /* Register this function with cgraph just far enough to get it
3648 added to our parent's nested function list. */
3649 (void) cgraph_node::get_create (subprog_decl);
/* Record DECL, the declaration of a builtin function, by entering it in the
   name space via gnat_pushdecl, and return it.  NOTE(review): presumably
   installed as the GNAT builtin_function language hook — confirm.  */

tree
gnat_builtin_function (tree decl)
{
  /* Empty: builtins are not associated with any GNAT entity node.  */
  gnat_pushdecl (decl, Empty);
  return decl;
}
3659 /* Return an integer type with the number of bits of precision given by
3660 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
3661 it is a signed type. */
3663 tree
3664 gnat_type_for_size (unsigned precision, int unsignedp)
3666 tree t;
3667 char type_name[20];
3669 if (precision <= 2 * MAX_BITS_PER_WORD
3670 && signed_and_unsigned_types[precision][unsignedp])
3671 return signed_and_unsigned_types[precision][unsignedp];
3673 if (unsignedp)
3674 t = make_unsigned_type (precision);
3675 else
3676 t = make_signed_type (precision);
3677 TYPE_ARTIFICIAL (t) = 1;
3679 if (precision <= 2 * MAX_BITS_PER_WORD)
3680 signed_and_unsigned_types[precision][unsignedp] = t;
3682 if (!TYPE_NAME (t))
3684 sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
3685 TYPE_NAME (t) = get_identifier (type_name);
3688 return t;
/* Likewise for floating-point types.  */

static tree
float_type_for_precision (int precision, machine_mode mode)
{
  tree t;
  char type_name[20];

  /* The cache is indexed by machine mode, not by precision.  */
  if (float_types[(int) mode])
    return float_types[(int) mode];

  float_types[(int) mode] = t = make_node (REAL_TYPE);
  TYPE_PRECISION (t) = precision;
  layout_type (t);

  /* Check that layout_type selected the mode the caller asked for, i.e. that
     PRECISION and MODE are consistent.  */
  gcc_assert (TYPE_MODE (t) == mode);
  if (!TYPE_NAME (t))
    {
      /* Give the new type a name of the form FLOAT_<precision>.  */
      sprintf (type_name, "FLOAT_%d", precision);
      TYPE_NAME (t) = get_identifier (type_name);
    }

  return t;
}
3716 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
3717 an unsigned type; otherwise a signed type is returned. */
3719 tree
3720 gnat_type_for_mode (machine_mode mode, int unsignedp)
3722 if (mode == BLKmode)
3723 return NULL_TREE;
3725 if (mode == VOIDmode)
3726 return void_type_node;
3728 if (COMPLEX_MODE_P (mode))
3729 return NULL_TREE;
3731 scalar_float_mode float_mode;
3732 if (is_a <scalar_float_mode> (mode, &float_mode))
3733 return float_type_for_precision (GET_MODE_PRECISION (float_mode),
3734 float_mode);
3736 scalar_int_mode int_mode;
3737 if (is_a <scalar_int_mode> (mode, &int_mode))
3738 return gnat_type_for_size (GET_MODE_BITSIZE (int_mode), unsignedp);
3740 if (VECTOR_MODE_P (mode))
3742 machine_mode inner_mode = GET_MODE_INNER (mode);
3743 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
3744 if (inner_type)
3745 return build_vector_type_for_mode (inner_type, mode);
3748 return NULL_TREE;
/* Return the signed or unsigned version of TYPE_NODE, a scalar type, the
   signedness being specified by UNSIGNEDP.  */

tree
gnat_signed_or_unsigned_type_for (int unsignedp, tree type_node)
{
  /* char_type_node is special-cased to the explicitly signed/unsigned
     character types.  */
  if (type_node == char_type_node)
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;

  tree type = gnat_type_for_size (TYPE_PRECISION (type_node), unsignedp);

  /* If TYPE_NODE is a modular type, carry over the link to the modular type
     in TREE_TYPE on a copy of the result.  */
  if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
    {
      type = copy_type (type);
      TREE_TYPE (type) = type_node;
    }
  /* Likewise if TYPE_NODE is a subtype whose base type is modular: carry
     over the link to that modular base type.  */
  else if (TREE_TYPE (type_node)
	   && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
	   && TYPE_MODULAR_P (TREE_TYPE (type_node)))
    {
      type = copy_type (type);
      TREE_TYPE (type) = TREE_TYPE (type_node);
    }

  return type;
}
/* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
   transparently converted to each other.  */

int
gnat_types_compatible_p (tree t1, tree t2)
{
  enum tree_code code;

  /* This is the default criterion.  */
  if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
    return 1;

  /* We only check structural equivalence here, so distinct tree codes are
     never compatible.  */
  if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
    return 0;

  /* Vector types are also compatible if they have the same number of subparts
     and the same form of (scalar) element type.  */
  if (code == VECTOR_TYPE
      && known_eq (TYPE_VECTOR_SUBPARTS (t1), TYPE_VECTOR_SUBPARTS (t2))
      && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
      && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
    return 1;

  /* Array types are also compatible if they are constrained and have the same
     domain(s), the same component type and the same scalar storage order.
     The component comparison recurses so that arrays of arrays compare their
     component arrays structurally as well.  */
  if (code == ARRAY_TYPE
      && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
	  || (TYPE_DOMAIN (t1)
	      && TYPE_DOMAIN (t2)
	      && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
				     TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
	      && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
				     TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
      && (TREE_TYPE (t1) == TREE_TYPE (t2)
	  || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
	      && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))))
      && TYPE_REVERSE_STORAGE_ORDER (t1) == TYPE_REVERSE_STORAGE_ORDER (t2))
    return 1;

  return 0;
}
3821 /* Return true if EXPR is a useless type conversion. */
3823 bool
3824 gnat_useless_type_conversion (tree expr)
3826 if (CONVERT_EXPR_P (expr)
3827 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
3828 || TREE_CODE (expr) == NON_LVALUE_EXPR)
3829 return gnat_types_compatible_p (TREE_TYPE (expr),
3830 TREE_TYPE (TREE_OPERAND (expr, 0)));
3832 return false;
3835 /* Return true if T, a {FUNCTION,METHOD}_TYPE, has the specified flags. */
3837 bool
3838 fntype_same_flags_p (const_tree t, tree cico_list, bool return_by_direct_ref_p,
3839 bool return_by_invisi_ref_p)
3841 return TYPE_CI_CO_LIST (t) == cico_list
3842 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
3843 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
/* Try to compute the maximum (if MAX_P) or minimum (if !MAX_P) value for the
   expression EXP, for very simple expressions.  Substitute variable references
   with their respective type's min/max values.  Return the computed value if
   any, or EXP if no value can be computed.  */

tree
max_value (tree exp, bool max_p)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);
  tree op0, op1, op2;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      /* A variable is replaced with its type's extreme value.  */
      if (VAR_P (exp))
	return fold_convert (type,
			     max_p
			     ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type));
      break;

    case tcc_vl_exp:
      /* Try inlining the call so we can recurse into its body.  */
      if (code == CALL_EXPR)
	{
	  tree t;

	  t = maybe_inline_call_in_expr (exp);
	  if (t)
	    return max_value (t, max_p);
	}
      break;

    case tcc_comparison:
      /* A comparison is at most 1 (true) and at least 0 (false).  */
      return build_int_cst (type, max_p ? 1 : 0);

    case tcc_unary:
      op0 = TREE_OPERAND (exp, 0);

      if (code == NON_LVALUE_EXPR)
	return max_value (op0, max_p);

      /* Negation flips the direction of the extremum.  */
      if (code == NEGATE_EXPR)
	return max_value (op0, !max_p);

      if (code == NOP_EXPR)
	return fold_convert (type, max_value (op0, max_p));

      break;

    case tcc_binary:
      op0 = TREE_OPERAND (exp, 0);
      op1 = TREE_OPERAND (exp, 1);

      switch (code) {
      case PLUS_EXPR:
      case MULT_EXPR:
	/* Monotonically increasing in both operands.  NOTE(review): for
	   MULT_EXPR this presumes nonnegative operands — confirm callers.  */
	return fold_build2 (code, type, max_value(op0, max_p),
			    max_value (op1, max_p));
      case MINUS_EXPR:
      case TRUNC_DIV_EXPR:
	/* Increasing in the first operand, decreasing in the second.  */
	return fold_build2 (code, type, max_value(op0, max_p),
			    max_value (op1, !max_p));
      default:
	break;
      }
      break;

    case tcc_expression:
      if (code == COND_EXPR)
	{
	  op0 = TREE_OPERAND (exp, 0);
	  op1 = TREE_OPERAND (exp, 1);
	  op2 = TREE_OPERAND (exp, 2);

	  /* Punt on gimplified COND_EXPRs, which have no operands.  */
	  if (!op1 || !op2)
	    break;

	  op1 = max_value (op1, max_p);
	  op2 = max_value (op2, max_p);

	  /* If neither arm changed, no value could be computed.  */
	  if (op1 == TREE_OPERAND (exp, 1) && op2 == TREE_OPERAND (exp, 2))
	    break;

	  /* The extremum of a conditional is the extremum of its arms.  */
	  return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type, op1, op2);
	}
      break;

    default:
      break;
    }

  /* No value could be computed for EXP.  */
  return exp;
}
/* EXP is an expression for the size of an object.  If this size contains
   discriminant references, replace them with the maximum (if MAX_P) or
   minimum (if !MAX_P) possible value of the discriminant.

   Note that the expression may have already been gimplified, in which case
   COND_EXPRs have VOID_TYPE and no operands, and this must be handled.  */

tree
max_size (tree exp, bool max_p)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);
  tree op0, op1, op2;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
    case tcc_constant:
      return exp;

    case tcc_exceptional:
      gcc_assert (code == SSA_NAME);
      return exp;

    case tcc_vl_exp:
      if (code == CALL_EXPR)
	{
	  tree t, *argarray;
	  int n, i;

	  /* Try inlining the call first so we can recurse into the body.  */
	  t = maybe_inline_call_in_expr (exp);
	  if (t)
	    return max_size (t, max_p);

	  n = call_expr_nargs (exp);
	  gcc_assert (n > 0);
	  argarray = XALLOCAVEC (tree, n);
	  /* This is used to remove possible placeholder in call args.  */
	  for (i = 0; i < n; i++)
	    argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
	  return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
	}
      break;

    case tcc_reference:
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
	 modify.  Otherwise, we treat it like a variable.  */
      if (CONTAINS_PLACEHOLDER_P (exp))
	{
	  /* Replace the discriminant reference with the extreme value of
	     its base type, converted to that base type.  */
	  tree base_type = get_base_type (TREE_TYPE (TREE_OPERAND (exp, 1)));
	  tree val
	    = fold_convert (base_type,
			    max_p
			    ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type));

	  /* Walk down the extra subtypes to get more restrictive bounds.  */
	  while (TYPE_IS_EXTRA_SUBTYPE_P (type))
	    {
	      type = TREE_TYPE (type);
	      if (max_p)
		val = fold_build2 (MIN_EXPR, base_type, val,
				   fold_convert (base_type,
						 TYPE_MAX_VALUE (type)));
	      else
		val = fold_build2 (MAX_EXPR, base_type, val,
				   fold_convert (base_type,
						 TYPE_MIN_VALUE (type)));
	    }

	  return fold_convert (type, max_size (val, max_p));
	}

      return exp;

    case tcc_comparison:
      /* A comparison yields at most 1 (true) and at least 0 (false).  */
      return build_int_cst (type, max_p ? 1 : 0);

    case tcc_unary:
      op0 = TREE_OPERAND (exp, 0);

      if (code == NON_LVALUE_EXPR)
	return max_size (op0, max_p);

      /* A gimplified COND_EXPR operand has VOID_TYPE: punt to the extreme
	 value of the result type.  */
      if (VOID_TYPE_P (TREE_TYPE (op0)))
	return max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type);

      /* Negation flips the direction of the extremum.  */
      op0 = max_size (op0, code == NEGATE_EXPR ? !max_p : max_p);

      if (op0 == TREE_OPERAND (exp, 0))
	return exp;

      return fold_build1 (code, type, op0);

    case tcc_binary:
      op0 = TREE_OPERAND (exp, 0);
      op1 = TREE_OPERAND (exp, 1);

      /* If we have a multiply-add with a "negative" value in an unsigned
	 type, do a multiply-subtract with the negated value, in order to
	 avoid creating a spurious overflow below.  */
      if (code == PLUS_EXPR
	  && TREE_CODE (op0) == MULT_EXPR
	  && TYPE_UNSIGNED (type)
	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (op0, 1))
	  && tree_int_cst_sign_bit (TREE_OPERAND (op0, 1)))
	{
	  tree tmp = op1;
	  op1 = build2 (MULT_EXPR, type, TREE_OPERAND (op0, 0),
			fold_build1 (NEGATE_EXPR, type,
				     TREE_OPERAND (op0, 1)));
	  op0 = tmp;
	  code = MINUS_EXPR;
	}

      op0 = max_size (op0, max_p);
      /* The second operand of a subtraction goes in the opposite
	 direction.  */
      op1 = max_size (op1, code == MINUS_EXPR ? !max_p : max_p);

      if ((code == MINUS_EXPR || code == PLUS_EXPR))
	{
	  /* If the op0 has overflowed and the op1 is a variable,
	     propagate the overflow by returning the op0.  */
	  if (TREE_CODE (op0) == INTEGER_CST
	      && TREE_OVERFLOW (op0)
	      && TREE_CODE (op1) != INTEGER_CST)
	    return op0;

	  /* If we have a "negative" value in an unsigned type, do the
	     opposite operation on the negated value, in order to avoid
	     creating a spurious overflow below.  */
	  if (TYPE_UNSIGNED (type)
	      && TREE_CODE (op1) == INTEGER_CST
	      && !TREE_OVERFLOW (op1)
	      && tree_int_cst_sign_bit (op1))
	    {
	      op1 = fold_build1 (NEGATE_EXPR, type, op1);
	      code = (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR);
	    }
	}

      if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
	return exp;

      /* We need to detect overflows so we call size_binop here.  */
      return size_binop (code, op0, op1);

    case tcc_expression:
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  if (code == SAVE_EXPR)
	    return exp;

	  /* Logical negation flips the direction of the extremum.  */
	  op0 = max_size (TREE_OPERAND (exp, 0),
			  code == TRUTH_NOT_EXPR ? !max_p : max_p);

	  if (op0 == TREE_OPERAND (exp, 0))
	    return exp;

	  return fold_build1 (code, type, op0);

	case 2:
	  /* Only the value operand of a COMPOUND_EXPR matters.  */
	  if (code == COMPOUND_EXPR)
	    return max_size (TREE_OPERAND (exp, 1), max_p);

	  op0 = max_size (TREE_OPERAND (exp, 0), max_p);
	  op1 = max_size (TREE_OPERAND (exp, 1), max_p);

	  if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
	    return exp;

	  return fold_build2 (code, type, op0, op1);

	case 3:
	  if (code == COND_EXPR)
	    {
	      op0 = TREE_OPERAND (exp, 0);
	      op1 = TREE_OPERAND (exp, 1);
	      op2 = TREE_OPERAND (exp, 2);

	      /* Punt on gimplified COND_EXPRs with no operands.  */
	      if (!op1 || !op2)
		return exp;

	      op1 = max_size (op1, max_p);
	      op2 = max_size (op2, max_p);

	      /* If we have the MAX of a "negative" value in an unsigned type
		 and zero for a length expression, just return zero.  */
	      if (max_p
		  && TREE_CODE (op0) == LE_EXPR
		  && TYPE_UNSIGNED (type)
		  && TREE_CODE (op1) == INTEGER_CST
		  && !TREE_OVERFLOW (op1)
		  && tree_int_cst_sign_bit (op1)
		  && integer_zerop (op2))
		return op2;

	      return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type, op1, op2);
	    }
	  break;

	default:
	  break;
	}

      /* Other tree classes cannot happen.  */
    default:
      break;
    }

  gcc_unreachable ();
}
/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
   EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
   Return a constructor for the template.  */

tree
build_template (tree template_type, tree array_type, tree expr)
{
  vec<constructor_elt, va_gc> *template_elts = NULL;
  tree bound_list = NULL_TREE;
  tree field;

  /* Strip padding and justified modular wrappers to get at the array.  */
  while (TREE_CODE (array_type) == RECORD_TYPE
	 && (TYPE_PADDING_P (array_type)
	     || TYPE_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  /* If actual bounds were recorded for the array (or for a packed array
     implemented as an integer), use those in preference to the domains.  */
  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
	  && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.  Go down
     the field list of the template instead of the type chain because
     this array might be an Ada array of array and we can't tell where
     the nested array stop being the underlying object.  Fields come in
     (min, max) pairs, hence the double DECL_CHAIN stride.  */
  for (field = TYPE_FIELDS (template_type);
       field;
       field = DECL_CHAIN (DECL_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
	 for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
	 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the
	 template, but this will only give us a maximum range.  */
      if (bound_list)
	{
	  bounds = TREE_VALUE (bound_list);
	  bound_list = TREE_CHAIN (bound_list);
	}
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
	{
	  bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
	  array_type = TREE_TYPE (array_type);
	}
      else if (expr && TREE_CODE (expr) == PARM_DECL
	       && DECL_BY_COMPONENT_PTR_P (expr))
	bounds = TREE_TYPE (field);
      else
	gcc_unreachable ();

      min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
	 substitute it from OBJECT.  */
      min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
      max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);

      CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
      CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
    }

  return gnat_build_constructor (template_type, template_elts);
}
4219 /* Return true if TYPE is suitable for the element type of a vector. */
4221 static bool
4222 type_for_vector_element_p (tree type)
4224 machine_mode mode;
4226 if (!INTEGRAL_TYPE_P (type)
4227 && !SCALAR_FLOAT_TYPE_P (type)
4228 && !FIXED_POINT_TYPE_P (type))
4229 return false;
4231 mode = TYPE_MODE (type);
4232 if (GET_MODE_CLASS (mode) != MODE_INT
4233 && !SCALAR_FLOAT_MODE_P (mode)
4234 && !ALL_SCALAR_FIXED_POINT_MODE_P (mode))
4235 return false;
4237 return true;
/* Return a vector type given the SIZE and the INNER_TYPE, or NULL_TREE if
   this is not possible.  If ATTRIBUTE is non-zero, we are processing the
   attribute declaration and want to issue error messages on failure.  */

static tree
build_vector_type_for_size (tree inner_type, tree size, tree attribute)
{
  unsigned HOST_WIDE_INT size_int, inner_size_int;
  int nunits;

  /* Silently punt on variable sizes.  We can't make vector types for them,
     need to ignore them on front-end generated subtypes of unconstrained
     base types, and this attribute is for binding implementors, not end
     users, so we should never get there from legitimate explicit uses.  */
  if (!tree_fits_uhwi_p (size))
    return NULL_TREE;
  size_int = tree_to_uhwi (size);

  /* The element type must be a scalar suitable for a vector.  */
  if (!type_for_vector_element_p (inner_type))
    {
      if (attribute)
	error ("invalid element type for attribute %qs",
	       IDENTIFIER_POINTER (attribute));
      return NULL_TREE;
    }
  inner_size_int = tree_to_uhwi (TYPE_SIZE_UNIT (inner_type));

  /* The vector size must be a whole number of elements.  */
  if (size_int % inner_size_int)
    {
      if (attribute)
	error ("vector size not an integral multiple of component size");
      return NULL_TREE;
    }

  if (size_int == 0)
    {
      if (attribute)
	error ("zero vector size");
      return NULL_TREE;
    }

  /* The number of elements must be a power of two (checked with the usual
     n & (n - 1) trick).  */
  nunits = size_int / inner_size_int;
  if (nunits & (nunits - 1))
    {
      if (attribute)
	error ("number of components of vector not a power of two");
      return NULL_TREE;
    }

  return build_vector_type (inner_type, nunits);
}
4292 /* Return a vector type whose representative array type is ARRAY_TYPE, or
4293 NULL_TREE if this is not possible. If ATTRIBUTE is non-zero, we are
4294 processing the attribute and want to issue error messages on failure. */
4296 static tree
4297 build_vector_type_for_array (tree array_type, tree attribute)
4299 tree vector_type = build_vector_type_for_size (TREE_TYPE (array_type),
4300 TYPE_SIZE_UNIT (array_type),
4301 attribute);
4302 if (!vector_type)
4303 return NULL_TREE;
4305 TYPE_REPRESENTATIVE_ARRAY (vector_type) = array_type;
4306 return vector_type;
/* Build a type to be used to represent an aliased object whose nominal type
   is an unconstrained array.  This consists of a RECORD_TYPE containing a
   field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
   If ARRAY_TYPE is that of an unconstrained array, this is used to represent
   an arbitrary unconstrained object.  Use NAME as the name of the record.
   DEBUG_INFO_P is true if we need to write debug information for the type.  */

tree
build_unc_object_type (tree template_type, tree object_type, tree name,
		       bool debug_info_p)
{
  tree type = make_node (RECORD_TYPE);
  /* The bounds (template) come first, followed by the array data.  */
  tree template_field
    = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
			 NULL_TREE, NULL_TREE, 0, 1);
  tree array_field
    = create_field_decl (get_identifier ("ARRAY"), object_type, type,
			 NULL_TREE, NULL_TREE, 0, 1);

  TYPE_NAME (type) = name;
  TYPE_CONTAINS_TEMPLATE_P (type) = 1;
  DECL_CHAIN (template_field) = array_field;
  finish_record_type (type, template_field, 0, true);

  /* Declare it now since it will never be declared otherwise.  This is
     necessary to ensure that its subtrees are properly marked.  */
  create_type_decl (name, type, true, debug_info_p, Empty);

  return type;
}
4340 /* Same, taking a thin or fat pointer type instead of a template type. */
4342 tree
4343 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
4344 tree name, bool debug_info_p)
4346 tree template_type;
4348 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
4350 template_type
4351 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
4352 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
4353 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
4355 return
4356 build_unc_object_type (template_type, object_type, name, debug_info_p);
/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
   In the normal case this is just two adjustments, but we have more to
   do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE.  */

void
update_pointer_to (tree old_type, tree new_type)
{
  tree ptr = TYPE_POINTER_TO (old_type);
  tree ref = TYPE_REFERENCE_TO (old_type);
  tree t;

  /* If this is the main variant, process all the other variants first.  */
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
    for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
      update_pointer_to (t, new_type);

  /* If no pointers and no references, we are done.  */
  if (!ptr && !ref)
    return;

  /* Merge the old type qualifiers in the new type.

     Each old variant has qualifiers for specific reasons, and the new
     designated type as well.  Each set of qualifiers represents useful
     information grabbed at some point, and merging the two simply unifies
     these inputs into the final type description.

     Consider for instance a volatile type frozen after an access to constant
     type designating it; after the designated type's freeze, we get here with
     a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
     when the access type was processed.  We will make a volatile and readonly
     designated type, because that's what it really is.

     We might also get here for a non-dummy OLD_TYPE variant with different
     qualifiers than those of NEW_TYPE, for instance in some cases of pointers
     to private record type elaboration (see the comments around the call to
     this routine in gnat_to_gnu_entity <E_Access_Type>).  We have to merge
     the qualifiers in those cases too, to avoid accidentally discarding the
     initial set, and will often end up with OLD_TYPE == NEW_TYPE then.  */
  new_type
    = build_qualified_type (new_type,
			    TYPE_QUALS (old_type) | TYPE_QUALS (new_type));

  /* If old type and new type are identical, there is nothing to do.  */
  if (old_type == new_type)
    return;

  /* Otherwise, first handle the simple case.  */
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
    {
      tree new_ptr, new_ref;

      /* If pointer or reference already points to new type, nothing to do.
	 This can happen as update_pointer_to can be invoked multiple times
	 on the same couple of types because of the type variants.  */
      if ((ptr && TREE_TYPE (ptr) == new_type)
	  || (ref && TREE_TYPE (ref) == new_type))
	return;

      /* Chain PTR and its variants at the end.  */
      new_ptr = TYPE_POINTER_TO (new_type);
      if (new_ptr)
	{
	  while (TYPE_NEXT_PTR_TO (new_ptr))
	    new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
	  TYPE_NEXT_PTR_TO (new_ptr) = ptr;
	}
      else
	TYPE_POINTER_TO (new_type) = ptr;

      /* Now adjust them: retarget every pointer (and each of its variants)
	 at NEW_TYPE, including the pointer inside any null fat-pointer
	 bounds constant hanging off the variant.  */
      for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
	for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
	  {
	    TREE_TYPE (t) = new_type;
	    if (TYPE_NULL_BOUNDS (t))
	      TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
	    TYPE_CANONICAL (t) = TYPE_CANONICAL (TYPE_POINTER_TO (new_type));
	  }

      /* Chain REF and its variants at the end.  */
      new_ref = TYPE_REFERENCE_TO (new_type);
      if (new_ref)
	{
	  while (TYPE_NEXT_REF_TO (new_ref))
	    new_ref = TYPE_NEXT_REF_TO (new_ref);
	  TYPE_NEXT_REF_TO (new_ref) = ref;
	}
      else
	TYPE_REFERENCE_TO (new_type) = ref;

      /* Now adjust them.  */
      for (; ref; ref = TYPE_NEXT_REF_TO (ref))
	for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
	  {
	    TREE_TYPE (t) = new_type;
	    TYPE_CANONICAL (t) = TYPE_CANONICAL (TYPE_REFERENCE_TO (new_type));
	  }

      TYPE_POINTER_TO (old_type) = NULL_TREE;
      TYPE_REFERENCE_TO (old_type) = NULL_TREE;
    }

  /* Now deal with the unconstrained array case.  In this case the pointer
     is actually a record where both fields are pointers to dummy nodes.
     Turn them into pointers to the correct types using update_pointer_to.
     Likewise for the pointer to the object record (thin pointer).  */
  else
    {
      tree new_ptr = TYPE_POINTER_TO (new_type);

      gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));

      /* If PTR already points to NEW_TYPE, nothing to do.  This can happen
	 since update_pointer_to can be invoked multiple times on the same
	 couple of types because of the type variants.  */
      if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
	return;

      /* Recurse on the array-pointer field of the fat pointer.  */
      update_pointer_to
	(TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
	 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));

      /* Recurse on the template-pointer field of the fat pointer.  */
      update_pointer_to
	(TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
	 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));

      /* And finally on the object record type (thin pointer case).  */
      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
			 TYPE_OBJECT_RECORD_TYPE (new_type));

      TYPE_POINTER_TO (old_type) = NULL_TREE;
      TYPE_REFERENCE_TO (old_type) = NULL_TREE;
    }
}
/* Convert EXPR, a pointer to a constrained array, into a pointer to an
   unconstrained one.  This involves making or finding a template.  */

static tree
convert_to_fat_pointer (tree type, tree expr)
{
  /* First field of the fat pointer is the pointer to the array, second
     field is the pointer to the template (bounds).  */
  tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
  tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
  tree etype = TREE_TYPE (expr);
  tree template_addr;
  vec<constructor_elt, va_gc> *v;
  vec_alloc (v, 2);

  /* If EXPR is null, make a fat pointer that contains a null pointer to the
     array (compare_fat_pointers ensures that this is the full discriminant)
     and a valid pointer to the bounds.  This latter property is necessary
     since the compiler can hoist the load of the bounds done through it.  */
  if (integer_zerop (expr))
    {
      tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
      tree null_bounds, t;

      /* Reuse the cached null-bounds constant if one was already made.  */
      if (TYPE_NULL_BOUNDS (ptr_template_type))
	null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
      else
	{
	  /* The template type can still be dummy at this point so we build an
	     empty constructor.  The middle-end will fill it in with zeros.  */
	  t = build_constructor (template_type, NULL);
	  TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
	  null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
	  SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
	}

      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
			      fold_convert (p_array_type, null_pointer_node));
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
      t = build_constructor (type, v);
      /* Do not set TREE_CONSTANT so as to force T to static memory.  */
      TREE_CONSTANT (t) = 0;
      TREE_STATIC (t) = 1;

      return t;
    }

  /* If EXPR is a thin pointer, make template and data from the record.  */
  if (TYPE_IS_THIN_POINTER_P (etype))
    {
      tree field = TYPE_FIELDS (TREE_TYPE (etype));

      /* EXPR is used twice below, so protect it against multiple
	 evaluation.  */
      expr = gnat_protect_expr (expr);

      /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
	 the thin pointer value has been shifted so we shift it back to get
	 the template address.  */
      if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
	{
	  template_addr
	    = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
			       fold_build1 (NEGATE_EXPR, sizetype,
					    byte_position
					    (DECL_CHAIN (field))));
	  template_addr
	    = fold_convert (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))),
			    template_addr);
	}

      /* Otherwise we explicitly take the address of the fields.  */
      else
	{
	  expr = build_unary_op (INDIRECT_REF, NULL_TREE, expr);
	  template_addr
	    = build_unary_op (ADDR_EXPR, NULL_TREE,
			      build_component_ref (expr, field, false));
	  expr = build_unary_op (ADDR_EXPR, NULL_TREE,
				 build_component_ref (expr, DECL_CHAIN (field),
						      false));
	}
    }

  /* Otherwise, build the constructor for the template.  */
  else
    template_addr
      = build_unary_op (ADDR_EXPR, NULL_TREE,
			build_template (template_type, TREE_TYPE (etype),
					expr));

  /* The final result is a constructor for the fat pointer.

     If EXPR is an argument of a foreign convention subprogram, the type it
     points to is directly the component type.  In this case, the expression
     type may not match the corresponding FIELD_DECL type at this point, so we
     call "convert" here to fix that up if necessary.  This type consistency is
     required, for instance because it ensures that possible later folding of
     COMPONENT_REFs against this constructor always yields something of the
     same type as the initial reference.

     Note that the call to "build_template" above is still fine because it
     will only refer to the provided TEMPLATE_TYPE in this case.  */
  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), convert (p_array_type, expr));
  CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), template_addr);
  return gnat_build_constructor (type, v);
}
4598 /* Create an expression whose value is that of EXPR,
4599 converted to type TYPE. The TREE_TYPE of the value
4600 is always TYPE. This function implements all reasonable
4601 conversions; callers should filter out those that are
4602 not permitted by the language being compiled. */
4604 tree
4605 convert (tree type, tree expr)
4607 tree etype = TREE_TYPE (expr);
4608 enum tree_code ecode = TREE_CODE (etype);
4609 enum tree_code code = TREE_CODE (type);
4611 /* If the expression is already of the right type, we are done. */
4612 if (etype == type)
4613 return expr;
4615 /* If both input and output have padding and are of variable size, do this
4616 as an unchecked conversion. Likewise if one is a mere variant of the
4617 other, so we avoid a pointless unpad/repad sequence. */
4618 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
4619 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4620 && (!TREE_CONSTANT (TYPE_SIZE (type))
4621 || !TREE_CONSTANT (TYPE_SIZE (etype))
4622 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4623 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
4624 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
4627 /* If the output type has padding, convert to the inner type and make a
4628 constructor to build the record, unless a variable size is involved. */
4629 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
4631 /* If we previously converted from another type and our type is
4632 of variable size, remove the conversion to avoid the need for
4633 variable-sized temporaries. Likewise for a conversion between
4634 original and packable version. */
4635 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4636 && (!TREE_CONSTANT (TYPE_SIZE (type))
4637 || (ecode == RECORD_TYPE
4638 && TYPE_NAME (etype)
4639 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
4640 expr = TREE_OPERAND (expr, 0);
4642 /* If we are just removing the padding from expr, convert the original
4643 object if we have variable size in order to avoid the need for some
4644 variable-sized temporaries. Likewise if the padding is a variant
4645 of the other, so we avoid a pointless unpad/repad sequence. */
4646 if (TREE_CODE (expr) == COMPONENT_REF
4647 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
4648 && (!TREE_CONSTANT (TYPE_SIZE (type))
4649 || TYPE_MAIN_VARIANT (type)
4650 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
4651 || (ecode == RECORD_TYPE
4652 && TYPE_NAME (etype)
4653 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
4654 return convert (type, TREE_OPERAND (expr, 0));
4656 /* If the inner type is of self-referential size and the expression type
4657 is a record, do this as an unchecked conversion unless both types are
4658 essentially the same. */
4659 if (ecode == RECORD_TYPE
4660 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type)))
4661 && TYPE_MAIN_VARIANT (etype)
4662 != TYPE_MAIN_VARIANT (TREE_TYPE (TYPE_FIELDS (type))))
4663 return unchecked_convert (type, expr, false);
4665 /* If we are converting between array types with variable size, do the
4666 final conversion as an unchecked conversion, again to avoid the need
4667 for some variable-sized temporaries. If valid, this conversion is
4668 very likely purely technical and without real effects. */
4669 if (ecode == ARRAY_TYPE
4670 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
4671 && !TREE_CONSTANT (TYPE_SIZE (etype))
4672 && !TREE_CONSTANT (TYPE_SIZE (type)))
4673 return unchecked_convert (type,
4674 convert (TREE_TYPE (TYPE_FIELDS (type)),
4675 expr),
4676 false);
4678 tree t = convert (TREE_TYPE (TYPE_FIELDS (type)), expr);
4680 /* If converting to the inner type has already created a CONSTRUCTOR with
4681 the right size, then reuse it instead of creating another one. This
4682 can happen for the padding type built to overalign local variables. */
4683 if (TREE_CODE (t) == VIEW_CONVERT_EXPR
4684 && TREE_CODE (TREE_OPERAND (t, 0)) == CONSTRUCTOR
4685 && TREE_CONSTANT (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (t, 0))))
4686 && tree_int_cst_equal (TYPE_SIZE (type),
4687 TYPE_SIZE (TREE_TYPE (TREE_OPERAND (t, 0)))))
4688 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (t, 0));
4690 vec<constructor_elt, va_gc> *v;
4691 vec_alloc (v, 1);
4692 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), t);
4693 return gnat_build_constructor (type, v);
4696 /* If the input type has padding, remove it and convert to the output type.
4697 The conditions ordering is arranged to ensure that the output type is not
4698 a padding type here, as it is not clear whether the conversion would
4699 always be correct if this was to happen. */
4700 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
4702 tree unpadded;
4704 /* If we have just converted to this padded type, just get the
4705 inner expression. */
4706 if (TREE_CODE (expr) == CONSTRUCTOR)
4707 unpadded = CONSTRUCTOR_ELT (expr, 0)->value;
4709 /* Otherwise, build an explicit component reference. */
4710 else
4711 unpadded = build_component_ref (expr, TYPE_FIELDS (etype), false);
4713 return convert (type, unpadded);
4716 /* If the input is a biased type, convert first to the base type and add
4717 the bias. Note that the bias must go through a full conversion to the
4718 base type, lest it is itself a biased value; this happens for subtypes
4719 of biased types. */
4720 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4721 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
4722 fold_convert (TREE_TYPE (etype), expr),
4723 convert (TREE_TYPE (etype),
4724 TYPE_MIN_VALUE (etype))));
4726 /* If the input is a justified modular type, we need to extract the actual
4727 object before converting it to an other type with the exceptions of an
4728 [unconstrained] array or a mere type variant. It is useful to avoid
4729 the extraction and conversion in these cases because it could end up
4730 replacing a VAR_DECL by a constructor and we might be about the take
4731 the address of the result. */
4732 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4733 && code != ARRAY_TYPE
4734 && code != UNCONSTRAINED_ARRAY_TYPE
4735 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
4736 return
4737 convert (type, build_component_ref (expr, TYPE_FIELDS (etype), false));
4739 /* If converting to a type that contains a template, convert to the data
4740 type and then build the template. */
4741 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4743 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4744 vec<constructor_elt, va_gc> *v;
4745 vec_alloc (v, 2);
4747 /* If the source already has a template, get a reference to the
4748 associated array only, as we are going to rebuild a template
4749 for the target type anyway. */
4750 expr = maybe_unconstrained_array (expr);
4752 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4753 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4754 obj_type, NULL_TREE));
4755 if (expr)
4756 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
4757 convert (obj_type, expr));
4758 return gnat_build_constructor (type, v);
4761 /* There are some cases of expressions that we process specially. */
4762 switch (TREE_CODE (expr))
4764 case ERROR_MARK:
4765 return expr;
4767 case NULL_EXPR:
4768 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4769 conversion in gnat_expand_expr. NULL_EXPR does not represent
4770 and actual value, so no conversion is needed. */
4771 expr = copy_node (expr);
4772 TREE_TYPE (expr) = type;
4773 return expr;
4775 case STRING_CST:
4776 /* If we are converting a STRING_CST to another constrained array type,
4777 just make a new one in the proper type. */
4778 if (code == ecode
4779 && !(TREE_CONSTANT (TYPE_SIZE (etype))
4780 && !TREE_CONSTANT (TYPE_SIZE (type))))
4782 expr = copy_node (expr);
4783 TREE_TYPE (expr) = type;
4784 return expr;
4786 break;
4788 case VECTOR_CST:
4789 /* If we are converting a VECTOR_CST to a mere type variant, just make
4790 a new one in the proper type. */
4791 if (code == ecode && gnat_types_compatible_p (type, etype))
4793 expr = copy_node (expr);
4794 TREE_TYPE (expr) = type;
4795 return expr;
4797 break;
4799 case CONSTRUCTOR:
4800 /* If we are converting a CONSTRUCTOR to a mere type variant, or to
4801 another padding type around the same type, just make a new one in
4802 the proper type. */
4803 if (code == ecode
4804 && (gnat_types_compatible_p (type, etype)
4805 || (code == RECORD_TYPE
4806 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4807 && TREE_TYPE (TYPE_FIELDS (type))
4808 == TREE_TYPE (TYPE_FIELDS (etype)))))
4810 expr = copy_node (expr);
4811 TREE_TYPE (expr) = type;
4812 CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
4813 return expr;
4816 /* Likewise for a conversion between original and packable version, or
4817 conversion between types of the same size and with the same list of
4818 fields, but we have to work harder to preserve type consistency. */
4819 if (code == ecode
4820 && code == RECORD_TYPE
4821 && (TYPE_NAME (type) == TYPE_NAME (etype)
4822 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
4825 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4826 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4827 vec<constructor_elt, va_gc> *v;
4828 vec_alloc (v, len);
4829 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4830 unsigned HOST_WIDE_INT idx;
4831 tree index, value;
4833 /* Whether we need to clear TREE_CONSTANT et al. on the output
4834 constructor when we convert in place. */
4835 bool clear_constant = false;
4837 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
4839 /* Skip the missing fields in the CONSTRUCTOR. */
4840 while (efield && field && !SAME_FIELD_P (efield, index))
4842 efield = DECL_CHAIN (efield);
4843 field = DECL_CHAIN (field);
4845 /* The field must be the same. */
4846 if (!(efield && field && SAME_FIELD_P (efield, field)))
4847 break;
4848 constructor_elt elt
4849 = {field, convert (TREE_TYPE (field), value)};
4850 v->quick_push (elt);
4852 /* If packing has made this field a bitfield and the input
4853 value couldn't be emitted statically any more, we need to
4854 clear TREE_CONSTANT on our output. */
4855 if (!clear_constant
4856 && TREE_CONSTANT (expr)
4857 && !CONSTRUCTOR_BITFIELD_P (efield)
4858 && CONSTRUCTOR_BITFIELD_P (field)
4859 && !initializer_constant_valid_for_bitfield_p (value))
4860 clear_constant = true;
4862 efield = DECL_CHAIN (efield);
4863 field = DECL_CHAIN (field);
4866 /* If we have been able to match and convert all the input fields
4867 to their output type, convert in place now. We'll fallback to a
4868 view conversion downstream otherwise. */
4869 if (idx == len)
4871 expr = copy_node (expr);
4872 TREE_TYPE (expr) = type;
4873 CONSTRUCTOR_ELTS (expr) = v;
4874 if (clear_constant)
4875 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
4876 return expr;
4880 /* Likewise for a conversion between array type and vector type with a
4881 compatible representative array. */
4882 else if (code == VECTOR_TYPE
4883 && ecode == ARRAY_TYPE
4884 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4885 etype))
4887 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4888 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4889 vec<constructor_elt, va_gc> *v;
4890 unsigned HOST_WIDE_INT ix;
4891 tree value;
4893 /* Build a VECTOR_CST from a *constant* array constructor. */
4894 if (TREE_CONSTANT (expr))
4896 bool constant_p = true;
4898 /* Iterate through elements and check if all constructor
4899 elements are *_CSTs. */
4900 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4901 if (!CONSTANT_CLASS_P (value))
4903 constant_p = false;
4904 break;
4907 if (constant_p)
4908 return build_vector_from_ctor (type,
4909 CONSTRUCTOR_ELTS (expr));
4912 /* Otherwise, build a regular vector constructor. */
4913 vec_alloc (v, len);
4914 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4916 constructor_elt elt = {NULL_TREE, value};
4917 v->quick_push (elt);
4919 expr = copy_node (expr);
4920 TREE_TYPE (expr) = type;
4921 CONSTRUCTOR_ELTS (expr) = v;
4922 return expr;
4924 break;
4926 case UNCONSTRAINED_ARRAY_REF:
4927 /* First retrieve the underlying array. */
4928 expr = maybe_unconstrained_array (expr);
4929 etype = TREE_TYPE (expr);
4930 ecode = TREE_CODE (etype);
4931 break;
4933 case VIEW_CONVERT_EXPR:
4935 /* GCC 4.x is very sensitive to type consistency overall, and view
4936 conversions thus are very frequent. Even though just "convert"ing
4937 the inner operand to the output type is fine in most cases, it
4938 might expose unexpected input/output type mismatches in special
4939 circumstances so we avoid such recursive calls when we can. */
4940 tree op0 = TREE_OPERAND (expr, 0);
4942 /* If we are converting back to the original type, we can just
4943 lift the input conversion. This is a common occurrence with
4944 switches back-and-forth amongst type variants. */
4945 if (type == TREE_TYPE (op0))
4946 return op0;
4948 /* Otherwise, if we're converting between two aggregate or vector
4949 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4950 target type in place or to just convert the inner expression. */
4951 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4952 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
4954 /* If we are converting between mere variants, we can just
4955 substitute the VIEW_CONVERT_EXPR in place. */
4956 if (gnat_types_compatible_p (type, etype))
4957 return build1 (VIEW_CONVERT_EXPR, type, op0);
4959 /* Otherwise, we may just bypass the input view conversion unless
4960 one of the types is a fat pointer, which is handled by
4961 specialized code below which relies on exact type matching. */
4962 else if (!TYPE_IS_FAT_POINTER_P (type)
4963 && !TYPE_IS_FAT_POINTER_P (etype))
4964 return convert (type, op0);
4967 break;
4970 default:
4971 break;
4974 /* Check for converting to a pointer to an unconstrained array. */
4975 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
4976 return convert_to_fat_pointer (type, expr);
4978 /* If we are converting between two aggregate or vector types that are mere
4979 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4980 to a vector type from its representative array type. */
4981 else if ((code == ecode
4982 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4983 && gnat_types_compatible_p (type, etype))
4984 || (code == VECTOR_TYPE
4985 && ecode == ARRAY_TYPE
4986 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4987 etype)))
4988 return build1 (VIEW_CONVERT_EXPR, type, expr);
4990 /* If we are converting between tagged types, try to upcast properly.
4991 But don't do it if we are just annotating types since tagged types
4992 aren't fully laid out in this mode. */
4993 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4994 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type)
4995 && !type_annotate_only)
4997 tree child_etype = etype;
4998 do {
4999 tree field = TYPE_FIELDS (child_etype);
5000 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
5001 return build_component_ref (expr, field, false);
5002 child_etype = TREE_TYPE (field);
5003 } while (TREE_CODE (child_etype) == RECORD_TYPE);
5006 /* If we are converting from a smaller form of record type back to it, just
5007 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
5008 size on both sides. */
5009 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
5010 && smaller_form_type_p (etype, type))
5012 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
5013 false, false, true),
5014 expr);
5015 return build1 (VIEW_CONVERT_EXPR, type, expr);
5018 /* In all other cases of related types, make a NOP_EXPR. */
5019 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
5020 return fold_convert (type, expr);
5022 switch (code)
5024 case VOID_TYPE:
5025 return fold_build1 (CONVERT_EXPR, type, expr);
5027 case INTEGER_TYPE:
5028 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
5029 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
5030 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
5031 return unchecked_convert (type, expr, false);
5033 /* If the output is a biased type, convert first to the base type and
5034 subtract the bias. Note that the bias itself must go through a full
5035 conversion to the base type, lest it is a biased value; this happens
5036 for subtypes of biased types. */
5037 if (TYPE_BIASED_REPRESENTATION_P (type))
5038 return fold_convert (type,
5039 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
5040 convert (TREE_TYPE (type), expr),
5041 convert (TREE_TYPE (type),
5042 TYPE_MIN_VALUE (type))));
5044 /* If we are converting an additive expression to an integer type
5045 with lower precision, be wary of the optimization that can be
5046 applied by convert_to_integer. There are 2 problematic cases:
5047 - if the first operand was originally of a biased type,
5048 because we could be recursively called to convert it
5049 to an intermediate type and thus rematerialize the
5050 additive operator endlessly,
5051 - if the expression contains a placeholder, because an
5052 intermediate conversion that changes the sign could
5053 be inserted and thus introduce an artificial overflow
5054 at compile time when the placeholder is substituted. */
5055 if (ecode == INTEGER_TYPE
5056 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
5057 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
5059 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
5061 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5062 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
5063 || CONTAINS_PLACEHOLDER_P (expr))
5064 return fold_convert (type, expr);
5067 /* ... fall through ... */
5069 case ENUMERAL_TYPE:
5070 return fold (convert_to_integer (type, expr));
5072 case BOOLEAN_TYPE:
5073 /* Do not use convert_to_integer with boolean types. */
5074 return fold_convert_loc (EXPR_LOCATION (expr), type, expr);
5076 case POINTER_TYPE:
5077 case REFERENCE_TYPE:
5078 /* If converting between two thin pointers, adjust if needed to account
5079 for differing offsets from the base pointer, depending on whether
5080 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
5081 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
5083 tree etype_pos
5084 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype))
5085 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
5086 : size_zero_node;
5087 tree type_pos
5088 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))
5089 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
5090 : size_zero_node;
5091 tree byte_diff = size_diffop (type_pos, etype_pos);
5093 expr = build1 (NOP_EXPR, type, expr);
5094 if (integer_zerop (byte_diff))
5095 return expr;
5097 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
5098 fold_convert (sizetype, byte_diff));
5101 /* If converting fat pointer to normal or thin pointer, get the pointer
5102 to the array and then convert it. */
5103 if (TYPE_IS_FAT_POINTER_P (etype))
5104 expr = build_component_ref (expr, TYPE_FIELDS (etype), false);
5106 return fold (convert_to_pointer (type, expr));
5108 case REAL_TYPE:
5109 return fold (convert_to_real (type, expr));
5111 case RECORD_TYPE:
5112 /* Do a normal conversion between scalar and justified modular type. */
5113 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
5115 vec<constructor_elt, va_gc> *v;
5116 vec_alloc (v, 1);
5118 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
5119 convert (TREE_TYPE (TYPE_FIELDS (type)),
5120 expr));
5121 return gnat_build_constructor (type, v);
5124 /* In these cases, assume the front-end has validated the conversion.
5125 If the conversion is valid, it will be a bit-wise conversion, so
5126 it can be viewed as an unchecked conversion. */
5127 return unchecked_convert (type, expr, false);
5129 case ARRAY_TYPE:
5130 /* Do a normal conversion between unconstrained and constrained array
5131 type, assuming the latter is a constrained version of the former. */
5132 if (TREE_CODE (expr) == INDIRECT_REF
5133 && ecode == ARRAY_TYPE
5134 && TREE_TYPE (etype) == TREE_TYPE (type))
5136 tree ptr_type = build_pointer_type (type);
5137 tree t = build_unary_op (INDIRECT_REF, NULL_TREE,
5138 fold_convert (ptr_type,
5139 TREE_OPERAND (expr, 0)));
5140 TREE_READONLY (t) = TREE_READONLY (expr);
5141 TREE_THIS_NOTRAP (t) = TREE_THIS_NOTRAP (expr);
5142 return t;
5145 /* In these cases, assume the front-end has validated the conversion.
5146 If the conversion is valid, it will be a bit-wise conversion, so
5147 it can be viewed as an unchecked conversion. */
5148 return unchecked_convert (type, expr, false);
5150 case UNION_TYPE:
5151 /* This is a either a conversion between a tagged type and some
5152 subtype, which we have to mark as a UNION_TYPE because of
5153 overlapping fields or a conversion of an Unchecked_Union. */
5154 return unchecked_convert (type, expr, false);
5156 case UNCONSTRAINED_ARRAY_TYPE:
5157 /* If the input is a VECTOR_TYPE, convert to the representative
5158 array type first. */
5159 if (ecode == VECTOR_TYPE)
5161 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
5162 etype = TREE_TYPE (expr);
5163 ecode = TREE_CODE (etype);
5166 /* If EXPR is a constrained array, take its address, convert it to a
5167 fat pointer, and then dereference it. Likewise if EXPR is a
5168 record containing both a template and a constrained array.
5169 Note that a record representing a justified modular type
5170 always represents a packed constrained array. */
5171 if (ecode == ARRAY_TYPE
5172 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
5173 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
5174 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
5175 return
5176 build_unary_op
5177 (INDIRECT_REF, NULL_TREE,
5178 convert_to_fat_pointer (TREE_TYPE (type),
5179 build_unary_op (ADDR_EXPR,
5180 NULL_TREE, expr)));
5182 /* Do something very similar for converting one unconstrained
5183 array to another. */
5184 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
5185 return
5186 build_unary_op (INDIRECT_REF, NULL_TREE,
5187 convert (TREE_TYPE (type),
5188 build_unary_op (ADDR_EXPR,
5189 NULL_TREE, expr)));
5190 else
5191 gcc_unreachable ();
5193 case COMPLEX_TYPE:
5194 return fold (convert_to_complex (type, expr));
5196 default:
5197 gcc_unreachable ();
5201 /* Create an expression whose value is that of EXPR converted to the common
5202 index type, which is sizetype. EXPR is supposed to be in the base type
5203 of the GNAT index type. Calling it is equivalent to doing
5205 convert (sizetype, expr)
5207 but we try to distribute the type conversion with the knowledge that EXPR
5208 cannot overflow in its type. This is a best-effort approach and we fall
5209 back to the above expression as soon as difficulties are encountered.
5211 This is necessary to overcome issues that arise when the GNAT base index
5212 type and the GCC common index type (sizetype) don't have the same size,
5213 which is quite frequent on 64-bit architectures. In this case, and if
5214 the GNAT base index type is signed but the iteration type of the loop has
5215 been forced to unsigned, the loop scalar evolution engine cannot compute
5216 a simple evolution for the general induction variables associated with the
5217 array indices, because it will preserve the wrap-around semantics in the
5218 unsigned type of their "inner" part. As a result, many loop optimizations
5219 are blocked.
5221 The solution is to use a special (basic) induction variable that is at
5222 least as large as sizetype, and to express the aforementioned general
5223 induction variables in terms of this induction variable, eliminating
5224 the problematic intermediate truncation to the GNAT base index type.
5225 This is possible as long as the original expression doesn't overflow
5226 and if the middle-end hasn't introduced artificial overflows in the
5227 course of the various simplification it can make to the expression. */
5229 tree
5230 convert_to_index_type (tree expr)
5232 enum tree_code code = TREE_CODE (expr);
5233 tree type = TREE_TYPE (expr);
5235 /* If the type is unsigned, overflow is allowed so we cannot be sure that
5236 EXPR doesn't overflow. Keep it simple if optimization is disabled. */
5237 if (TYPE_UNSIGNED (type) || !optimize || optimize_debug)
5238 return convert (sizetype, expr);
5240 switch (code)
5242 case VAR_DECL:
5243 /* The main effect of the function: replace a loop parameter with its
5244 associated special induction variable. */
5245 if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
5246 expr = DECL_INDUCTION_VAR (expr);
5247 break;
5249 CASE_CONVERT:
5251 tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
5252 /* Bail out as soon as we suspect some sort of type frobbing. */
5253 if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
5254 || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
5255 break;
5258 /* ... fall through ... */
5260 case NON_LVALUE_EXPR:
5261 return fold_build1 (code, sizetype,
5262 convert_to_index_type (TREE_OPERAND (expr, 0)));
5264 case PLUS_EXPR:
5265 case MINUS_EXPR:
5266 case MULT_EXPR:
5267 return fold_build2 (code, sizetype,
5268 convert_to_index_type (TREE_OPERAND (expr, 0)),
5269 convert_to_index_type (TREE_OPERAND (expr, 1)));
5271 case COMPOUND_EXPR:
5272 return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
5273 convert_to_index_type (TREE_OPERAND (expr, 1)));
5275 case COND_EXPR:
5276 return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
5277 convert_to_index_type (TREE_OPERAND (expr, 1)),
5278 convert_to_index_type (TREE_OPERAND (expr, 2)));
5280 default:
5281 break;
5284 return convert (sizetype, expr);
5287 /* Remove all conversions that are done in EXP. This includes converting
5288 from a padded type or to a justified modular type. If TRUE_ADDRESS
5289 is true, always return the address of the containing object even if
5290 the address is not bit-aligned. */
5292 tree
5293 remove_conversions (tree exp, bool true_address)
5295 switch (TREE_CODE (exp))
5297 case CONSTRUCTOR:
5298 if (true_address
5299 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
5300 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
5301 return
5302 remove_conversions (CONSTRUCTOR_ELT (exp, 0)->value, true);
5303 break;
5305 case COMPONENT_REF:
5306 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
5307 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
5308 break;
5310 CASE_CONVERT:
5311 case VIEW_CONVERT_EXPR:
5312 case NON_LVALUE_EXPR:
5313 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
5315 default:
5316 break;
5319 return exp;
5322 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
5323 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
5324 likewise return an expression pointing to the underlying array. */
5326 tree
5327 maybe_unconstrained_array (tree exp)
5329 enum tree_code code = TREE_CODE (exp);
5330 tree type = TREE_TYPE (exp);
5332 switch (TREE_CODE (type))
5334 case UNCONSTRAINED_ARRAY_TYPE:
5335 if (code == UNCONSTRAINED_ARRAY_REF)
5337 const bool read_only = TREE_READONLY (exp);
5338 const bool no_trap = TREE_THIS_NOTRAP (exp);
5340 exp = TREE_OPERAND (exp, 0);
5341 type = TREE_TYPE (exp);
5343 if (TREE_CODE (exp) == COND_EXPR)
5345 tree op1
5346 = build_unary_op (INDIRECT_REF, NULL_TREE,
5347 build_component_ref (TREE_OPERAND (exp, 1),
5348 TYPE_FIELDS (type),
5349 false));
5350 tree op2
5351 = build_unary_op (INDIRECT_REF, NULL_TREE,
5352 build_component_ref (TREE_OPERAND (exp, 2),
5353 TYPE_FIELDS (type),
5354 false));
5356 exp = build3 (COND_EXPR,
5357 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
5358 TREE_OPERAND (exp, 0), op1, op2);
5360 else
5362 exp = build_unary_op (INDIRECT_REF, NULL_TREE,
5363 build_component_ref (exp,
5364 TYPE_FIELDS (type),
5365 false));
5366 TREE_READONLY (exp) = read_only;
5367 TREE_THIS_NOTRAP (exp) = no_trap;
5371 else if (code == LOAD_EXPR)
5373 const Entity_Id gnat_smo = tree_to_shwi (TREE_OPERAND (exp, 1));
5374 tree t = maybe_unconstrained_array (TREE_OPERAND (exp, 0));
5375 exp = build_storage_model_load (gnat_smo, t);
5378 else if (code == NULL_EXPR)
5379 exp = build1 (NULL_EXPR,
5380 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
5381 TREE_OPERAND (exp, 0));
5382 break;
5384 case RECORD_TYPE:
5385 /* If this is a padded type and it contains a template, convert to the
5386 unpadded type first. */
5387 if (TYPE_PADDING_P (type)
5388 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
5389 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
5391 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
5392 code = TREE_CODE (exp);
5393 type = TREE_TYPE (exp);
5396 if (TYPE_CONTAINS_TEMPLATE_P (type))
5398 /* If the array initializer is a box, return NULL_TREE. */
5399 if (code == CONSTRUCTOR && CONSTRUCTOR_NELTS (exp) < 2)
5400 return NULL_TREE;
5402 exp = build_component_ref (exp, DECL_CHAIN (TYPE_FIELDS (type)),
5403 false);
5405 /* If the array is padded, remove the padding. */
5406 exp = maybe_padded_object (exp);
5408 break;
5410 default:
5411 break;
5414 return exp;
5417 /* Return true if EXPR is an expression that can be folded as an operand
5418 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
5420 static bool
5421 can_fold_for_view_convert_p (tree expr)
5423 tree t1, t2;
5425 /* The folder will fold NOP_EXPRs between integral types with the same
5426 precision (in the middle-end's sense). We cannot allow it if the
5427 types don't have the same precision in the Ada sense as well. */
5428 if (TREE_CODE (expr) != NOP_EXPR)
5429 return true;
5431 t1 = TREE_TYPE (expr);
5432 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
5434 /* Defer to the folder for non-integral conversions. */
5435 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
5436 return true;
5438 /* Only fold conversions that preserve both precisions. */
5439 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
5440 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
5441 return true;
5443 return false;
5446 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
5447 If NOTRUNC_P is true, truncation operations should be suppressed.
5449 Special care is required with (source or target) integral types whose
5450 precision is not equal to their size, to make sure we fetch or assign
5451 the value bits whose location might depend on the endianness, e.g.
5453 Rmsize : constant := 8;
5454 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
5456 type Bit_Array is array (1 .. Rmsize) of Boolean;
5457 pragma Pack (Bit_Array);
5459 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
5461 Value : Int := 2#1000_0001#;
5462 Vbits : Bit_Array := To_Bit_Array (Value);
5464 we expect the 8 bits at Vbits'Address to always contain Value, while
5465 their original location depends on the endianness, at Value'Address
5466 on a little-endian architecture but not on a big-endian one.
5468 One pitfall is that we cannot use TYPE_UNSIGNED directly to decide how
5469 the bits between the precision and the size are filled, because of the
5470 trick used in the E_Signed_Integer_Subtype case of gnat_to_gnu_entity.
5471 So we use the special predicate type_unsigned_for_rm above. */
/* Perform an unchecked conversion of EXPR to TYPE.  If NOTRUNC_P is true,
   do not sign- or zero-extend the result when the RM size of TYPE is
   smaller than its actual size (see the trailing extension code below).  */
5473 tree
5474 unchecked_convert (tree type, tree expr, bool notrunc_p)
5476 tree etype = TREE_TYPE (expr);
5477 enum tree_code ecode = TREE_CODE (etype);
5478 enum tree_code code = TREE_CODE (type);
5479 const bool ebiased
5480 = (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype));
5481 const bool biased
5482 = (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type));
5483 const bool ereverse
5484 = (AGGREGATE_TYPE_P (etype) && TYPE_REVERSE_STORAGE_ORDER (etype));
5485 const bool reverse
5486 = (AGGREGATE_TYPE_P (type) && TYPE_REVERSE_STORAGE_ORDER (type));
5487 tree tem;
/* C records the result of the size comparisons done in the branches below;
   0 means "equal or not yet compared".  */
5488 int c = 0;
5490 /* If the expression is already of the right type, we are done. */
5491 if (etype == type)
5492 return expr;
5494 /* If both types are integral or regular pointer, then just do a normal
5495 conversion. Likewise for a conversion to an unconstrained array. */
5496 if (((INTEGRAL_TYPE_P (type)
5497 || (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
5498 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
5499 && (INTEGRAL_TYPE_P (etype)
5500 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
5501 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
5502 || code == UNCONSTRAINED_ARRAY_TYPE)
/* Strip the biased representation from the source, and likewise bypass
   it on the target, by going through unbiased copies of the types.  */
5504 if (ebiased)
5506 tree ntype = copy_type (etype);
5507 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
5508 TYPE_MAIN_VARIANT (ntype) = ntype;
5509 expr = build1 (NOP_EXPR, ntype, expr);
5512 if (biased)
5514 tree rtype = copy_type (type);
5515 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
5516 TYPE_MAIN_VARIANT (rtype) = rtype;
5517 expr = convert (rtype, expr);
5518 expr = build1 (NOP_EXPR, type, expr);
5520 else
5521 expr = convert (type, expr);
5524 /* If we are converting to an integral type whose precision is not equal
5525 to its size, first unchecked convert to a record type that contains a
5526 field of the given precision. Then extract the result from the field.
5528 There is a subtlety if the source type is an aggregate type with reverse
5529 storage order because its representation is not contiguous in the native
5530 storage order, i.e. a direct unchecked conversion to an integral type
5531 with N bits of precision cannot read the first N bits of the aggregate
5532 type. To overcome it, we do an unchecked conversion to an integral type
5533 with reverse storage order and return the resulting value. This also
5534 ensures that the result of the unchecked conversion doesn't depend on
5535 the endianness of the target machine, but only on the storage order of
5536 the aggregate type.
5538 Finally, for the sake of consistency, we do the unchecked conversion
5539 to an integral type with reverse storage order as soon as the source
5540 type is an aggregate type with reverse storage order, even if there
5541 are no considerations of precision or size involved. Ultimately, we
5542 further extend this processing to any scalar type. */
5543 else if ((INTEGRAL_TYPE_P (type)
5544 && TYPE_RM_SIZE (type)
5545 && ((c = tree_int_cst_compare (TYPE_RM_SIZE (type),
5546 TYPE_SIZE (type))) < 0
5547 || ereverse))
5548 || (SCALAR_FLOAT_TYPE_P (type) && ereverse))
5550 tree rec_type = make_node (RECORD_TYPE);
5551 tree field_type, field;
5553 TYPE_REVERSE_STORAGE_ORDER (rec_type) = ereverse;
5555 if (c < 0)
5557 const unsigned HOST_WIDE_INT prec
5558 = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
5559 if (type_unsigned_for_rm (type))
5560 field_type = make_unsigned_type (prec);
5561 else
5562 field_type = make_signed_type (prec);
5563 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));
5565 else
5566 field_type = type;
5568 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
5569 NULL_TREE, bitsize_zero_node, c < 0, 0);
5571 finish_record_type (rec_type, field, 1, false);
5573 expr = unchecked_convert (rec_type, expr, notrunc_p);
5574 expr = build_component_ref (expr, field, false);
5575 expr = fold_build1 (NOP_EXPR, type, expr);
5578 /* Similarly if we are converting from an integral type whose precision is
5579 not equal to its size, first copy into a field of the given precision
5580 and unchecked convert the record type.
5582 The same considerations as above apply if the target type is an aggregate
5583 type with reverse storage order and we also proceed similarly. */
5584 else if ((INTEGRAL_TYPE_P (etype)
5585 && TYPE_RM_SIZE (etype)
5586 && ((c = tree_int_cst_compare (TYPE_RM_SIZE (etype),
5587 TYPE_SIZE (etype))) < 0
5588 || reverse))
5589 || (SCALAR_FLOAT_TYPE_P (etype) && reverse))
5591 tree rec_type = make_node (RECORD_TYPE);
5592 vec<constructor_elt, va_gc> *v;
5593 vec_alloc (v, 1);
5594 tree field_type, field;
5596 TYPE_REVERSE_STORAGE_ORDER (rec_type) = reverse;
5598 if (c < 0)
5600 const unsigned HOST_WIDE_INT prec
5601 = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
5602 if (type_unsigned_for_rm (etype))
5603 field_type = make_unsigned_type (prec);
5604 else
5605 field_type = make_signed_type (prec);
5606 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));
5608 else
5609 field_type = etype;
5611 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
5612 NULL_TREE, bitsize_zero_node, c < 0, 0);
5614 finish_record_type (rec_type, field, 1, false);
5616 expr = fold_build1 (NOP_EXPR, field_type, expr);
5617 CONSTRUCTOR_APPEND_ELT (v, field, expr);
5618 expr = gnat_build_constructor (rec_type, v);
5619 expr = unchecked_convert (type, expr, notrunc_p);
5622 /* If we are converting between fixed-size types with different sizes, we
5623 need to pad to have the same size on both sides.
5625 ??? We cannot do it unconditionally because unchecked conversions are
5626 used liberally by the front-end to implement interface thunks:
5628 type ada__tags__addr_ptr is access system.address;
5629 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
5630 return p___size__4 (p__object!(S191s.all));
5632 so we need to skip dereferences. */
5633 else if (!INDIRECT_REF_P (expr)
5634 && TREE_CODE (expr) != STRING_CST
5635 && !(AGGREGATE_TYPE_P (etype) && AGGREGATE_TYPE_P (type))
5636 && ecode != UNCONSTRAINED_ARRAY_TYPE
5637 && TREE_CONSTANT (TYPE_SIZE (etype))
5638 && TREE_CONSTANT (TYPE_SIZE (type))
5639 && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
/* C < 0: source smaller, pad the source; otherwise pad the target and
   extract the value from the first field of the padding record.  */
5641 if (c < 0)
5643 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
5644 false, false, true),
5645 expr);
5646 expr = unchecked_convert (type, expr, notrunc_p);
5648 else
5650 tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
5651 false, false, true);
5652 expr = unchecked_convert (rec_type, expr, notrunc_p);
5653 expr = build3 (COMPONENT_REF, type, expr, TYPE_FIELDS (rec_type),
5654 NULL_TREE);
5658 /* Likewise if we are converting from a fixed-size type to a type with self-
5659 referential size. We use the max size to do the padding in this case. */
5660 else if (!INDIRECT_REF_P (expr)
5661 && TREE_CODE (expr) != STRING_CST
5662 && !(AGGREGATE_TYPE_P (etype) && AGGREGATE_TYPE_P (type))
5663 && ecode != UNCONSTRAINED_ARRAY_TYPE
5664 && TREE_CONSTANT (TYPE_SIZE (etype))
5665 && CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type)))
5667 tree new_size = max_size (TYPE_SIZE (type), true);
5668 c = tree_int_cst_compare (TYPE_SIZE (etype), new_size);
5669 if (c < 0)
5671 expr = convert (maybe_pad_type (etype, new_size, 0, Empty,
5672 false, false, true),
5673 expr);
5674 expr = unchecked_convert (type, expr, notrunc_p);
5676 else
5678 tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
5679 false, false, true);
5680 expr = unchecked_convert (rec_type, expr, notrunc_p);
5681 expr = build3 (COMPONENT_REF, type, expr, TYPE_FIELDS (rec_type),
5682 NULL_TREE);
5686 /* We have a special case when we are converting between two unconstrained
5687 array types. In that case, take the address, convert the fat pointer
5688 types, and dereference. */
5689 else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
5690 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
5691 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
5692 build_unary_op (ADDR_EXPR, NULL_TREE,
5693 expr)));
5695 /* Another special case is when we are converting to a vector type from its
5696 representative array type; this a regular conversion. */
5697 else if (code == VECTOR_TYPE
5698 && ecode == ARRAY_TYPE
5699 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
5700 etype))
5701 expr = convert (type, expr);
5703 /* And, if the array type is not the representative, we try to build an
5704 intermediate vector type of which the array type is the representative
5705 and to do the unchecked conversion between the vector types, in order
5706 to enable further simplifications in the middle-end. */
5707 else if (code == VECTOR_TYPE
5708 && ecode == ARRAY_TYPE
5709 && (tem = build_vector_type_for_array (etype, NULL_TREE)))
5711 expr = convert (tem, expr);
5712 return unchecked_convert (type, expr, notrunc_p);
5715 /* If we are converting a CONSTRUCTOR to a more aligned aggregate type, bump
5716 the alignment of the CONSTRUCTOR to speed up the copy operation. But do
5717 not do it for a conversion between original and packable version to avoid
5718 an infinite recursion. */
5719 else if (TREE_CODE (expr) == CONSTRUCTOR
5720 && AGGREGATE_TYPE_P (type)
5721 && TYPE_NAME (type) != TYPE_NAME (etype)
5722 && TYPE_ALIGN (etype) < TYPE_ALIGN (type))
5724 expr = convert (maybe_pad_type (etype, NULL_TREE, TYPE_ALIGN (type),
5725 Empty, false, false, true),
5726 expr);
5727 return unchecked_convert (type, expr, notrunc_p);
5730 /* If we are converting a CONSTRUCTOR to a larger aggregate type, bump the
5731 size of the CONSTRUCTOR to make sure there are enough allocated bytes.
5732 But do not do it for a conversion between original and packable version
5733 to avoid an infinite recursion. */
5734 else if (TREE_CODE (expr) == CONSTRUCTOR
5735 && AGGREGATE_TYPE_P (type)
5736 && TYPE_NAME (type) != TYPE_NAME (etype)
5737 && TREE_CONSTANT (TYPE_SIZE (type))
5738 && (!TREE_CONSTANT (TYPE_SIZE (etype))
5739 || tree_int_cst_lt (TYPE_SIZE (etype), TYPE_SIZE (type))))
5741 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0,
5742 Empty, false, false, true),
5743 expr);
5744 return unchecked_convert (type, expr, notrunc_p);
5747 /* If we are converting a string constant to a pointer to character, make
5748 sure that the string is not folded into an integer constant. */
5749 else if (TREE_CODE (expr) == STRING_CST
5750 && POINTER_TYPE_P (type)
5751 && TYPE_STRING_FLAG (TREE_TYPE (type)))
5752 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
5754 /* Otherwise, just build a VIEW_CONVERT_EXPR of the expression. */
5755 else
5757 expr = maybe_unconstrained_array (expr);
5758 etype = TREE_TYPE (expr);
5759 ecode = TREE_CODE (etype);
5760 if (can_fold_for_view_convert_p (expr))
5761 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
5762 else
5763 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
5766 /* If the result is a non-biased integral type whose precision is not equal
5767 to its size, sign- or zero-extend the result. But we need not do this
5768 if the input is also an integral type and both are unsigned or both are
5769 signed and have the same precision. */
5770 tree type_rm_size;
5771 if (!notrunc_p
5772 && !biased
5773 && INTEGRAL_TYPE_P (type)
5774 && (type_rm_size = TYPE_RM_SIZE (type))
5775 && tree_int_cst_compare (type_rm_size, TYPE_SIZE (type)) < 0
5776 && !(INTEGRAL_TYPE_P (etype)
5777 && type_unsigned_for_rm (type) == type_unsigned_for_rm (etype)
5778 && (type_unsigned_for_rm (type)
5779 || tree_int_cst_compare (type_rm_size,
5780 TYPE_RM_SIZE (etype)
5781 ? TYPE_RM_SIZE (etype)
5782 : TYPE_SIZE (etype)) == 0)))
5784 if (integer_zerop (type_rm_size))
5785 expr = build_int_cst (type, 0);
5786 else
/* Extend by shifting left to the top of the base type, then arithmetic
   or logical shifting back right, depending on the RM signedness.  */
5788 tree base_type
5789 = gnat_type_for_size (TREE_INT_CST_LOW (TYPE_SIZE (type)),
5790 type_unsigned_for_rm (type));
5791 tree shift_expr
5792 = convert (base_type,
5793 size_binop (MINUS_EXPR,
5794 TYPE_SIZE (type), type_rm_size));
5795 expr
5796 = convert (type,
5797 build_binary_op (RSHIFT_EXPR, base_type,
5798 build_binary_op (LSHIFT_EXPR, base_type,
5799 convert (base_type,
5800 expr),
5801 shift_expr),
5802 shift_expr));
5806 /* An unchecked conversion should never raise Constraint_Error. The code
5807 below assumes that GCC's conversion routines overflow the same way that
5808 the underlying hardware does. This is probably true. In the rare case
5809 when it is false, we can rely on the fact that such conversions are
5810 erroneous anyway. */
5811 if (TREE_CODE (expr) == INTEGER_CST)
5812 TREE_OVERFLOW (expr) = 0;
5814 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
5815 show no longer constant. */
5816 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
5817 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
5818 OEP_ONLY_CONST))
5819 TREE_CONSTANT (expr) = 0;
5821 return expr;
5824 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
5825 the latter being a record type as predicated by Is_Record_Type. */
5827 enum tree_code
5828 tree_code_for_record_type (Entity_Id gnat_type)
5830 Node_Id component_list, component;
5832 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5833 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
5834 if (!Is_Unchecked_Union (gnat_type))
5835 return RECORD_TYPE;
5837 gnat_type = Implementation_Base_Type (gnat_type);
5838 component_list
5839 = Component_List (Type_Definition (Declaration_Node (gnat_type)));
5841 for (component = First_Non_Pragma (Component_Items (component_list));
5842 Present (component);
5843 component = Next_Non_Pragma (component))
5844 if (Ekind (Defining_Entity (component)) == E_Component)
5845 return RECORD_TYPE;
5847 return UNION_TYPE;
5850 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5851 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5852 according to the presence of an alignment clause on the type or, if it
5853 is an array, on the component type. */
5855 bool
5856 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5858 gnat_type = Underlying_Type (gnat_type);
5860 *align_clause = Present (Alignment_Clause (gnat_type));
5862 if (Is_Array_Type (gnat_type))
5864 gnat_type = Underlying_Type (Component_Type (gnat_type));
5865 if (Present (Alignment_Clause (gnat_type)))
5866 *align_clause = true;
5869 if (!Is_Floating_Point_Type (gnat_type))
5870 return false;
5872 if (UI_To_Int (Esize (gnat_type)) != 64)
5873 return false;
5875 return true;
5878 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5879 size is greater or equal to 64 bits, or an array of such a type. Set
5880 ALIGN_CLAUSE according to the presence of an alignment clause on the
5881 type or, if it is an array, on the component type. */
5883 bool
5884 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5886 gnat_type = Underlying_Type (gnat_type);
5888 *align_clause = Present (Alignment_Clause (gnat_type));
5890 if (Is_Array_Type (gnat_type))
5892 gnat_type = Underlying_Type (Component_Type (gnat_type));
5893 if (Present (Alignment_Clause (gnat_type)))
5894 *align_clause = true;
5897 if (!Is_Scalar_Type (gnat_type))
5898 return false;
5900 if (UI_To_Int (Esize (gnat_type)) < 64)
5901 return false;
5903 return true;
5906 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
5907 component of an aggregate type. */
5909 bool
5910 type_for_nonaliased_component_p (tree gnu_type)
5912 /* If the type is passed by reference, we may have pointers to the
5913 component so it cannot be made non-aliased. */
5914 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5915 return false;
5917 /* We used to say that any component of aggregate type is aliased
5918 because the front-end may take 'Reference of it. The front-end
5919 has been enhanced in the meantime so as to use a renaming instead
5920 in most cases, but the back-end can probably take the address of
5921 such a component too so we go for the conservative stance.
5923 For instance, we might need the address of any array type, even
5924 if normally passed by copy, to construct a fat pointer if the
5925 component is used as an actual for an unconstrained formal.
5927 Likewise for record types: even if a specific record subtype is
5928 passed by copy, the parent type might be passed by ref (e.g. if
5929 it's of variable size) and we might take the address of a child
5930 component to pass to a parent formal. We have no way to check
5931 for such conditions here. */
5932 if (AGGREGATE_TYPE_P (gnu_type))
5933 return false;
5935 return true;
5938 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
5940 bool
5941 smaller_form_type_p (tree type, tree orig_type)
5943 tree size, osize;
5945 /* We're not interested in variants here. */
5946 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5947 return false;
5949 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5950 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5951 return false;
5953 size = TYPE_SIZE (type);
5954 osize = TYPE_SIZE (orig_type);
5956 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5957 return false;
5959 return tree_int_cst_lt (size, osize) != 0;
5962 /* Return whether EXPR, which is the renamed object in an object renaming
5963 declaration, can be materialized as a reference (with a REFERENCE_TYPE).
5964 This should be synchronized with Exp_Dbug.Debug_Renaming_Declaration. */
5966 bool
5967 can_materialize_object_renaming_p (Node_Id expr)
/* Walk down the renamed object, following prefixes and renaming chains,
   and fail as soon as a non-addressable construct is encountered.  */
5969 while (true)
5971 expr = Original_Node (expr);
5973 switch (Nkind (expr))
5975 case N_Identifier:
5976 case N_Expanded_Name:
/* Follow a chain of renamings down to the ultimate object.  */
5977 if (!Present (Renamed_Object (Entity (expr))))
5978 return true;
5979 expr = Renamed_Object (Entity (expr));
5980 break;
5982 case N_Selected_Component:
/* Reject components of packed records and components that do not start
   on a storage-unit boundary.  */
5984 if (Is_Packed (Underlying_Type (Etype (Prefix (expr)))))
5985 return false;
5987 const Uint bitpos
5988 = Normalized_First_Bit (Entity (Selector_Name (expr)));
5989 if (bitpos != UI_No_Uint && bitpos != Uint_0)
5990 return false;
5992 expr = Prefix (expr);
5993 break;
5996 case N_Indexed_Component:
5997 case N_Slice:
/* Reject elements and slices of arrays with a packed implementation.  */
5999 const Entity_Id t = Underlying_Type (Etype (Prefix (expr)));
6001 if (Is_Array_Type (t) && Present (Packed_Array_Impl_Type (t)))
6002 return false;
6004 expr = Prefix (expr);
6005 break;
6008 case N_Explicit_Dereference:
6009 expr = Prefix (expr);
6010 break;
6012 default:
6013 return true;
6018 /* Perform final processing on global declarations. */
6020 static GTY (()) tree dummy_global;
6022 void
6023 gnat_write_global_declarations (void)
6025 unsigned int i;
6026 tree iter;
6028 /* If we have declared types as used at the global level, insert them in
6029 the global hash table. We use a dummy variable for this purpose, but
6030 we need to build it unconditionally to avoid -fcompare-debug issues. */
6031 if (first_global_object_name)
6033 struct varpool_node *node;
6034 char *label;
6036 ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, ULONG_MAX);
6037 dummy_global
6038 = build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
6039 void_type_node);
6040 DECL_HARD_REGISTER (dummy_global) = 1;
6041 TREE_STATIC (dummy_global) = 1;
6042 node = varpool_node::get_create (dummy_global);
6043 node->definition = 1;
6044 node->force_output = 1;
6046 if (types_used_by_cur_var_decl)
6047 while (!types_used_by_cur_var_decl->is_empty ())
6049 tree t = types_used_by_cur_var_decl->pop ();
6050 types_used_by_var_decl_insert (t, dummy_global);
6054 /* First output the integral global variables, so that they can be referenced
6055 as bounds by the global dynamic types. Skip external variables, unless we
6056 really need to emit debug info for them, e.g. imported variables. */
6057 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
6058 if (TREE_CODE (iter) == VAR_DECL
6059 && INTEGRAL_TYPE_P (TREE_TYPE (iter))
6060 && (!DECL_EXTERNAL (iter) || !DECL_IGNORED_P (iter)))
6061 rest_of_decl_compilation (iter, true, 0);
6063 /* Now output debug information for the global type declarations. This
6064 ensures that global types whose compilation hasn't been finalized yet,
6065 for example pointers to Taft amendment types, have their compilation
6066 finalized in the right context. */
6067 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
6068 if (TREE_CODE (iter) == TYPE_DECL && !DECL_IGNORED_P (iter))
6069 debug_hooks->type_decl (iter, false);
6071 /* Then output the other global variables. We need to do that after the
6072 information for global types is emitted so that they are finalized. */
6073 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
6074 if (TREE_CODE (iter) == VAR_DECL
6075 && !INTEGRAL_TYPE_P (TREE_TYPE (iter))
6076 && (!DECL_EXTERNAL (iter) || !DECL_IGNORED_P (iter)))
6077 rest_of_decl_compilation (iter, true, 0);
6079 /* Output debug information for the global constants. */
6080 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
6081 if (TREE_CODE (iter) == CONST_DECL && !DECL_IGNORED_P (iter))
6082 debug_hooks->early_global_decl (iter);
6084 /* Output it for the imported functions. */
6085 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
6086 if (TREE_CODE (iter) == FUNCTION_DECL
6087 && DECL_EXTERNAL (iter)
6088 && DECL_INITIAL (iter) == NULL
6089 && !DECL_IGNORED_P (iter)
6090 && DECL_FUNCTION_IS_DEF (iter))
6091 debug_hooks->early_global_decl (iter);
6093 /* Output it for the imported modules/declarations. In GNAT, these are only
6094 materializing subprograms. */
6095 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
6096 if (TREE_CODE (iter) == IMPORTED_DECL && !DECL_IGNORED_P (iter))
6097 debug_hooks->imported_module_or_decl (iter, DECL_NAME (iter),
6098 DECL_CONTEXT (iter), false, false);
6101 /* ************************************************************************
6102 * * GCC builtins support *
6103 * ************************************************************************ */
6105 /* The general scheme is fairly simple:
6107 For each builtin function/type to be declared, gnat_install_builtins calls
6108 internal facilities which eventually get to gnat_pushdecl, which in turn
6109 tracks the so declared builtin function decls in the 'builtin_decls' global
6110 data structure. When an Intrinsic subprogram declaration is processed, we
6111 search this global data structure to retrieve the associated BUILT_IN DECL
6112 node. */
6114 /* Search the chain of currently available builtin declarations for a node
6115 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
6116 found, if any, or NULL_TREE otherwise. */
6117 tree
6118 builtin_decl_for (tree name)
6120 unsigned i;
6121 tree decl;
6123 FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
6124 if (DECL_NAME (decl) == name)
6125 return decl;
6127 return NULL_TREE;
6130 /* The code below eventually exposes gnat_install_builtins, which declares
6131 the builtin types and functions we might need, either internally or as
6132 user accessible facilities.
6134 ??? This is a first implementation shot, still in rough shape. It is
6135 heavily inspired from the "C" family implementation, with chunks copied
6136 verbatim from there.
6138 Two obvious improvement candidates are:
6139 o Use a more efficient name/decl mapping scheme
6140 o Devise a middle-end infrastructure to avoid having to copy
6141 pieces between front-ends. */
6143 /* ----------------------------------------------------------------------- *
6144 * BUILTIN ELEMENTARY TYPES *
6145 * ----------------------------------------------------------------------- */
6147 /* Standard data types to be used in builtin argument declarations. */
/* Indices into c_global_trees below, mirroring the C family front-end.  */
6149 enum c_tree_index
6151 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
6152 CTI_STRING_TYPE,
6153 CTI_CONST_STRING_TYPE,
6155 CTI_MAX
6158 static tree c_global_trees[CTI_MAX];
6160 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
6161 #define string_type_node c_global_trees[CTI_STRING_TYPE]
6162 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
6164 /* ??? In addition to some attribute handlers, we currently don't support a
6165 (small) number of builtin-types, which in turn inhibits support for a
6166 number of builtin functions. */
6167 #define wint_type_node void_type_node
6168 #define intmax_type_node void_type_node
6169 #define uintmax_type_node void_type_node
6171 /* Used to help initialize the builtin-types.def table. When a type of
6172 the correct size doesn't exist, use error_mark_node instead of NULL.
6173 The later results in segfaults even when a decl using the type doesn't
6174 get invoked. */
6176 static tree
6177 builtin_type_for_size (int size, bool unsignedp)
6179 tree type = gnat_type_for_size (size, unsignedp);
6180 return type ? type : error_mark_node;
6183 /* Build/push the elementary type decls that builtin functions/types
6184 will need. */
6186 static void
6187 install_builtin_elementary_types (void)
6189 signed_size_type_node = gnat_signed_type_for (size_type_node);
6190 pid_type_node = integer_type_node;
6192 string_type_node = build_pointer_type (char_type_node);
6193 const_string_type_node
6194 = build_pointer_type (build_qualified_type
6195 (char_type_node, TYPE_QUAL_CONST));
6198 /* ----------------------------------------------------------------------- *
6199 * BUILTIN FUNCTION TYPES *
6200 * ----------------------------------------------------------------------- */
6202 /* Now, builtin function types per se. */
/* Enumerate every builtin function type listed in builtin-types.def and
   ada-builtin-types.def; each DEF_* macro expands to one enumerator.  */
6204 enum c_builtin_type
6206 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
6207 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
6208 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
6209 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
6210 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
6211 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
6212 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
6213 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6214 ARG6) NAME,
6215 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6216 ARG6, ARG7) NAME,
6217 #define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6218 ARG6, ARG7, ARG8) NAME,
6219 #define DEF_FUNCTION_TYPE_9(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6220 ARG6, ARG7, ARG8, ARG9) NAME,
6221 #define DEF_FUNCTION_TYPE_10(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6222 ARG6, ARG7, ARG8, ARG9, ARG10) NAME,
6223 #define DEF_FUNCTION_TYPE_11(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6224 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) NAME,
6225 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
6226 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
6227 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
6228 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
6229 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
6230 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
6231 NAME,
6232 #define DEF_FUNCTION_TYPE_VAR_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6233 ARG6) NAME,
6234 #define DEF_FUNCTION_TYPE_VAR_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6235 ARG6, ARG7) NAME,
6236 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
6237 #include "builtin-types.def"
6238 #include "ada-builtin-types.def"
6239 #undef DEF_PRIMITIVE_TYPE
6240 #undef DEF_FUNCTION_TYPE_0
6241 #undef DEF_FUNCTION_TYPE_1
6242 #undef DEF_FUNCTION_TYPE_2
6243 #undef DEF_FUNCTION_TYPE_3
6244 #undef DEF_FUNCTION_TYPE_4
6245 #undef DEF_FUNCTION_TYPE_5
6246 #undef DEF_FUNCTION_TYPE_6
6247 #undef DEF_FUNCTION_TYPE_7
6248 #undef DEF_FUNCTION_TYPE_8
6249 #undef DEF_FUNCTION_TYPE_9
6250 #undef DEF_FUNCTION_TYPE_10
6251 #undef DEF_FUNCTION_TYPE_11
6252 #undef DEF_FUNCTION_TYPE_VAR_0
6253 #undef DEF_FUNCTION_TYPE_VAR_1
6254 #undef DEF_FUNCTION_TYPE_VAR_2
6255 #undef DEF_FUNCTION_TYPE_VAR_3
6256 #undef DEF_FUNCTION_TYPE_VAR_4
6257 #undef DEF_FUNCTION_TYPE_VAR_5
6258 #undef DEF_FUNCTION_TYPE_VAR_6
6259 #undef DEF_FUNCTION_TYPE_VAR_7
6260 #undef DEF_POINTER_TYPE
6261 BT_LAST
6264 typedef enum c_builtin_type builtin_type;
6266 /* A temporary array used in communication with def_fn_type. */
6267 static GTY(()) tree builtin_types[(int) BT_LAST + 1];
6269 /* A helper function for install_builtin_types. Build function type
6270 for DEF with return type RET and N arguments. If VAR is true, then the
6271 function should be variadic after those N arguments.
6273 Takes special care not to ICE if any of the types involved are
6274 error_mark_node, which indicates that said type is not in fact available
6275 (see builtin_type_for_size). In which case the function type as a whole
6276 should be error_mark_node. */
6278 static void
6279 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
6281 tree t;
6282 tree *args = XALLOCAVEC (tree, n);
6283 va_list list;
6284 int i;
6286 va_start (list, n);
/* Collect the N argument types; bail out to egress as soon as one of them
   is error_mark_node, so that T (hence builtin_types[def]) stays that.  */
6287 for (i = 0; i < n; ++i)
6289 builtin_type a = (builtin_type) va_arg (list, int);
6290 t = builtin_types[a];
6291 if (t == error_mark_node)
6292 goto egress;
6293 args[i] = t;
6296 t = builtin_types[ret];
6297 if (t == error_mark_node)
6298 goto egress;
6299 if (var)
6300 t = build_varargs_function_type_array (t, n, args);
6301 else
6302 t = build_function_type_array (t, n, args);
6304 egress:
6305 builtin_types[def] = t;
6306 va_end (list);
6309 /* Build the builtin function types and install them in the builtin_types
6310 array for later use in builtin function decls. */
6312 static void
6313 install_builtin_function_types (void)
6315 tree va_list_ref_type_node;
6316 tree va_list_arg_type_node;
/* An array va_list decays to a pointer when passed; other va_lists are
   passed by value and referenced via a reference type.  */
6318 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
6320 va_list_arg_type_node = va_list_ref_type_node =
6321 build_pointer_type (TREE_TYPE (va_list_type_node));
6323 else
6325 va_list_arg_type_node = va_list_type_node;
6326 va_list_ref_type_node = build_reference_type (va_list_type_node);
/* Re-expand the same .def files as in the c_builtin_type enum above, this
   time building the actual type nodes via def_fn_type.  */
6329 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
6330 builtin_types[ENUM] = VALUE;
6331 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
6332 def_fn_type (ENUM, RETURN, 0, 0);
6333 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
6334 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
6335 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
6336 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
6337 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
6338 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
6339 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
6340 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
6341 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
6342 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
6343 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6344 ARG6) \
6345 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
6346 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6347 ARG6, ARG7) \
6348 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
6349 #define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6350 ARG6, ARG7, ARG8) \
6351 def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
6352 ARG7, ARG8);
6353 #define DEF_FUNCTION_TYPE_9(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6354 ARG6, ARG7, ARG8, ARG9) \
6355 def_fn_type (ENUM, RETURN, 0, 9, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
6356 ARG7, ARG8, ARG9);
6357 #define DEF_FUNCTION_TYPE_10(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
6358 ARG6, ARG7, ARG8, ARG9, ARG10) \
6359 def_fn_type (ENUM, RETURN, 0, 10, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
6360 ARG7, ARG8, ARG9, ARG10);
6361 #define DEF_FUNCTION_TYPE_11(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
6362 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) \
6363 def_fn_type (ENUM, RETURN, 0, 11, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
6364 ARG7, ARG8, ARG9, ARG10, ARG11);
6365 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
6366 def_fn_type (ENUM, RETURN, 1, 0);
6367 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
6368 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
6369 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
6370 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
6371 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
6372 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
6373 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
6374 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
6375 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
6376 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
6377 #define DEF_FUNCTION_TYPE_VAR_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6378 ARG6) \
6379 def_fn_type (ENUM, RETURN, 1, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
6380 #define DEF_FUNCTION_TYPE_VAR_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6381 ARG6, ARG7) \
6382 def_fn_type (ENUM, RETURN, 1, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
6383 #define DEF_POINTER_TYPE(ENUM, TYPE) \
6384 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
6386 #include "builtin-types.def"
6387 #include "ada-builtin-types.def"
6389 #undef DEF_PRIMITIVE_TYPE
6390 #undef DEF_FUNCTION_TYPE_0
6391 #undef DEF_FUNCTION_TYPE_1
6392 #undef DEF_FUNCTION_TYPE_2
6393 #undef DEF_FUNCTION_TYPE_3
6394 #undef DEF_FUNCTION_TYPE_4
6395 #undef DEF_FUNCTION_TYPE_5
6396 #undef DEF_FUNCTION_TYPE_6
6397 #undef DEF_FUNCTION_TYPE_7
6398 #undef DEF_FUNCTION_TYPE_8
6399 #undef DEF_FUNCTION_TYPE_9
6400 #undef DEF_FUNCTION_TYPE_10
6401 #undef DEF_FUNCTION_TYPE_11
6402 #undef DEF_FUNCTION_TYPE_VAR_0
6403 #undef DEF_FUNCTION_TYPE_VAR_1
6404 #undef DEF_FUNCTION_TYPE_VAR_2
6405 #undef DEF_FUNCTION_TYPE_VAR_3
6406 #undef DEF_FUNCTION_TYPE_VAR_4
6407 #undef DEF_FUNCTION_TYPE_VAR_5
6408 #undef DEF_FUNCTION_TYPE_VAR_6
6409 #undef DEF_FUNCTION_TYPE_VAR_7
6410 #undef DEF_POINTER_TYPE
6411 builtin_types[(int) BT_LAST] = NULL_TREE;
6414 /* ----------------------------------------------------------------------- *
6415 * BUILTIN ATTRIBUTES *
6416 * ----------------------------------------------------------------------- */
/* Enumerators for the attribute trees built from builtin-attrs.def.
   The *.def file is expanded with every DEF_ATTR_* macro mapped to
   just its ENUM argument (the "X macro" technique), so the enum stays
   in lock-step with the table filled in by install_builtin_attributes
   below.  */
enum built_in_attribute
{
#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
#define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_STRING
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
  ATTR_LAST
};

/* One tree per enumerator above; rooted for the garbage collector
   via GTY.  */
static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
/* Populate built_in_attributes[] by re-expanding builtin-attrs.def,
   this time mapping each DEF_ATTR_* macro to code that builds the
   corresponding tree.  Must run before install_builtin_functions,
   which reads this table.  */

static void
install_builtin_attributes (void)
{
  /* Fill in the built_in_attributes array.  */
#define DEF_ATTR_NULL_TREE(ENUM)				\
  built_in_attributes[(int) ENUM] = NULL_TREE;
#define DEF_ATTR_INT(ENUM, VALUE)				\
  built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
#define DEF_ATTR_STRING(ENUM, VALUE)				\
  built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
#define DEF_ATTR_IDENT(ENUM, STRING)				\
  built_in_attributes[(int) ENUM] = get_identifier (STRING);
  /* TREE_LIST entries reference earlier slots, so builtin-attrs.def
     is relied upon to define PURPOSE/VALUE/CHAIN before use.  */
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN)	\
  built_in_attributes[(int) ENUM]			\
    = tree_cons (built_in_attributes[(int) PURPOSE],	\
		 built_in_attributes[(int) VALUE],	\
		 built_in_attributes[(int) CHAIN]);
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_STRING
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
}
6461 /* Handle a "const" attribute; arguments as in
6462 struct attribute_spec.handler. */
6464 static tree
6465 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
6466 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6467 bool *no_add_attrs)
6469 if (TREE_CODE (*node) == FUNCTION_DECL)
6470 TREE_READONLY (*node) = 1;
6471 else
6472 *no_add_attrs = true;
6474 return NULL_TREE;
6477 /* Handle a "nothrow" attribute; arguments as in
6478 struct attribute_spec.handler. */
6480 static tree
6481 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
6482 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6483 bool *no_add_attrs)
6485 if (TREE_CODE (*node) == FUNCTION_DECL)
6486 TREE_NOTHROW (*node) = 1;
6487 else
6488 *no_add_attrs = true;
6490 return NULL_TREE;
6493 /* Handle a "expected_throw" attribute; arguments as in
6494 struct attribute_spec.handler. */
6496 static tree
6497 handle_expected_throw_attribute (tree *node, tree ARG_UNUSED (name),
6498 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6499 bool *no_add_attrs)
6501 if (TREE_CODE (*node) == FUNCTION_DECL)
6502 /* No flag to set here. */;
6503 else
6504 *no_add_attrs = true;
6506 return NULL_TREE;
6509 /* Handle a "pure" attribute; arguments as in
6510 struct attribute_spec.handler. */
6512 static tree
6513 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6514 int ARG_UNUSED (flags), bool *no_add_attrs)
6516 if (TREE_CODE (*node) == FUNCTION_DECL)
6517 DECL_PURE_P (*node) = 1;
6518 /* TODO: support types. */
6519 else
6521 warning (OPT_Wattributes, "%qs attribute ignored",
6522 IDENTIFIER_POINTER (name));
6523 *no_add_attrs = true;
6526 return NULL_TREE;
/* Handle a "no vops" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
			 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
			 bool *ARG_UNUSED (no_add_attrs))
{
  /* This internal attribute is only ever placed on built-in function
     declarations, so anything else is a front-end bug.  */
  gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
  DECL_IS_NOVOPS (*node) = 1;
  return NULL_TREE;
}
6542 /* Helper for nonnull attribute handling; fetch the operand number
6543 from the attribute argument list. */
6545 static bool
6546 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
6548 /* Verify the arg number is a constant. */
6549 if (!tree_fits_uhwi_p (arg_num_expr))
6550 return false;
6552 *valp = TREE_INT_CST_LOW (arg_num_expr);
6553 return true;
/* Handle the "nonnull" attribute.  NODE points at the function type
   being annotated; ARGS is the (possibly empty) list of 1-based
   operand numbers.  On any diagnosed problem the attribute is
   dropped via *NO_ADD_ATTRS.  */
static tree
handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
			  tree args, int ARG_UNUSED (flags),
			  bool *no_add_attrs)
{
  tree type = *node;
  unsigned HOST_WIDE_INT attr_arg_num;

  /* If no arguments are specified, all pointer arguments should be
     non-null.  Verify a full prototype is given so that the arguments
     will have the correct types when we actually check them later.
     Avoid diagnosing type-generic built-ins since those have no
     prototype.  */
  if (!args)
    {
      if (!prototype_p (type)
	  && (!TYPE_ATTRIBUTES (type)
	      || !lookup_attribute ("type generic", TYPE_ATTRIBUTES (type))))
	{
	  error ("%qs attribute without arguments on a non-prototype",
		 "nonnull");
	  *no_add_attrs = true;
	}
      return NULL_TREE;
    }

  /* Argument list specified.  Verify that each argument number references
     a pointer argument.  */
  /* NOTE(review): attr_arg_num is never advanced in this loop, so the
     diagnostics below always report "argument 1" — confirm against the
     c-family handler whether an increment is intended.  */
  for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
    {
      unsigned HOST_WIDE_INT arg_num = 0, ck_num;

      if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
	{
	  error ("%qs argument has invalid operand number (argument %lu)",
		 "nonnull", (unsigned long) attr_arg_num);
	  *no_add_attrs = true;
	  return NULL_TREE;
	}

      if (prototype_p (type))
	{
	  function_args_iterator iter;
	  tree argument;

	  /* Walk the parameter types until we reach operand ARG_NUM
	     or run off the end of the list.  */
	  function_args_iter_init (&iter, type);
	  for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
	    {
	      argument = function_args_iter_cond (&iter);
	      if (!argument || ck_num == arg_num)
		break;
	    }

	  /* A trailing VOID_TYPE marks the end of a prototyped list,
	     so hitting it also means the operand number is too big.  */
	  if (!argument
	      || TREE_CODE (argument) == VOID_TYPE)
	    {
	      error ("%qs argument with out-of-range operand number "
		     "(argument %lu, operand %lu)", "nonnull",
		     (unsigned long) attr_arg_num, (unsigned long) arg_num);
	      *no_add_attrs = true;
	      return NULL_TREE;
	    }

	  if (TREE_CODE (argument) != POINTER_TYPE)
	    {
	      error ("%qs argument references non-pointer operand "
		     "(argument %lu, operand %lu)", "nonnull",
		     (unsigned long) attr_arg_num, (unsigned long) arg_num);
	      *no_add_attrs = true;
	      return NULL_TREE;
	    }
	}
    }

  return NULL_TREE;
}
/* Handle a "sentinel" attribute.  Requires a prototyped variadic
   function; the optional argument, if present, must be a non-negative
   integer constant giving the sentinel position.  */

static tree
handle_sentinel_attribute (tree *node, tree name, tree args,
			   int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (!prototype_p (*node))
    {
      warning (OPT_Wattributes,
	       "%qs attribute requires prototypes with named arguments",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }
  else
    {
      if (!stdarg_p (*node))
	{
	  warning (OPT_Wattributes,
		   "%qs attribute only applies to variadic functions",
		   IDENTIFIER_POINTER (name));
	  *no_add_attrs = true;
	}
    }

  /* Validate the optional position argument even if the checks above
     already dropped the attribute.  */
  if (args)
    {
      tree position = TREE_VALUE (args);

      if (TREE_CODE (position) != INTEGER_CST)
	{
	  warning (0, "requested position is not an integer constant");
	  *no_add_attrs = true;
	}
      else
	{
	  if (tree_int_cst_lt (position, integer_zero_node))
	    {
	      warning (0, "requested position is less than zero");
	      *no_add_attrs = true;
	    }
	}
    }

  return NULL_TREE;
}
/* Handle a "noreturn" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
			   int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree type = TREE_TYPE (*node);

  /* See FIXME comment in c_common_attribute_table.  */
  if (TREE_CODE (*node) == FUNCTION_DECL)
    /* TREE_THIS_VOLATILE on a FUNCTION_DECL is the middle end's
       "does not return" flag.  */
    TREE_THIS_VOLATILE (*node) = 1;
  else if (TREE_CODE (type) == POINTER_TYPE
	   && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
    /* For a pointer to function, rebuild the pointer type with the
       pointed-to function type volatile-qualified instead.  */
    TREE_TYPE (*node)
      = build_pointer_type
	(change_qualified_type (TREE_TYPE (type), TYPE_QUAL_VOLATILE));
  else
    {
      warning (OPT_Wattributes, "%qs attribute ignored",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
6707 /* Handle a "stack_protect" attribute; arguments as in
6708 struct attribute_spec.handler. */
6710 static tree
6711 handle_stack_protect_attribute (tree *node, tree name, tree, int,
6712 bool *no_add_attrs)
6714 if (TREE_CODE (*node) != FUNCTION_DECL)
6716 warning (OPT_Wattributes, "%qE attribute ignored", name);
6717 *no_add_attrs = true;
6720 return NULL_TREE;
6723 /* Handle a "no_stack_protector" attribute; arguments as in
6724 struct attribute_spec.handler. */
6726 static tree
6727 handle_no_stack_protector_attribute (tree *node, tree name, tree, int,
6728 bool *no_add_attrs)
6730 if (TREE_CODE (*node) != FUNCTION_DECL)
6732 warning (OPT_Wattributes, "%qE attribute ignored", name);
6733 *no_add_attrs = true;
6736 return NULL_TREE;
/* Handle a "strub" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_strub_attribute (tree *, tree, tree, int, bool *no_add_attrs)
{
  /* The attribute is unconditionally dropped here; presumably the
     strub machinery processes it elsewhere — confirm before relying
     on this handler for validation.  */
  *no_add_attrs = true;
  return NULL_TREE;
}
6749 /* Handle a "noinline" attribute; arguments as in
6750 struct attribute_spec.handler. */
6752 static tree
6753 handle_noinline_attribute (tree *node, tree name,
6754 tree ARG_UNUSED (args),
6755 int ARG_UNUSED (flags), bool *no_add_attrs)
6757 if (TREE_CODE (*node) == FUNCTION_DECL)
6759 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (*node)))
6761 warning (OPT_Wattributes, "%qE attribute ignored due to conflict "
6762 "with attribute %qs", name, "always_inline");
6763 *no_add_attrs = true;
6765 else
6766 DECL_UNINLINABLE (*node) = 1;
6768 else
6770 warning (OPT_Wattributes, "%qE attribute ignored", name);
6771 *no_add_attrs = true;
6774 return NULL_TREE;
6777 /* Handle a "noclone" attribute; arguments as in
6778 struct attribute_spec.handler. */
6780 static tree
6781 handle_noclone_attribute (tree *node, tree name,
6782 tree ARG_UNUSED (args),
6783 int ARG_UNUSED (flags), bool *no_add_attrs)
6785 if (TREE_CODE (*node) != FUNCTION_DECL)
6787 warning (OPT_Wattributes, "%qE attribute ignored", name);
6788 *no_add_attrs = true;
6791 return NULL_TREE;
6794 /* Handle a "no_icf" attribute; arguments as in
6795 struct attribute_spec.handler. */
6797 static tree
6798 handle_noicf_attribute (tree *node, tree name,
6799 tree ARG_UNUSED (args),
6800 int ARG_UNUSED (flags), bool *no_add_attrs)
6802 if (TREE_CODE (*node) != FUNCTION_DECL)
6804 warning (OPT_Wattributes, "%qE attribute ignored", name);
6805 *no_add_attrs = true;
6808 return NULL_TREE;
6811 /* Handle a "noipa" attribute; arguments as in
6812 struct attribute_spec.handler. */
6814 static tree
6815 handle_noipa_attribute (tree *node, tree name, tree, int, bool *no_add_attrs)
6817 if (TREE_CODE (*node) != FUNCTION_DECL)
6819 warning (OPT_Wattributes, "%qE attribute ignored", name);
6820 *no_add_attrs = true;
6823 return NULL_TREE;
/* Handle a "leaf" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_leaf_attribute (tree *node, tree name, tree ARG_UNUSED (args),
		       int ARG_UNUSED (flags), bool *no_add_attrs)
{
  /* Both checks below run independently, so a non-public function can
     receive both diagnostics' conditions.  */
  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (OPT_Wattributes, "%qE attribute ignored", name);
      *no_add_attrs = true;
    }
  /* "leaf" only matters across translation units, so it is pointless
     on a non-public declaration.  NOTE(review): TREE_PUBLIC is also
     read when *node is not a FUNCTION_DECL — confirm that is
     intended.  */
  if (!TREE_PUBLIC (*node))
    {
      warning (OPT_Wattributes, "%qE attribute has no effect", name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
6847 /* Handle a "always_inline" attribute; arguments as in
6848 struct attribute_spec.handler. */
6850 static tree
6851 handle_always_inline_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6852 int ARG_UNUSED (flags), bool *no_add_attrs)
6854 if (TREE_CODE (*node) == FUNCTION_DECL)
6856 /* Set the attribute and mark it for disregarding inline limits. */
6857 DECL_DISREGARD_INLINE_LIMITS (*node) = 1;
6859 else
6861 warning (OPT_Wattributes, "%qE attribute ignored", name);
6862 *no_add_attrs = true;
6865 return NULL_TREE;
6868 /* Handle a "malloc" attribute; arguments as in
6869 struct attribute_spec.handler. */
6871 static tree
6872 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6873 int ARG_UNUSED (flags), bool *no_add_attrs)
6875 if (TREE_CODE (*node) == FUNCTION_DECL
6876 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
6877 DECL_IS_MALLOC (*node) = 1;
6878 else
6880 warning (OPT_Wattributes, "%qs attribute ignored",
6881 IDENTIFIER_POINTER (name));
6882 *no_add_attrs = true;
6885 return NULL_TREE;
/* Fake handler for attributes we don't properly support: accept the
   attribute without doing anything with it.  */

tree
fake_attribute_handler (tree * ARG_UNUSED (node),
			tree ARG_UNUSED (name),
			tree ARG_UNUSED (args),
			int ARG_UNUSED (flags),
			bool * ARG_UNUSED (no_add_attrs))
{
  return NULL_TREE;
}
/* Handle a "type_generic" attribute.  Internal marker used on
   built-in declarations; the asserts enforce the expected shape
   rather than diagnosing user input.  */

static tree
handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
			       tree ARG_UNUSED (args), int ARG_UNUSED (flags),
			       bool * ARG_UNUSED (no_add_attrs))
{
  /* Ensure we have a function type.  */
  gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);

  /* Ensure we have a variadic function.  */
  gcc_assert (!prototype_p (*node) || stdarg_p (*node));

  return NULL_TREE;
}
6916 /* Handle a "flatten" attribute; arguments as in
6917 struct attribute_spec.handler. */
6919 static tree
6920 handle_flatten_attribute (tree *node, tree name,
6921 tree args ATTRIBUTE_UNUSED,
6922 int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
6924 if (TREE_CODE (*node) == FUNCTION_DECL)
6925 /* Do nothing else, just set the attribute. We'll get at
6926 it later with lookup_attribute. */
6928 else
6930 warning (OPT_Wattributes, "%qE attribute ignored", name);
6931 *no_add_attrs = true;
6934 return NULL_TREE;
/* Handle a "used" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_used_attribute (tree *pnode, tree name, tree ARG_UNUSED (args),
		       int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree node = *pnode;

  /* Accepted on functions, static variables and type declarations.  */
  if (TREE_CODE (node) == FUNCTION_DECL
      || (VAR_P (node) && TREE_STATIC (node))
      || (TREE_CODE (node) == TYPE_DECL))
    {
      TREE_USED (node) = 1;
      /* DECL_PRESERVE_P keeps the entity from being removed even when
	 it appears unreferenced.  */
      DECL_PRESERVE_P (node) = 1;
      if (VAR_P (node))
	DECL_READ_P (node) = 1;
    }
  else
    {
      warning (OPT_Wattributes, "%qE attribute ignored", name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
6964 /* Handle a "cold" and attribute; arguments as in
6965 struct attribute_spec.handler. */
6967 static tree
6968 handle_cold_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6969 int ARG_UNUSED (flags), bool *no_add_attrs)
6971 if (TREE_CODE (*node) == FUNCTION_DECL
6972 || TREE_CODE (*node) == LABEL_DECL)
6974 /* Attribute cold processing is done later with lookup_attribute. */
6976 else
6978 warning (OPT_Wattributes, "%qE attribute ignored", name);
6979 *no_add_attrs = true;
6982 return NULL_TREE;
6985 /* Handle a "hot" and attribute; arguments as in
6986 struct attribute_spec.handler. */
6988 static tree
6989 handle_hot_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6990 int ARG_UNUSED (flags), bool *no_add_attrs)
6992 if (TREE_CODE (*node) == FUNCTION_DECL
6993 || TREE_CODE (*node) == LABEL_DECL)
6995 /* Attribute hot processing is done later with lookup_attribute. */
6997 else
6999 warning (OPT_Wattributes, "%qE attribute ignored", name);
7000 *no_add_attrs = true;
7003 return NULL_TREE;
/* Handle a "simd" attribute.  Translates the front-end attribute into
   an "omp declare simd" attribute on the declaration, optionally
   carrying an OMP_CLAUSE_{IN,NOTIN}BRANCH clause from the string
   argument.  */

static tree
handle_simd_attribute (tree *node, tree name, tree args, int, bool *no_add_attrs)
{
  if (TREE_CODE (*node) == FUNCTION_DECL)
    {
      tree t = get_identifier ("omp declare simd");
      tree attr = NULL_TREE;
      if (args)
	{
	  tree id = TREE_VALUE (args);

	  /* The only accepted argument forms are the strings
	     "inbranch" and "notinbranch".  */
	  if (TREE_CODE (id) != STRING_CST)
	    {
	      error ("attribute %qE argument not a string", name);
	      *no_add_attrs = true;
	      return NULL_TREE;
	    }

	  if (strcmp (TREE_STRING_POINTER (id), "notinbranch") == 0)
	    attr = build_omp_clause (DECL_SOURCE_LOCATION (*node),
				     OMP_CLAUSE_NOTINBRANCH);
	  else if (strcmp (TREE_STRING_POINTER (id), "inbranch") == 0)
	    attr = build_omp_clause (DECL_SOURCE_LOCATION (*node),
				     OMP_CLAUSE_INBRANCH);
	  else
	    {
	      error ("only %<inbranch%> and %<notinbranch%> flags are "
		     "allowed for %<__simd__%> attribute");
	      *no_add_attrs = true;
	      return NULL_TREE;
	    }
	}

      /* Chain the "omp declare simd" attribute (with the clause, if
	 any, as its value) onto the declaration.  */
      DECL_ATTRIBUTES (*node)
	= tree_cons (t, build_tree_list (NULL_TREE, attr),
		     DECL_ATTRIBUTES (*node));
    }
  else
    {
      warning (OPT_Wattributes, "%qE attribute ignored", name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
/* Handle a "target" attribute.  Validates the decl kind, checks for a
   conflicting "target_clones" attribute, then lets the target hook
   validate the option strings.  */

static tree
handle_target_attribute (tree *node, tree name, tree args, int flags,
			 bool *no_add_attrs)
{
  /* Ensure we have a function type.  */
  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (OPT_Wattributes, "%qE attribute ignored", name);
      *no_add_attrs = true;
    }
  else if (lookup_attribute ("target_clones", DECL_ATTRIBUTES (*node)))
    {
      warning (OPT_Wattributes, "%qE attribute ignored due to conflict "
	       "with %qs attribute", name, "target_clones");
      *no_add_attrs = true;
    }
  else if (!targetm.target_option.valid_attribute_p (*node, name, args, flags))
    *no_add_attrs = true;

  /* Check that there's no empty string in values of the attribute.
     Note this loop runs even when a check above already dropped the
     attribute.  */
  for (tree t = args; t != NULL_TREE; t = TREE_CHAIN (t))
    {
      tree value = TREE_VALUE (t);
      if (TREE_CODE (value) == STRING_CST
	  && TREE_STRING_LENGTH (value) == 1
	  && TREE_STRING_POINTER (value)[0] == '\0')
	{
	  warning (OPT_Wattributes, "empty string in attribute %<target%>");
	  *no_add_attrs = true;
	}
    }

  return NULL_TREE;
}
/* Handle a "target_clones" attribute.  Conflicts with both
   "always_inline" and "target"; when accepted, the function is marked
   uninlinable.  */

static tree
handle_target_clones_attribute (tree *node, tree name, tree ARG_UNUSED (args),
				int ARG_UNUSED (flags), bool *no_add_attrs)
{
  /* Ensure we have a function type.  */
  if (TREE_CODE (*node) == FUNCTION_DECL)
    {
      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (*node)))
	{
	  warning (OPT_Wattributes, "%qE attribute ignored due to conflict "
		   "with %qs attribute", name, "always_inline");
	  *no_add_attrs = true;
	}
      else if (lookup_attribute ("target", DECL_ATTRIBUTES (*node)))
	{
	  warning (OPT_Wattributes, "%qE attribute ignored due to conflict "
		   "with %qs attribute", name, "target");
	  *no_add_attrs = true;
	}
      else
	/* Do not inline functions with multiple clone targets.  */
	DECL_UNINLINABLE (*node) = 1;
    }
  else
    {
      warning (OPT_Wattributes, "%qE attribute ignored", name);
      *no_add_attrs = true;
    }
  return NULL_TREE;
}
/* Handle a "vector_size" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_vector_size_attribute (tree *node, tree name, tree args,
			      int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree type = *node;
  tree vector_type;

  /* The attribute is consumed here; it is never kept on the type.  */
  *no_add_attrs = true;

  /* We need to provide for vector pointers, vector arrays, and
     functions returning vectors.  For example:

       __attribute__((vector_size(16))) short *foo;

     In this case, the mode is SI, but the type being modified is
     HI, so we need to look further.  */
  while (POINTER_TYPE_P (type)
	 || TREE_CODE (type) == FUNCTION_TYPE
	 || TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);

  /* build_vector_type_for_size diagnoses invalid sizes/types itself
     and returns NULL_TREE in that case.  */
  vector_type = build_vector_type_for_size (type, TREE_VALUE (args), name);
  if (!vector_type)
    return NULL_TREE;

  /* Build back pointers if needed.  */
  *node = reconstruct_complex_type (*node, vector_type);

  return NULL_TREE;
}
/* Handle a "vector_type" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
			      int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree type = *node;
  tree vector_type;

  /* The attribute is consumed here; it is never kept on the type.  */
  *no_add_attrs = true;

  if (TREE_CODE (type) != ARRAY_TYPE)
    {
      error ("attribute %qs applies to array types only",
	     IDENTIFIER_POINTER (name));
      return NULL_TREE;
    }

  /* build_vector_type_for_array diagnoses unsuitable arrays itself
     and returns NULL_TREE in that case.  */
  vector_type = build_vector_type_for_array (type, name);
  if (!vector_type)
    return NULL_TREE;

  /* Remember the original array type so debug info and the like can
     still present the array view of the data.  */
  TYPE_REPRESENTATIVE_ARRAY (vector_type) = type;
  *node = vector_type;

  return NULL_TREE;
}
/* Handle a "zero_call_used_regs" attribute; arguments as in
   struct attribute_spec.handler.  The single argument must name one
   of the entries in zero_call_used_regs_opts.  */

static tree
handle_zero_call_used_regs_attribute (tree *node, tree name, tree args,
				      int ARG_UNUSED (flags),
				      bool *no_add_attrs)
{
  tree decl = *node;
  tree id = TREE_VALUE (args);

  if (TREE_CODE (decl) != FUNCTION_DECL)
    {
      error_at (DECL_SOURCE_LOCATION (decl),
		"%qE attribute applies only to functions", name);
      *no_add_attrs = true;
      return NULL_TREE;
    }

  /* pragma Machine_Attribute turns string arguments into identifiers.
     Reverse it.  Note the rebuilt string is stored back into ARGS so
     later consumers of the attribute see a STRING_CST too.  */
  if (TREE_CODE (id) == IDENTIFIER_NODE)
    id = TREE_VALUE (args) = build_string
      (IDENTIFIER_LENGTH (id), IDENTIFIER_POINTER (id));

  if (TREE_CODE (id) != STRING_CST)
    {
      error_at (DECL_SOURCE_LOCATION (decl),
		"%qE argument not a string", name);
      *no_add_attrs = true;
      return NULL_TREE;
    }

  /* Look the argument up in the table of supported modes.  */
  bool found = false;
  for (unsigned int i = 0; zero_call_used_regs_opts[i].name != NULL; ++i)
    if (strcmp (TREE_STRING_POINTER (id),
		zero_call_used_regs_opts[i].name) == 0)
      {
	found = true;
	break;
      }

  if (!found)
    {
      error_at (DECL_SOURCE_LOCATION (decl),
		"unrecognized %qE attribute argument %qs",
		name, TREE_STRING_POINTER (id));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
7240 /* ----------------------------------------------------------------------- *
7241 * BUILTIN FUNCTIONS *
7242 * ----------------------------------------------------------------------- */
/* Worker for DEF_BUILTIN.  Possibly define a builtin function with one or two
   names.  Does not declare a non-__builtin_ function if flag_no_builtin, or
   if nonansi_p and flag_no_nonansi_builtin.  */

static void
def_builtin_1 (enum built_in_function fncode,
	       const char *name,
	       enum built_in_class fnclass,
	       tree fntype, tree libtype,
	       bool both_p, bool fallback_p,
	       bool nonansi_p ATTRIBUTE_UNUSED,
	       tree fnattrs, bool implicit_p)
{
  tree decl;
  const char *libname;

  /* Preserve an already installed decl.  It most likely was setup in advance
     (e.g. as part of the internal builtins) for specific reasons.  */
  if (builtin_decl_explicit (fncode))
    return;

  /* A function type of error_mark_node means the type could not be
     built (see builtin_types[]); silently skip the builtin.  */
  if (fntype == error_mark_node)
    return;

  /* Any builtin that gets a library alias must be spelled with the
     "__builtin_" prefix, since LIBNAME is derived by stripping it.  */
  gcc_assert ((!both_p && !fallback_p)
	      || startswith (name, "__builtin_"));

  libname = name + strlen ("__builtin_");
  decl = add_builtin_function (name, fntype, fncode, fnclass,
			       (fallback_p ? libname : NULL),
			       fnattrs);
  if (both_p)
    /* ??? This is normally further controlled by command-line options
       like -fno-builtin, but we don't have them for Ada.  */
    add_builtin_function (libname, libtype, fncode, fnclass,
			  NULL, fnattrs);

  /* Record DECL so builtin_decl_explicit/implicit can find it.  */
  set_builtin_decl (fncode, decl, implicit_p);
}
/* C-dialect flags referenced by the COND expressions in builtins.def.
   Ada has no notion of C dialects, so they are permanently zero.  */
static int flag_isoc94 = 0;
static int flag_isoc99 = 0;
static int flag_isoc11 = 0;
static int flag_isoc23 = 0;
/* Install what the common builtins.def offers plus our local additions.

   Note that ada-builtins.def is included first so that locally redefined
   built-in functions take precedence over the commonly defined ones.  */

static void
install_builtin_functions (void)
{
  /* Each DEF_BUILTIN expansion pulls its function type and attribute
     list from the builtin_types[] and built_in_attributes[] tables
     set up earlier.  */
#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
		    NONANSI_P, ATTRS, IMPLICIT, COND)			\
  if (NAME && COND)							\
    def_builtin_1 (ENUM, NAME, CLASS,					\
		   builtin_types[(int) TYPE],				\
		   builtin_types[(int) LIBTYPE],			\
		   BOTH_P, FALLBACK_P, NONANSI_P,			\
		   built_in_attributes[(int) ATTRS], IMPLICIT);
  /* Ada-specific builtins are front-end only and never get a library
     fallback name.  */
#define DEF_ADA_BUILTIN(ENUM, NAME, TYPE, ATTRS)		\
  DEF_BUILTIN (ENUM, "__builtin_" NAME, BUILT_IN_FRONTEND, TYPE, BT_LAST, \
	       false, false, false, ATTRS, true, true)
#include "ada-builtins.def"
#include "builtins.def"
}
7312 /* ----------------------------------------------------------------------- *
7313 * BUILTIN FUNCTIONS *
7314 * ----------------------------------------------------------------------- */
/* Install the builtin functions we might need.  Order matters: types
   and attributes must exist before any builtin declaration refers to
   them.  */

void
gnat_install_builtins (void)
{
  install_builtin_elementary_types ();
  install_builtin_function_types ();
  install_builtin_attributes ();

  /* Install builtins used by generic middle-end pieces first.  Some of these
     know about internal specificities and control attributes accordingly, for
     instance __builtin_alloca vs no-throw and -fstack-check.  We will ignore
     the generic definition from builtins.def.  */
  build_common_builtin_nodes ();

  /* Now, install the target specific builtins, such as the AltiVec family on
     ppc, and the common set as exposed by builtins.def.  */
  targetm.init_builtins ();
  install_builtin_functions ();
}
7337 #include "gt-ada-utils.h"
7338 #include "gtype-ada.h"