/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                                U T I L S                                 *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2003, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software;  you can  redistribute it  and/or modify it under *
 * terms of the  GNU General Public License as published  by the Free Soft- *
 * ware  Foundation;  either version 2,  or (at your option) any later ver- *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY;  without even the  implied warranty of MERCHANTABILITY *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for  more details.  You should have  received  a copy of the GNU General *
 * Public License  distributed with GNAT;  see file COPYING.  If not, write *
 * to  the Free Software Foundation,  59 Temple Place - Suite 330,  Boston, *
 * MA 02111-1307, USA.                                                      *
 *                                                                          *
 * GNAT was originally developed  by the GNAT team at  New York University. *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "defaults.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "debug.h"
#include "convert.h"

#include "ada.h"
#include "types.h"
#include "atree.h"
#include "elists.h"
#include "namet.h"
#include "nlists.h"
#include "stringt.h"
#include "uintp.h"
#include "fe.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"

#ifndef MAX_FIXED_MODE_SIZE
#define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
#endif

#ifndef MAX_BITS_PER_WORD
#define MAX_BITS_PER_WORD  BITS_PER_WORD
#endif

/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Associates a GNAT tree node to a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See documentation
   of `save_gnu_tree' for more info.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

/* This listhead is used to record any global objects that need elaboration.
   TREE_PURPOSE is the variable to be elaborated and TREE_VALUE is the
   initial value to assign.  */

static GTY(()) tree pending_elaborations;

/* This stack allows us to momentarily switch to generating elaboration
   lists for an inner context.  */

struct e_stack GTY(()) {
  struct e_stack *next;
  tree elab_list;
};

static GTY(()) struct e_stack *elist_stack;

/* This variable keeps a table for types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];

/* For each binding contour we allocate a binding_level structure which records
   the entities defined or declared in that contour.  Contours include:

	the global one
	one for each subprogram definition
	one for each compound statement (declare block)

   Binding contours are used to create GCC tree BLOCK nodes.  */

struct binding_level GTY(())
{
  /* A chain of ..._DECL nodes for all variables, constants, functions,
     parameters and type declarations.  These ..._DECL nodes are chained
     through the TREE_CHAIN field.  Note that these ..._DECL nodes are stored
     in the reverse of the order supplied to be compatible with the
     back-end.  */
  tree names;
  /* For each level (except the global one), a chain of BLOCK nodes for all
     the levels that were entered and exited one level down from this one.  */
  tree blocks;
  /* The BLOCK node for this level, if one has been preallocated.
     If 0, the BLOCK is allocated (if needed) when the level is popped.  */
  tree this_block;
  /* The binding level containing this one (the enclosing binding level).  */
  struct binding_level *level_chain;
};

/* The binding level currently in effect.  */
static GTY(()) struct binding_level *current_binding_level;

/* A chain of binding_level structures awaiting reuse.  */
static GTY((deletable (""))) struct binding_level *free_binding_level;

/* The outermost binding level.  This binding level is created when the
   compiler is started and it will exist through the entire compilation.  */
static struct binding_level *global_binding_level;

/* Binding level structures are initialized by copying this one.  */
static struct binding_level clear_binding_level = {NULL, NULL, NULL, NULL};

struct language_function GTY(())
{
  int unused;
};

static tree merge_sizes (tree, tree, tree, int, int);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static int value_zerop (tree);
static tree float_type_for_precision (int, enum machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static tree convert_to_thin_pointer (tree, tree);
static tree make_descriptor_field (const char *, tree, tree, tree);
static int value_factor_p (tree, int);
static int potential_alignment_gap (tree, tree, tree);

/* Initialize the association of GNAT nodes to GCC trees.  */

void
init_gnat_to_gnu (void)
{
  associate_gnat_to_gnu
    = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));

  pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
}

/* GNAT_ENTITY is a GNAT tree node for an entity.  GNU_DECL is the GCC tree
   which is to be associated with GNAT_ENTITY.  Such a GCC tree node is always
   a ..._DECL node.  If NO_CHECK is nonzero, the latter check is suppressed.

   If GNU_DECL is zero, a previous association is to be reset.  */

void
save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, int no_check)
{
  /* Check that GNAT_ENTITY is not already defined and that it is being set
     to something which is a decl.  Raise gigi 401 if not.  Usually, this
     means GNAT_ENTITY is defined twice, but occasionally is due to some
     Gigi problem.  */
  if (gnu_decl
      && (associate_gnat_to_gnu[gnat_entity - First_Node_Id]
          || (! no_check && ! DECL_P (gnu_decl))))
    gigi_abort (401);

  associate_gnat_to_gnu[gnat_entity - First_Node_Id] = gnu_decl;
}

/* GNAT_ENTITY is a GNAT tree node for a defining identifier.
   Return the ..._DECL node that was associated with it.  If there is no tree
   node associated with GNAT_ENTITY, abort.

   In some cases, such as delayed elaboration or expressions that need to
   be elaborated only once, GNAT_ENTITY is really not an entity.  */

tree
get_gnu_tree (Entity_Id gnat_entity)
{
  if (! associate_gnat_to_gnu[gnat_entity - First_Node_Id])
    gigi_abort (402);

  return associate_gnat_to_gnu[gnat_entity - First_Node_Id];
}

/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY.  */

int
present_gnu_tree (Entity_Id gnat_entity)
{
  return (associate_gnat_to_gnu[gnat_entity - First_Node_Id] != NULL_TREE);
}
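
/* Example (illustrative sketch): a typical caller in gigi records the GCC
   decl it built for a GNAT entity and later retrieves it, guarding the
   lookup with present_gnu_tree.  The names gnat_entity and gnu_decl below
   stand for whatever the caller has in hand:

     if (! present_gnu_tree (gnat_entity))
       save_gnu_tree (gnat_entity, gnu_decl, 0);
     ...
     gnu_decl = get_gnu_tree (gnat_entity);  */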

/* Return non-zero if we are currently in the global binding level.  */

int
global_bindings_p (void)
{
  return (force_global != 0 || current_binding_level == global_binding_level
          ? -1 : 0);
}

/* Return the list of declarations in the current level.  Note that this list
   is in reverse order (it has to be so for back-end compatibility).  */

tree
getdecls (void)
{
  return current_binding_level->names;
}

/* Nonzero if the current level needs to have a BLOCK made.  */

int
kept_level_p (void)
{
  return (current_binding_level->names != 0);
}

/* Enter a new binding level.  The input parameter is ignored, but has to be
   specified for back-end compatibility.  */

void
pushlevel (int ignore ATTRIBUTE_UNUSED)
{
  struct binding_level *newlevel = NULL;

  /* Reuse a struct for this binding level, if there is one.  */
  if (free_binding_level)
    {
      newlevel = free_binding_level;
      free_binding_level = free_binding_level->level_chain;
    }
  else
    newlevel
      = (struct binding_level *) ggc_alloc (sizeof (struct binding_level));

  *newlevel = clear_binding_level;

  /* Add this level to the front of the chain (stack) of levels that are
     active.  */
  newlevel->level_chain = current_binding_level;
  current_binding_level = newlevel;
}

/* Exit a binding level.
   Pop the level off, and restore the state of the identifier-decl mappings
   that were in effect when this level was entered.

   If KEEP is nonzero, this level had explicit declarations, so create a
   "block" (a BLOCK node) for the level to record its declarations and
   subblocks for symbol table output.

   If FUNCTIONBODY is nonzero, this level is the body of a function,
   so create a block as if KEEP were set and also clear out all
   label names.

   If REVERSE is nonzero, reverse the order of decls before putting
   them into the BLOCK.  */

tree
poplevel (int keep, int reverse, int functionbody)
{
  /* Points to a GCC BLOCK tree node.  This is the BLOCK node constructed for
     the binding level that we are about to exit and which is returned by this
     routine.  */
  tree block = NULL_TREE;
  tree decl_chain;
  tree decl_node;
  tree subblock_chain = current_binding_level->blocks;
  tree subblock_node;
  int block_previously_created;

  /* Reverse the list of XXXX_DECL nodes if desired.  Note that the ..._DECL
     nodes chained through the `names' field of current_binding_level are in
     reverse order except for PARM_DECL nodes, which are explicitly stored in
     the right order.  */
  current_binding_level->names
    = decl_chain = (reverse) ? nreverse (current_binding_level->names)
      : current_binding_level->names;

  /* Output any nested inline functions within this block which must be
     compiled because their address is needed.  */
  for (decl_node = decl_chain; decl_node; decl_node = TREE_CHAIN (decl_node))
    if (TREE_CODE (decl_node) == FUNCTION_DECL
        && ! TREE_ASM_WRITTEN (decl_node) && TREE_ADDRESSABLE (decl_node)
        && DECL_INITIAL (decl_node) != 0)
      {
        push_function_context ();
        output_inline_function (decl_node);
        pop_function_context ();
      }

  block = 0;
  block_previously_created = (current_binding_level->this_block != 0);
  if (block_previously_created)
    block = current_binding_level->this_block;
  else if (keep || functionbody)
    block = make_node (BLOCK);
  if (block != 0)
    {
      BLOCK_VARS (block) = keep ? decl_chain : 0;
      BLOCK_SUBBLOCKS (block) = subblock_chain;
    }

  /* Record the BLOCK node just built as the subblock of its enclosing
     scope.  */
  for (subblock_node = subblock_chain; subblock_node;
       subblock_node = TREE_CHAIN (subblock_node))
    BLOCK_SUPERCONTEXT (subblock_node) = block;

  /* Clear out the meanings of the local variables of this level.  */

  for (subblock_node = decl_chain; subblock_node;
       subblock_node = TREE_CHAIN (subblock_node))
    if (DECL_NAME (subblock_node) != 0)
      /* If the identifier was used or addressed via a local extern decl,
         don't forget that fact.  */
      if (DECL_EXTERNAL (subblock_node))
        {
          if (TREE_USED (subblock_node))
            TREE_USED (DECL_NAME (subblock_node)) = 1;
          if (TREE_ADDRESSABLE (subblock_node))
            TREE_ADDRESSABLE (DECL_ASSEMBLER_NAME (subblock_node)) = 1;
        }

  {
    /* Pop the current level, and free the structure for reuse.  */
    struct binding_level *level = current_binding_level;
    current_binding_level = current_binding_level->level_chain;
    level->level_chain = free_binding_level;
    free_binding_level = level;
  }

  if (functionbody)
    {
      /* This is the top level block of a function.  The ..._DECL chain stored
         in BLOCK_VARS are the function's parameters (PARM_DECL nodes).  Don't
         leave them in the BLOCK because they are found in the FUNCTION_DECL
         instead.  */
      DECL_INITIAL (current_function_decl) = block;
      BLOCK_VARS (block) = 0;
    }
  else if (block)
    {
      if (!block_previously_created)
        current_binding_level->blocks
          = chainon (current_binding_level->blocks, block);
    }

  /* If we did not make a block for the level just exited, any blocks made for
     inner levels (since they cannot be recorded as subblocks in that level)
     must be carried forward so they will later become subblocks of something
     else.  */
  else if (subblock_chain)
    current_binding_level->blocks
      = chainon (current_binding_level->blocks, subblock_chain);
  if (block)
    TREE_USED (block) = 1;

  return block;
}
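
/* Example (illustrative sketch): the usual pairing when gigi translates a
   declarative region is to open a level, push the decls created for it, and
   pop the level to obtain the BLOCK.  The decl below is a placeholder:

     pushlevel (0);
     pushdecl (decl);
     ...
     block = poplevel (kept_level_p (), 1, 0);  */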

/* Insert BLOCK at the end of the list of subblocks of the
   current binding level.  This is used when a BIND_EXPR is expanded,
   to handle the BLOCK node inside the BIND_EXPR.  */

void
insert_block (tree block)
{
  TREE_USED (block) = 1;
  current_binding_level->blocks
    = chainon (current_binding_level->blocks, block);
}

/* Set the BLOCK node for the innermost scope
   (the one we are currently in).  */

void
set_block (tree block)
{
  current_binding_level->this_block = block;
  current_binding_level->names = chainon (current_binding_level->names,
                                          BLOCK_VARS (block));
  current_binding_level->blocks = chainon (current_binding_level->blocks,
                                           BLOCK_SUBBLOCKS (block));
}

/* Records a ..._DECL node DECL as belonging to the current lexical scope.
   Returns the ..._DECL node.  */

tree
pushdecl (tree decl)
{
  struct binding_level *b;

  /* If at top level, there is no context.  But PARM_DECLs always go in the
     level of their function.  */
  if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
    {
      b = global_binding_level;
      DECL_CONTEXT (decl) = 0;
    }
  else
    {
      b = current_binding_level;
      DECL_CONTEXT (decl) = current_function_decl;
    }

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later if necessary.  This needs to be
     this way for compatibility with the back-end.

     Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into the list.  They
     will cause trouble with the debugger and aren't needed anyway.  */
  if (TREE_CODE (decl) != TYPE_DECL
      || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
    {
      TREE_CHAIN (decl) = b->names;
      b->names = decl;
    }

  /* For the declaration of a type, set its name if it either is not already
     set, was set to an IDENTIFIER_NODE, indicating an internal name,
     or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same POINTER_TYPE node.  Code in this
     function in c-decl.c makes a copy of the type node here, but that may
     cause us trouble with incomplete types, so let's not try it (at least
     for now).  */

  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_NAME (decl) != 0
      && (TYPE_NAME (TREE_TYPE (decl)) == 0
          || TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == IDENTIFIER_NODE
          || (TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == TYPE_DECL
              && DECL_ARTIFICIAL (TYPE_NAME (TREE_TYPE (decl)))
              && ! DECL_ARTIFICIAL (decl))))
    TYPE_NAME (TREE_TYPE (decl)) = decl;

  return decl;
}

/* Do little here.  Set up the standard declarations later after the
   front end has been run.  */

void
gnat_init_decl_processing (void)
{
  input_line = 0;

  /* Make the binding_level structure for global names.  */
  current_function_decl = 0;
  current_binding_level = 0;
  free_binding_level = 0;
  pushlevel (0);
  global_binding_level = current_binding_level;

  build_common_tree_nodes (0);

  /* In Ada, we use a signed type for SIZETYPE.  Use the signed type
     corresponding to the size of Pmode.  In most cases when ptr_mode and
     Pmode differ, C will use the width of ptr_mode as sizetype.  But we get
     far better code using the width of Pmode.  Make this here since we need
     this before we can expand the GNAT types.  */
  set_sizetype (gnat_type_for_size (GET_MODE_BITSIZE (Pmode), 0));
  build_common_tree_nodes_2 (0);

  pushdecl (build_decl (TYPE_DECL, get_identifier (SIZE_TYPE), sizetype));

  /* We need to make the integer type before doing anything else.
     We stitch this in to the appropriate GNAT type later.  */
  pushdecl (build_decl (TYPE_DECL, get_identifier ("integer"),
                        integer_type_node));
  pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned char"),
                        char_type_node));

  ptr_void_type_node = build_pointer_type (void_type_node);
}

/* Create the predefined scalar types such as `integer_type_node' needed
   in the gcc back-end and initialize the global binding level.  */

void
init_gigi_decls (tree long_long_float_type, tree exception_type)
{
  tree endlink, decl;
  unsigned int i;

  /* Set the types that GCC and Gigi use from the front end.  We would like
     to do this for char_type_node, but it needs to correspond to the C
     char type.  */
  if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
    {
      /* In this case, the builtin floating point types are VAX float,
         so make up a type for use.  */
      longest_float_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
      layout_type (longest_float_type_node);
      pushdecl (build_decl (TYPE_DECL, get_identifier ("longest float type"),
                            longest_float_type_node));
    }
  else
    longest_float_type_node = TREE_TYPE (long_long_float_type);

  except_type_node = TREE_TYPE (exception_type);

  unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
  pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned int"),
                        unsigned_type_node));

  void_type_decl_node
    = pushdecl (build_decl (TYPE_DECL, get_identifier ("void"),
                            void_type_node));

  void_ftype = build_function_type (void_type_node, NULL_TREE);
  ptr_void_ftype = build_pointer_type (void_ftype);

  /* Now declare runtime functions.  */
  endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);

  /* malloc is a function declaration tree for a function to allocate
     memory.  */
  malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
                                     NULL_TREE,
                                     build_function_type (ptr_void_type_node,
                                                          tree_cons (NULL_TREE,
                                                                     sizetype,
                                                                     endlink)),
                                     NULL_TREE, 0, 1, 1, 0);

  /* free is a function declaration tree for a function to free memory.  */
  free_decl
    = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
                           build_function_type (void_type_node,
                                                tree_cons (NULL_TREE,
                                                           ptr_void_type_node,
                                                           endlink)),
                           NULL_TREE, 0, 1, 1, 0);

  /* Make the types and functions used for exception processing.  */
  jmpbuf_type
    = build_array_type (gnat_type_for_mode (Pmode, 0),
                        build_index_type (build_int_2 (5, 0)));
  pushdecl (build_decl (TYPE_DECL, get_identifier ("JMPBUF_T"), jmpbuf_type));
  jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);

  /* Functions to get and set the jumpbuf pointer for the current thread.  */
  get_jmpbuf_decl
    = create_subprog_decl
      (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
       NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
       NULL_TREE, 0, 1, 1, 0);

  set_jmpbuf_decl
    = create_subprog_decl
      (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
       NULL_TREE,
       build_function_type (void_type_node,
                            tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
       NULL_TREE, 0, 1, 1, 0);

  /* Function to get the current exception.  */
  get_excptr_decl
    = create_subprog_decl
      (get_identifier ("system__soft_links__get_gnat_exception"),
       NULL_TREE,
       build_function_type (build_pointer_type (except_type_node), NULL_TREE),
       NULL_TREE, 0, 1, 1, 0);

  /* Functions that raise exceptions.  */
  raise_nodefer_decl
    = create_subprog_decl
      (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
       build_function_type (void_type_node,
                            tree_cons (NULL_TREE,
                                       build_pointer_type (except_type_node),
                                       endlink)),
       NULL_TREE, 0, 1, 1, 0);

  /* Hooks to call when entering/leaving an exception handler.  */
  begin_handler_decl
    = create_subprog_decl (get_identifier ("__gnat_begin_handler"), NULL_TREE,
                           build_function_type (void_type_node,
                                                tree_cons (NULL_TREE,
                                                           ptr_void_type_node,
                                                           endlink)),
                           NULL_TREE, 0, 1, 1, 0);

  end_handler_decl
    = create_subprog_decl (get_identifier ("__gnat_end_handler"), NULL_TREE,
                           build_function_type (void_type_node,
                                                tree_cons (NULL_TREE,
                                                           ptr_void_type_node,
                                                           endlink)),
                           NULL_TREE, 0, 1, 1, 0);

  /* If in no exception handlers mode, all raise statements are redirected to
     __gnat_last_chance_handler.  No need to redefine raise_nodefer_decl, since
     this procedure will never be called in this mode.  */
  if (No_Exception_Handlers_Set ())
    {
      decl
        = create_subprog_decl
          (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
           build_function_type (void_type_node,
                                tree_cons (NULL_TREE,
                                           build_pointer_type (char_type_node),
                                           tree_cons (NULL_TREE,
                                                      integer_type_node,
                                                      endlink))),
           NULL_TREE, 0, 1, 1, 0);

      for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
        gnat_raise_decls[i] = decl;
    }
  else
    /* Otherwise, make one decl for each exception reason.  */
    for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
      {
        char name[17];

        sprintf (name, "__gnat_rcheck_%.2d", i);
        gnat_raise_decls[i]
          = create_subprog_decl
            (get_identifier (name), NULL_TREE,
             build_function_type (void_type_node,
                                  tree_cons (NULL_TREE,
                                             build_pointer_type
                                             (char_type_node),
                                             tree_cons (NULL_TREE,
                                                        integer_type_node,
                                                        endlink))),
             NULL_TREE, 0, 1, 1, 0);
      }

  /* Indicate that these never return.  */
  TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
  TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
  TREE_TYPE (raise_nodefer_decl)
    = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
                            TYPE_QUAL_VOLATILE);

  for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
    {
      TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
      TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
      TREE_TYPE (gnat_raise_decls[i])
        = build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
                                TYPE_QUAL_VOLATILE);
    }

  /* setjmp returns an integer and has one operand, which is a pointer to
     a jmpbuf.  */
  setjmp_decl
    = create_subprog_decl
      (get_identifier ("__builtin_setjmp"), NULL_TREE,
       build_function_type (integer_type_node,
                            tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
       NULL_TREE, 0, 1, 1, 0);

  DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
  DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;

  main_identifier_node = get_identifier ("main");
}

/* Given a record type (RECORD_TYPE) and a chain of FIELD_DECL
   nodes (FIELDLIST), finish constructing the record or union type.
   If HAS_REP is nonzero, this record has a rep clause; don't call
   layout_type but merely set the size and alignment ourselves.
   If DEFER_DEBUG is nonzero, do not call the debugging routines
   on this type; it will be done later.  */

void
finish_record_type (tree record_type,
                    tree fieldlist,
                    int has_rep,
                    int defer_debug)
{
  enum tree_code code = TREE_CODE (record_type);
  tree ada_size = bitsize_zero_node;
  tree size = bitsize_zero_node;
  tree size_unit = size_zero_node;
  int var_size = 0;
  tree field;

  TYPE_FIELDS (record_type) = fieldlist;

  if (TYPE_NAME (record_type) != 0
      && TREE_CODE (TYPE_NAME (record_type)) == TYPE_DECL)
    TYPE_STUB_DECL (record_type) = TYPE_NAME (record_type);
  else
    TYPE_STUB_DECL (record_type)
      = pushdecl (build_decl (TYPE_DECL, TYPE_NAME (record_type),
                              record_type));

  /* We don't need both the typedef name and the record name output in
     the debugging information, since they are the same.  */
  DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;

  /* Globally initialize the record first.  If this is a rep'ed record,
     that just means some initializations; otherwise, layout the record.  */

  if (has_rep)
    {
      TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
      TYPE_MODE (record_type) = BLKmode;
      if (TYPE_SIZE (record_type) == 0)
        {
          TYPE_SIZE (record_type) = bitsize_zero_node;
          TYPE_SIZE_UNIT (record_type) = size_zero_node;
        }
      /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
         out just like a UNION_TYPE, since the size will be fixed.  */
      else if (code == QUAL_UNION_TYPE)
        code = UNION_TYPE;
    }
  else
    {
      /* Ensure there isn't a size already set.  There can be in an error
         case where there is a rep clause but all fields have errors and
         no longer have a position.  */
      TYPE_SIZE (record_type) = 0;
      layout_type (record_type);
    }

  /* At this point, the position and size of each field is known.  It was
     either set before entry by a rep clause, or by laying out the type
     above.  We now make a pass through the fields (in reverse order for
     QUAL_UNION_TYPEs) to compute the Ada size; the GCC size and alignment
     (for rep'ed records that are not padding types); and the mode (for
     rep'ed records).  */

  if (code == QUAL_UNION_TYPE)
    fieldlist = nreverse (fieldlist);

  for (field = fieldlist; field; field = TREE_CHAIN (field))
    {
      tree type = TREE_TYPE (field);
      tree this_size = DECL_SIZE (field);
      tree this_size_unit = DECL_SIZE_UNIT (field);
      tree this_ada_size = DECL_SIZE (field);

      /* We need to make an XVE/XVU record if any field has variable size,
         whether or not the record does.  For example, if we have a union,
         it may be that all fields, rounded up to the alignment, have the
         same size, in which case we'll use that size.  But the debug
         output routines (except Dwarf2) won't be able to output the fields,
         so we need to make the special record.  */
      if (TREE_CODE (this_size) != INTEGER_CST)
        var_size = 1;

      if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
           || TREE_CODE (type) == QUAL_UNION_TYPE)
          && ! TYPE_IS_FAT_POINTER_P (type)
          && ! TYPE_CONTAINS_TEMPLATE_P (type)
          && TYPE_ADA_SIZE (type) != 0)
        this_ada_size = TYPE_ADA_SIZE (type);

      if (has_rep && ! DECL_BIT_FIELD (field))
        TYPE_ALIGN (record_type)
          = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));

      switch (code)
        {
        case UNION_TYPE:
          ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
          size = size_binop (MAX_EXPR, size, this_size);
          size_unit = size_binop (MAX_EXPR, size_unit, this_size_unit);
          break;

        case QUAL_UNION_TYPE:
          ada_size
            = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
                           this_ada_size, ada_size));
          size = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
                              this_size, size));
          size_unit = fold (build (COND_EXPR, sizetype, DECL_QUALIFIER (field),
                                   this_size_unit, size_unit));
          break;

        case RECORD_TYPE:
          /* Since we know here that all fields are sorted in order of
             increasing bit position, the size of the record is one
             higher than the ending bit of the last field processed
             unless we have a rep clause, since in that case we might
             have a field outside a QUAL_UNION_TYPE that has a higher ending
             position.  So use a MAX in that case.  Also, if this field is a
             QUAL_UNION_TYPE, we need to take into account the previous size in
             the case of empty variants.  */
          ada_size
            = merge_sizes (ada_size, bit_position (field), this_ada_size,
                           TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
          size = merge_sizes (size, bit_position (field), this_size,
                              TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
          size_unit
            = merge_sizes (size_unit, byte_position (field), this_size_unit,
                           TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
          break;

        default:
          abort ();
        }
    }

  if (code == QUAL_UNION_TYPE)
    nreverse (fieldlist);

  /* If this is a padding record, we never want to make the size smaller than
     what was specified in it, if any.  */
  if (TREE_CODE (record_type) == RECORD_TYPE
      && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type) != 0)
    {
      size = TYPE_SIZE (record_type);
      size_unit = TYPE_SIZE_UNIT (record_type);
    }

  /* Now set any of the values we've just computed that apply.  */
  if (! TYPE_IS_FAT_POINTER_P (record_type)
      && ! TYPE_CONTAINS_TEMPLATE_P (record_type))
    SET_TYPE_ADA_SIZE (record_type, ada_size);

  if (has_rep)
    {
      if (! (TREE_CODE (record_type) == RECORD_TYPE
             && TYPE_IS_PADDING_P (record_type)
             && CONTAINS_PLACEHOLDER_P (size)))
        {
          TYPE_SIZE (record_type) = round_up (size, TYPE_ALIGN (record_type));
          TYPE_SIZE_UNIT (record_type)
            = round_up (size_unit,
                        TYPE_ALIGN (record_type) / BITS_PER_UNIT);
        }

      compute_record_mode (record_type);
    }

  if (! defer_debug)
    {
      /* If this record is of variable size, rename it so that the
         debugger knows it is and make a new, parallel, record
         that tells the debugger how the record is laid out.  See
         exp_dbug.ads.  But don't do this for records that are padding
         since they confuse GDB.  */
      if (var_size
          && ! (TREE_CODE (record_type) == RECORD_TYPE
                && TYPE_IS_PADDING_P (record_type)))
        {
          tree new_record_type
            = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
                         ? UNION_TYPE : TREE_CODE (record_type));
          tree orig_id = DECL_NAME (TYPE_STUB_DECL (record_type));
          tree new_id
            = concat_id_with_name (orig_id,
                                   TREE_CODE (record_type) == QUAL_UNION_TYPE
                                   ? "XVU" : "XVE");
          tree last_pos = bitsize_zero_node;
          tree old_field;
          tree prev_old_field = 0;

          TYPE_NAME (new_record_type) = new_id;
          TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
          TYPE_STUB_DECL (new_record_type)
            = pushdecl (build_decl (TYPE_DECL, new_id, new_record_type));
          DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
          DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
            = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
          TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));

          /* Now scan all the fields, replacing each field with a new
             field corresponding to the new encoding.  */
          for (old_field = TYPE_FIELDS (record_type); old_field != 0;
               old_field = TREE_CHAIN (old_field))
            {
              tree field_type = TREE_TYPE (old_field);
              tree field_name = DECL_NAME (old_field);
              tree new_field;
              tree curpos = bit_position (old_field);
              int var = 0;
              unsigned int align = 0;
              tree pos;

              /* See how the position was modified from the last position.

                 There are two basic cases we support: a value was added
                 to the last position or the last position was rounded to
                 a boundary and then something was added.  Check for the
                 first case first.  If not, see if there is any evidence
                 of rounding.  If so, round the last position and try
                 again.

                 If this is a union, the position can be taken as zero.  */

              if (TREE_CODE (new_record_type) == UNION_TYPE)
                pos = bitsize_zero_node, align = 0;
              else
                pos = compute_related_constant (curpos, last_pos);

              if (pos == 0 && TREE_CODE (curpos) == MULT_EXPR
                  && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST)
                {
                  align = TREE_INT_CST_LOW (TREE_OPERAND (curpos, 1));
                  pos = compute_related_constant (curpos,
                                                  round_up (last_pos, align));
                }
              else if (pos == 0 && TREE_CODE (curpos) == PLUS_EXPR
                       && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
                       && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
                       && host_integerp (TREE_OPERAND
                                         (TREE_OPERAND (curpos, 0), 1),
                                         1))
                {
                  align
                    = tree_low_cst
                      (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
                  pos = compute_related_constant (curpos,
                                                  round_up (last_pos, align));
                }
              else if (potential_alignment_gap (prev_old_field, old_field,
                                                pos))
                {
                  align = TYPE_ALIGN (field_type);
                  pos = compute_related_constant (curpos,
                                                  round_up (last_pos, align));
                }

              /* If we can't compute a position, set it to zero.

                 ??? We really should abort here, but it's too much work
                 to get this correct for all cases.  */

              if (pos == 0)
                pos = bitsize_zero_node;

              /* See if this type is variable-size and make a new type
                 and indicate the indirection if so.  */
              if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
                {
                  field_type = build_pointer_type (field_type);
                  var = 1;
                }

              /* Make a new field name, if necessary.  */
              if (var || align != 0)
                {
                  char suffix[6];

                  if (align != 0)
                    sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
                             align / BITS_PER_UNIT);
                  else
                    strcpy (suffix, "XVL");

                  field_name = concat_id_with_name (field_name, suffix);
                }

              new_field = create_field_decl (field_name, field_type,
                                             new_record_type, 0,
                                             DECL_SIZE (old_field), pos, 0);
              TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
              TYPE_FIELDS (new_record_type) = new_field;

              /* If old_field is a QUAL_UNION_TYPE, take its size as being
                 zero.  The only time it's not the last field of the record
                 is when there are other components at fixed positions after
                 it (meaning there was a rep clause for every field) and we
                 want to be able to encode them.  */
              last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
                                     (TREE_CODE (TREE_TYPE (old_field))
                                      == QUAL_UNION_TYPE)
                                     ? bitsize_zero_node
                                     : DECL_SIZE (old_field));
              prev_old_field = old_field;
            }

          TYPE_FIELDS (new_record_type)
            = nreverse (TYPE_FIELDS (new_record_type));

          rest_of_type_compilation (new_record_type, global_bindings_p ());
        }

      rest_of_type_compilation (record_type, global_bindings_p ());
    }
}

/* Utility function of above to merge LAST_SIZE, the previous size of a record
   with FIRST_BIT and SIZE that describe a field.  SPECIAL is nonzero
   if this represents a QUAL_UNION_TYPE in which case we must look for
   COND_EXPRs and replace a value of zero with the old size.  If HAS_REP
   is nonzero, we must take the MAX of the end position of this field
   with LAST_SIZE.  In all other cases, we use FIRST_BIT plus SIZE.

   We return an expression for the size.  */

static tree
merge_sizes (tree last_size,
             tree first_bit,
             tree size,
             int special,
             int has_rep)
{
  tree type = TREE_TYPE (last_size);
  tree new;

  if (! special || TREE_CODE (size) != COND_EXPR)
    {
      new = size_binop (PLUS_EXPR, first_bit, size);
      if (has_rep)
        new = size_binop (MAX_EXPR, last_size, new);
    }

  else
    new = fold (build (COND_EXPR, type, TREE_OPERAND (size, 0),
                       integer_zerop (TREE_OPERAND (size, 1))
                       ? last_size : merge_sizes (last_size, first_bit,
                                                  TREE_OPERAND (size, 1),
                                                  1, has_rep),
                       integer_zerop (TREE_OPERAND (size, 2))
                       ? last_size : merge_sizes (last_size, first_bit,
                                                  TREE_OPERAND (size, 2),
                                                  1, has_rep)));

  /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
     when fed through substitute_in_expr) into thinking that a constant
     size is not constant.  */
  while (TREE_CODE (new) == NON_LVALUE_EXPR)
    new = TREE_OPERAND (new, 0);

  return new;
}

/* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
   related by the addition of a constant.  Return that constant if so.  */

static tree
compute_related_constant (tree op0, tree op1)
{
  tree op0_var, op1_var;
  tree op0_con = split_plus (op0, &op0_var);
  tree op1_con = split_plus (op1, &op1_var);
  tree result = size_binop (MINUS_EXPR, op0_con, op1_con);

  if (operand_equal_p (op0_var, op1_var, 0))
    return result;
  else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
    return result;
  else
    return 0;
}

/* Utility function of above to split a tree OP which may be a sum, into a
   constant part, which is returned, and a variable part, which is stored
   in *PVAR.  *PVAR may be bitsize_zero_node.  All operations must be of
   bitsizetype.  */

static tree
split_plus (tree in, tree *pvar)
{
  /* Strip NOPS in order to ease the tree traversal and maximize the
     potential for constant or plus/minus discovery.  We need to be careful
     to always return and set *pvar to bitsizetype trees, but it's worth
     the effort.  */
  STRIP_NOPS (in);

  *pvar = convert (bitsizetype, in);

  if (TREE_CODE (in) == INTEGER_CST)
    {
      *pvar = bitsize_zero_node;
      return convert (bitsizetype, in);
    }
  else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
    {
      tree lhs_var, rhs_var;
      tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
      tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);

      if (lhs_var == TREE_OPERAND (in, 0)
          && rhs_var == TREE_OPERAND (in, 1))
        return bitsize_zero_node;

      *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
      return size_binop (TREE_CODE (in), lhs_con, rhs_con);
    }
  else
    return bitsize_zero_node;
}
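
/* Worked example (illustrative): if OP0 is `n + 24' and OP1 is `n + 8', both
   in bitsizetype, split_plus yields constant parts 24 and 8 with the same
   variable part `n', so compute_related_constant returns the constant 16.
   If the variable parts differ, e.g. OP0 is `n + 24' and OP1 is `m + 8',
   it returns 0 to signal that no constant offset relates the two.  */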

/* Return a FUNCTION_TYPE node.  RETURN_TYPE is the type returned by the
   subprogram.  If it is void_type_node, then we are dealing with a procedure,
   otherwise we are dealing with a function.  PARAM_DECL_LIST is a list of
   PARM_DECL nodes that are the subprogram arguments.  CICO_LIST is the
   copy-in/copy-out list to be stored into TYPE_CICO_LIST.
   RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
   object.  RETURNS_BY_REF is nonzero if the function returns by reference.
   RETURNS_WITH_DSP is nonzero if the function is to return with a
   depressed stack pointer.  */

tree
create_subprog_type (tree return_type,
                     tree param_decl_list,
                     tree cico_list,
                     int returns_unconstrained,
                     int returns_by_ref,
                     int returns_with_dsp)
{
  /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
     the subprogram formal parameters.  This list is generated by traversing
     the input list of PARM_DECL nodes.  */
  tree param_type_list = NULL;
  tree param_decl;
  tree type;

  for (param_decl = param_decl_list; param_decl;
       param_decl = TREE_CHAIN (param_decl))
    param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
                                 param_type_list);

  /* The list of the function parameter types has to be terminated by the void
     type to signal to the back-end that we are not dealing with a variable
     parameter subprogram, but that the subprogram has a fixed number of
     parameters.  */
  param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);

  /* The list of argument types has been created in reverse
     so nreverse it.  */
  param_type_list = nreverse (param_type_list);

  type = build_function_type (return_type, param_type_list);

  /* TYPE may have been shared since GCC hashes types.  If it has a CICO_LIST
     or the new type should, make a copy of TYPE.  Likewise for
     RETURNS_UNCONSTRAINED and RETURNS_BY_REF.  */
  if (TYPE_CI_CO_LIST (type) != 0 || cico_list != 0
      || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
      || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref)
    type = copy_type (type);

  SET_TYPE_CI_CO_LIST (type, cico_list);
  TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
  TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
  TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
  return type;
}

/* Return a copy of TYPE but safe to modify in any way.  */

tree
copy_type (tree type)
{
  tree new = copy_node (type);

  /* copy_node clears this field instead of copying it, because it is
     aliased with TREE_CHAIN.  */
  TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);

  TYPE_POINTER_TO (new) = 0;
  TYPE_REFERENCE_TO (new) = 0;
  TYPE_MAIN_VARIANT (new) = new;
  TYPE_NEXT_VARIANT (new) = 0;

  return new;
}

/* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
   TYPE_INDEX_TYPE is INDEX.  */

tree
create_index_type (tree min, tree max, tree index)
{
  /* First build a type for the desired range.  */
  tree type = build_index_2_type (min, max);

  /* If this type has the TYPE_INDEX_TYPE we want, return it.  Otherwise, if it
     doesn't have TYPE_INDEX_TYPE set, set it to INDEX.  If TYPE_INDEX_TYPE
     is set, but not to INDEX, make a copy of this type with the requested
     index type.  Note that we have no way of sharing these types, but that's
     only a small hole.  */
  if (TYPE_INDEX_TYPE (type) == index)
    return type;
  else if (TYPE_INDEX_TYPE (type) != 0)
    type = copy_type (type);

  SET_TYPE_INDEX_TYPE (type, index);
  return type;
}
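
/* Example (illustrative sketch): building the GCC index type for one array
   dimension from bounds already converted to sizetype.  gnu_low, gnu_high
   and gnu_index_subtype are placeholders for trees the caller has built:

     gnu_index_type
       = create_index_type (gnu_low, gnu_high, gnu_index_subtype);  */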

/* Return a TYPE_DECL node.  TYPE_NAME gives the name of the type (a character
   string) and TYPE is a ..._TYPE node giving its data type.
   ARTIFICIAL_P is nonzero if this is a declaration that was generated
   by the compiler.  DEBUG_INFO_P is nonzero if we need to write debugging
   information about this type.  */

tree
create_type_decl (tree type_name,
                  tree type,
                  struct attrib *attr_list,
                  int artificial_p,
                  int debug_info_p)
{
  tree type_decl = build_decl (TYPE_DECL, type_name, type);
  enum tree_code code = TREE_CODE (type);

  DECL_ARTIFICIAL (type_decl) = artificial_p;
  pushdecl (type_decl);
  process_attributes (type_decl, attr_list);

  /* Pass type declaration information to the debugger unless this is an
     UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
     an ENUMERAL_TYPE or RECORD_TYPE which is handled separately,
     a dummy type, which will be completed later, or a type for which
     debugging information was not requested.  */
  if (code == UNCONSTRAINED_ARRAY_TYPE || TYPE_IS_DUMMY_P (type)
      || ! debug_info_p)
    DECL_IGNORED_P (type_decl) = 1;
  else if (code != ENUMERAL_TYPE && code != RECORD_TYPE
           && ! ((code == POINTER_TYPE || code == REFERENCE_TYPE)
                 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
    rest_of_decl_compilation (type_decl, NULL, global_bindings_p (), 0);

  return type_decl;
}
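
/* Example (illustrative sketch): declaring a compiler-generated type with no
   attributes, requesting debug info.  gnu_type is a placeholder for a
   ..._TYPE node built elsewhere:

     gnu_decl = create_type_decl (get_identifier ("some_type"), gnu_type,
                                  NULL, 1, 1);  */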

/* Returns a GCC VAR_DECL node.  VAR_NAME gives the name of the variable.
   ASM_NAME is its assembler name (if provided).  TYPE is its data type
   (a GCC ..._TYPE node).  VAR_INIT is the GCC tree for an optional initial
   expression; NULL_TREE if none.

   CONST_FLAG is nonzero if this variable is constant.

   PUBLIC_FLAG is nonzero if this definition is to be made visible outside of
   the current compilation unit.  This flag should be set when processing the
   variable definitions in a package specification.  EXTERN_FLAG is nonzero
   when processing an external variable declaration (as opposed to a
   definition: no storage is to be allocated for the variable here).

   STATIC_FLAG is only relevant when not at top level.  In that case
   it indicates whether to always allocate storage to the variable.  */

tree
create_var_decl (tree var_name,
                 tree asm_name,
                 tree type,
                 tree var_init,
                 int const_flag,
                 int public_flag,
                 int extern_flag,
                 int static_flag,
                 struct attrib *attr_list)
{
  int init_const
    = (var_init == 0
       ? 0
       : (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
          && (global_bindings_p () || static_flag
              ? 0 != initializer_constant_valid_p (var_init,
                                                   TREE_TYPE (var_init))
              : TREE_CONSTANT (var_init))));
  tree var_decl
    = build_decl ((const_flag && init_const
                   /* Only make a CONST_DECL for sufficiently-small objects.
                      We consider complex double "sufficiently-small".  */
                   && TYPE_SIZE (type) != 0
                   && host_integerp (TYPE_SIZE_UNIT (type), 1)
                   && 0 >= compare_tree_int (TYPE_SIZE_UNIT (type),
                                             GET_MODE_SIZE (DCmode)))
                  ? CONST_DECL : VAR_DECL, var_name, type);
  tree assign_init = 0;

  /* If this is external, throw away any initializations unless this is a
     CONST_DECL (meaning we have a constant); they will be done elsewhere.  If
     we are defining a global here, leave a constant initialization and save
     any variable elaborations for the elaboration routine.  Otherwise, if
     the initializing expression is not the same as TYPE, generate the
     initialization with an assignment statement, since it knows how
     to do the required adjustments.  If we are just annotating types,
     throw away the initialization if it isn't a constant.  */

  if ((extern_flag && TREE_CODE (var_decl) != CONST_DECL)
      || (type_annotate_only && var_init != 0 && ! TREE_CONSTANT (var_init)))
    var_init = 0;

  if (global_bindings_p () && var_init != 0 && ! init_const)
    {
      add_pending_elaborations (var_decl, var_init);
      var_init = 0;
    }

  else if (var_init != 0
           && ((TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
                != TYPE_MAIN_VARIANT (type))
               || (static_flag && ! init_const)))
    assign_init = var_init, var_init = 0;

  DECL_COMMON (var_decl) = !flag_no_common;
  DECL_INITIAL (var_decl) = var_init;
  TREE_READONLY (var_decl) = const_flag;
  DECL_EXTERNAL (var_decl) = extern_flag;
  TREE_PUBLIC (var_decl) = public_flag || extern_flag;
  TREE_CONSTANT (var_decl) = TREE_CODE (var_decl) == CONST_DECL;
  TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
    = TYPE_VOLATILE (type);

  /* At the global binding level we need to allocate static storage for the
     variable if and only if it's not external.  If we are not at the top
     level we allocate automatic storage unless requested not to.  */
  TREE_STATIC (var_decl) = global_bindings_p () ? !extern_flag : static_flag;

  if (asm_name != 0)
    SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);

  process_attributes (var_decl, attr_list);

  /* Add this decl to the current binding level and generate any
     needed code and RTL.  */
  var_decl = pushdecl (var_decl);
  expand_decl (var_decl);

  if (DECL_CONTEXT (var_decl) != 0)
    expand_decl_init (var_decl);

  /* If this is volatile, force it into memory.  */
  if (TREE_SIDE_EFFECTS (var_decl))
    gnat_mark_addressable (var_decl);

  if (TREE_CODE (var_decl) != CONST_DECL)
    rest_of_decl_compilation (var_decl, 0, global_bindings_p (), 0);

  if (assign_init != 0)
    {
      /* If VAR_DECL has a padded type, convert it to the unpadded
         type so the assignment is done properly.  */
      tree lhs = var_decl;

      if (TREE_CODE (TREE_TYPE (lhs)) == RECORD_TYPE
          && TYPE_IS_PADDING_P (TREE_TYPE (lhs)))
        lhs = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (lhs))), lhs);

      expand_expr_stmt (build_binary_op (MODIFY_EXPR, NULL_TREE, lhs,
                                         assign_init));
    }

  return var_decl;
}
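
/* Example (illustrative sketch): a non-constant, unit-local variable with an
   initializer and no attributes.  gnu_type and gnu_init are placeholders:

     gnu_var = create_var_decl (get_identifier ("obj"), NULL_TREE, gnu_type,
                                gnu_init, 0, 0, 0, 0, NULL);  */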

/* Returns a FIELD_DECL node.  FIELD_NAME is the field name, FIELD_TYPE is its
   type, and RECORD_TYPE is the type of the parent.  PACKED is nonzero if
   this field is in a record type with a "pragma pack".  If SIZE is nonzero
   it is the specified size for this field.  If POS is nonzero, it is the bit
   position.  If ADDRESSABLE is nonzero, it means we are allowed to take
   the address of this field for aliasing purposes.  */

tree
create_field_decl (tree field_name,
                   tree field_type,
                   tree record_type,
                   int packed,
                   tree size,
                   tree pos,
                   int addressable)
{
  tree field_decl = build_decl (FIELD_DECL, field_name, field_type);

  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TREE_READONLY (field_type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.  */
  if (packed && TYPE_MODE (field_type) == BLKmode)
    DECL_ALIGN (field_decl) = BITS_PER_UNIT;

  /* If a size is specified, use it.  Otherwise, see if we have a size
     to use that may differ from the natural size of the object.  */
  if (size != 0)
    size = convert (bitsizetype, size);
  else if (packed)
    {
      if (packed == 1 && ! operand_equal_p (rm_size (field_type),
                                            TYPE_SIZE (field_type), 0))
        size = rm_size (field_type);

      /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
         byte.  */
      if (size != 0 && TREE_CODE (size) == INTEGER_CST
          && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
        size = round_up (size, BITS_PER_UNIT);
    }

  /* Make a bitfield if a size is specified for two reasons: first if the size
     differs from the natural size.  Second, if the alignment is insufficient.
     There are a number of ways the latter can be true.  But never make a
     bitfield if the type of the field has a nonconstant size.  */

  if (size != 0 && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
      && (! operand_equal_p (TYPE_SIZE (field_type), size, 0)
          || (pos != 0
              && ! value_zerop (size_binop (TRUNC_MOD_EXPR, pos,
                                            bitsize_int (TYPE_ALIGN
                                                         (field_type)))))
          || packed
          || (TYPE_ALIGN (record_type) != 0
              && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      if (! packed && pos == 0)
        DECL_ALIGN (field_decl)
          = (TYPE_ALIGN (record_type) != 0
             ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
             : TYPE_ALIGN (field_type));
    }

  DECL_PACKED (field_decl) = pos != 0 ? DECL_BIT_FIELD (field_decl) : packed;
  DECL_ALIGN (field_decl)
    = MAX (DECL_ALIGN (field_decl),
           DECL_BIT_FIELD (field_decl) ? 1
           : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT
           : TYPE_ALIGN (field_type));

  if (pos != 0)
    {
      /* We need to pass in the alignment the DECL is known to have.
         This is the lowest-order bit set in POS, but no more than
         the alignment of the record, if one is specified.  Note
         that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      if (host_integerp (pos, 1))
        known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
      else
        known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
          && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
        known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
                             host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
                             : BITS_PER_UNIT);
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
                    &DECL_FIELD_BIT_OFFSET (field_decl),
                    DECL_OFFSET_ALIGN (field_decl), pos);

      DECL_HAS_REP_P (field_decl) = 1;
    }

  /* If the field type is passed by reference, we will have pointers to the
     field, so it is addressable.  */
  if (must_pass_by_ref (field_type) || default_pass_by_ref (field_type))
    addressable = 1;

  /* ??? For now, we say that any field of aggregate type is addressable
     because the front end may take 'Reference of it.  */
  if (AGGREGATE_TYPE_P (field_type))
    addressable = 1;

  /* Mark the decl as nonaddressable if it either is indicated so semantically
     or if it is a bit field.  */
  DECL_NONADDRESSABLE_P (field_decl)
    = ! addressable || DECL_BIT_FIELD (field_decl);

  return field_decl;
}
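
/* Example (illustrative sketch): assembling a two-field record with
   create_field_decl and finish_record_type.  gnu_type_1 and gnu_type_2 are
   placeholders; the fields are chained through TREE_CHAIN and handed to
   finish_record_type with no rep clause and no deferred debug output:

     tree gnu_record = make_node (RECORD_TYPE);
     tree gnu_f1 = create_field_decl (get_identifier ("f1"), gnu_type_1,
                                      gnu_record, 0, NULL_TREE, NULL_TREE, 0);
     tree gnu_f2 = create_field_decl (get_identifier ("f2"), gnu_type_2,
                                      gnu_record, 0, NULL_TREE, NULL_TREE, 0);

     TREE_CHAIN (gnu_f1) = gnu_f2;
     TYPE_NAME (gnu_record) = get_identifier ("rec");
     finish_record_type (gnu_record, gnu_f1, 0, 0);  */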

/* Subroutine of previous function: return nonzero if EXP, ignoring any side
   effects, has the value of zero.  */

static int
value_zerop (tree exp)
{
  if (TREE_CODE (exp) == COMPOUND_EXPR)
    return value_zerop (TREE_OPERAND (exp, 1));

  return integer_zerop (exp);
}

/* Returns a PARM_DECL node.  PARAM_NAME is the name of the parameter,
   PARAM_TYPE is its type.  READONLY is nonzero if the parameter is
   readonly (either an IN parameter or an address of a pass-by-ref
   parameter).  */

tree
create_param_decl (tree param_name, tree param_type, int readonly)
{
  tree param_decl = build_decl (PARM_DECL, param_name, param_type);

  /* Honor the PROMOTE_PROTOTYPES target macro, as not doing so can
     lead to various ABI violations.  */
#ifdef PROMOTE_PROTOTYPES
  if ((TREE_CODE (param_type) == INTEGER_TYPE
       || TREE_CODE (param_type) == ENUMERAL_TYPE)
      && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
    {
      /* We have to be careful about biased types here.  Make a subtype
         of integer_type_node with the proper biasing.  */
      if (TREE_CODE (param_type) == INTEGER_TYPE
          && TYPE_BIASED_REPRESENTATION_P (param_type))
        {
          param_type
            = copy_type (build_range_type (integer_type_node,
                                           TYPE_MIN_VALUE (param_type),
                                           TYPE_MAX_VALUE (param_type)));

          TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
        }
      else
        param_type = integer_type_node;
    }
#endif

  DECL_ARG_TYPE (param_decl) = param_type;
  DECL_ARG_TYPE_AS_WRITTEN (param_decl) = param_type;
  TREE_READONLY (param_decl) = readonly;
  return param_decl;
}
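
/* Example (illustrative sketch): building the type of a one-parameter
   procedure from its PARM_DECLs.  gnu_param_type is a placeholder; the
   result has no copy-in/copy-out list and ordinary return semantics:

     tree gnu_param = create_param_decl (get_identifier ("x"),
                                         gnu_param_type, 1);
     tree gnu_subprog_type
       = create_subprog_type (void_type_node, gnu_param, NULL_TREE,
                              0, 0, 0);  */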

/* Given a DECL and ATTR_LIST, process the listed attributes.  */

void
process_attributes (tree decl, struct attrib *attr_list)
{
  for (; attr_list; attr_list = attr_list->next)
    switch (attr_list->type)
      {
      case ATTR_MACHINE_ATTRIBUTE:
        decl_attributes (&decl, tree_cons (attr_list->name, attr_list->arg,
                                           NULL_TREE),
                         ATTR_FLAG_TYPE_IN_PLACE);
        break;

      case ATTR_LINK_ALIAS:
        TREE_STATIC (decl) = 1;
        assemble_alias (decl, attr_list->name);
        break;

      case ATTR_WEAK_EXTERNAL:
        if (SUPPORTS_WEAK)
          declare_weak (decl);
        else
          post_error ("?weak declarations not supported on this target",
                      attr_list->error_point);
        break;

      case ATTR_LINK_SECTION:
#ifdef ASM_OUTPUT_SECTION_NAME
        DECL_SECTION_NAME (decl)
          = build_string (IDENTIFIER_LENGTH (attr_list->name),
                          IDENTIFIER_POINTER (attr_list->name));
        DECL_COMMON (decl) = 0;
#else
        post_error ("?section attributes are not supported for this target",
                    attr_list->error_point);
#endif
        break;
      }
}

/* Add some pending elaborations on the list.  */

void
add_pending_elaborations (tree var_decl, tree var_init)
{
  if (var_init != 0)
    Check_Elaboration_Code_Allowed (error_gnat_node);

  pending_elaborations
    = chainon (pending_elaborations, build_tree_list (var_decl, var_init));
}

/* Obtain any pending elaborations and clear the old list.  */

tree
get_pending_elaborations (void)
{
  /* Each thing added to the list went on the end; we want it on the
     beginning.  */
  tree result = TREE_CHAIN (pending_elaborations);

  TREE_CHAIN (pending_elaborations) = 0;
  return result;
}

/* Return true if VALUE is a multiple of FACTOR.  FACTOR must be a power
   of 2.  */

static int
value_factor_p (tree value, int factor)
{
  if (host_integerp (value, 1))
    return tree_low_cst (value, 1) % factor == 0;

  if (TREE_CODE (value) == MULT_EXPR)
    return (value_factor_p (TREE_OPERAND (value, 0), factor)
            || value_factor_p (TREE_OPERAND (value, 1), factor));

  return 0;
}
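
/* Example (illustrative): value_factor_p (bitsize_int (24), 8) is true
   because 24 is a multiple of 8, while value_factor_p (bitsize_int (20), 8)
   is false.  For a MULT_EXPR such as `n * 16', it is enough that either
   operand is a multiple of the factor.  */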
1623 /* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1624 unless we can prove these 2 fields are laid out in such a way that no gap
1625 exist between the end of PREV_FIELD and the begining of CURR_FIELD. OFFSET
1626 is the distance in bits between the end of PREV_FIELD and the starting
1627 position of CURR_FIELD. It is ignored if null. */
1629 static int
1630 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1632 /* If this is the first field of the record, there cannot be any gap */
1633 if (!prev_field)
1634 return 0;
1636 /* If the previous field is a union type, then return False: The only
1637 time when such a field is not the last field of the record is when
1638 there are other components at fixed positions after it (meaning there
1639 was a rep clause for every field), in which case we don't want the
1640 alignment constraint to override them. */
1641 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1642 return 0;
1644 /* If the distance between the end of prev_field and the begining of
1645 curr_field is constant, then there is a gap if the value of this
1646 constant is not null. */
1647 if (offset && host_integerp (offset, 1))
1648 return (!integer_zerop (offset));
1650 /* If the size and position of the previous field are constant,
1651 then check the sum of this size and position. There will be a gap
1652 iff it is not multiple of the current field alignment. */
1653 if (host_integerp (DECL_SIZE (prev_field), 1)
1654 && host_integerp (bit_position (prev_field), 1))
1655 return ((tree_low_cst (bit_position (prev_field), 1)
1656 + tree_low_cst (DECL_SIZE (prev_field), 1))
1657 % DECL_ALIGN (curr_field) != 0);
1659 /* If both the position and size of the previous field are multiples
1660 of the current field alignment, there cannot be any gap. */
1661 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1662 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1663 return 0;
1665 /* Fallback: return that there may be a potential gap. */
1666 return 1;
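/* Worked example: if PREV_FIELD sits at bit position 32 with a size of 8
   bits and CURR_FIELD requires 16-bit alignment, then (32 + 8) % 16 == 8,
   so a potential gap is reported; had PREV_FIELD ended at bit 48 the
   remainder would be 0 and no gap would be assumed.  */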
1669 /* Return nonzero if there are pending elaborations. */
1672 pending_elaborations_p (void)
1674 return TREE_CHAIN (pending_elaborations) != 0;
1677 /* Save a copy of the current pending elaboration list and make a new
1678 one. */
1680 void
1681 push_pending_elaborations (void)
1683 struct e_stack *p = (struct e_stack *) ggc_alloc (sizeof (struct e_stack));
1685 p->next = elist_stack;
1686 p->elab_list = pending_elaborations;
1687 elist_stack = p;
1688 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
1691 /* Pop the stack of pending elaborations. */
1693 void
1694 pop_pending_elaborations (void)
1696 struct e_stack *p = elist_stack;
1698 pending_elaborations = p->elab_list;
1699 elist_stack = p->next;
1702 /* Return the current position in pending_elaborations so we can insert
1703 elaborations after that point. */
1705 tree
1706 get_elaboration_location (void)
1708 return tree_last (pending_elaborations);
1711 /* Insert the current elaborations after ELAB, which is in some elaboration
1712 list. */
1714 void
1715 insert_elaboration_list (tree elab)
1717 tree next = TREE_CHAIN (elab);
1719 if (TREE_CHAIN (pending_elaborations))
1721 TREE_CHAIN (elab) = TREE_CHAIN (pending_elaborations);
1722 TREE_CHAIN (tree_last (pending_elaborations)) = next;
1723 TREE_CHAIN (pending_elaborations) = 0;
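/* Schematically, if ELAB is followed by NEXT and the pending list holds
   P1 ... Pn after its dummy head, the chains are relinked to read
   ELAB -> P1 -> ... -> Pn -> NEXT, and the pending list is left holding
   only its head node again.  */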
1727 /* Returns a LABEL_DECL node for LABEL_NAME. */
1729 tree
1730 create_label_decl (tree label_name)
1732 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1734 DECL_CONTEXT (label_decl) = current_function_decl;
1735 DECL_MODE (label_decl) = VOIDmode;
1736 DECL_SOURCE_LOCATION (label_decl) = input_location;
1738 return label_decl;
1741 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1742 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1743 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1744 PARM_DECL nodes chained through the TREE_CHAIN field).
1746 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1747 appropriate fields in the FUNCTION_DECL. */
1749 tree
1750 create_subprog_decl (tree subprog_name,
1751 tree asm_name,
1752 tree subprog_type,
1753 tree param_decl_list,
1754 int inline_flag,
1755 int public_flag,
1756 int extern_flag,
1757 struct attrib *attr_list)
1759 tree return_type = TREE_TYPE (subprog_type);
1760 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1762 /* If this is a function nested inside an inlined external function, it
1763 means we aren't going to compile the outer function unless it is
1764 actually inlined, so do the same for us. */
1765 if (current_function_decl != 0 && DECL_INLINE (current_function_decl)
1766 && DECL_EXTERNAL (current_function_decl))
1767 extern_flag = 1;
1769 DECL_EXTERNAL (subprog_decl) = extern_flag;
1770 TREE_PUBLIC (subprog_decl) = public_flag;
1771 DECL_INLINE (subprog_decl) = inline_flag;
1772 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1773 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1774 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1775 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1776 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1778 if (asm_name != 0)
1779 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1781 process_attributes (subprog_decl, attr_list);
1783 /* Add this decl to the current binding level. */
1784 subprog_decl = pushdecl (subprog_decl);
1786 /* Output the assembler code and/or RTL for the declaration. */
1787 rest_of_decl_compilation (subprog_decl, 0, global_bindings_p (), 0);
1789 return subprog_decl;
1792 /* Count how deep we are into nested functions. This is because
1793 we shouldn't call the backend function context routines unless we
1794 are in a nested function. */
1796 static int function_nesting_depth;
1798 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1799 body. This routine needs to be invoked before processing the declarations
1800 appearing in the subprogram. */
1802 void
1803 begin_subprog_body (tree subprog_decl)
1805 tree param_decl_list;
1806 tree param_decl;
1807 tree next_param;
1809 if (function_nesting_depth++ != 0)
1810 push_function_context ();
1812 announce_function (subprog_decl);
1814 /* Make this field nonzero so further routines know that this is not
1815 tentative. error_mark_node is replaced below (in poplevel) with the
1816 appropriate BLOCK. */
1817 DECL_INITIAL (subprog_decl) = error_mark_node;
1819 /* This function exists in static storage. This does not mean `static' in
1820 the C sense! */
1821 TREE_STATIC (subprog_decl) = 1;
1823 /* Enter a new binding level. */
1824 current_function_decl = subprog_decl;
1825 pushlevel (0);
1827 /* Push all the PARM_DECL nodes onto the current scope (i.e. the scope of the
1828 subprogram body) so that they can be recognized as local variables in the
1829 subprogram.
1831 The list of PARM_DECL nodes is stored in the right order in
1832 DECL_ARGUMENTS. Since ..._DECL nodes get stored in the reverse order in
1833 which they are transmitted to `pushdecl' we need to reverse the list of
1834 PARM_DECLs if we want it to be stored in the right order. The reason why
1835 we want to make sure the PARM_DECLs are stored in the correct order is
1836 that this list will be retrieved in a few lines with a call to `getdecls'
1837 to store it back into the DECL_ARGUMENTS field. */
1838 param_decl_list = nreverse (DECL_ARGUMENTS (subprog_decl));
1840 for (param_decl = param_decl_list; param_decl; param_decl = next_param)
1842 next_param = TREE_CHAIN (param_decl);
1843 TREE_CHAIN (param_decl) = NULL;
1844 pushdecl (param_decl);
1847 /* Store back the PARM_DECL nodes. They appear in the right order. */
1848 DECL_ARGUMENTS (subprog_decl) = getdecls ();
1850 init_function_start (subprog_decl);
1851 expand_function_start (subprog_decl, 0);
1853 /* If this function is `main', emit a call to `__main'
1854 to run global initializers, etc. */
1855 if (DECL_ASSEMBLER_NAME (subprog_decl) != 0
1856 && MAIN_NAME_P (DECL_ASSEMBLER_NAME (subprog_decl))
1857 && DECL_CONTEXT (subprog_decl) == NULL_TREE)
1858 expand_main_function ();
1861 /* Finish the definition of the current subprogram and compile it all the way
1862 to assembler language output. */
1864 void
1865 end_subprog_body (void)
1867 tree decl;
1868 tree cico_list;
1870 poplevel (1, 0, 1);
1871 BLOCK_SUPERCONTEXT (DECL_INITIAL (current_function_decl))
1872 = current_function_decl;
1874 /* Mark the RESULT_DECL as being in this subprogram. */
1875 DECL_CONTEXT (DECL_RESULT (current_function_decl)) = current_function_decl;
1877 expand_function_end ();
1879 /* If this is a nested function, push a new GC context. That will keep
1880 local variables on the stack from being collected while we're doing
1881 the compilation of this function. */
1882 if (function_nesting_depth > 1)
1883 ggc_push_context ();
1885 rest_of_compilation (current_function_decl);
1887 if (function_nesting_depth > 1)
1888 ggc_pop_context ();
1890 /* Throw away any VAR_DECLs we made for OUT parameters; they must
1891 not be seen when we call this function and will be in
1892 unallocated memory anyway. */
1893 for (cico_list = TYPE_CI_CO_LIST (TREE_TYPE (current_function_decl));
1894 cico_list != 0; cico_list = TREE_CHAIN (cico_list))
1895 TREE_VALUE (cico_list) = 0;
1897 if (DECL_SAVED_INSNS (current_function_decl) == 0)
1899 /* Throw away DECL_RTL in any PARM_DECLs unless this function
1900 was saved for inline, in which case the DECL_RTLs are in
1901 preserved memory. */
1902 for (decl = DECL_ARGUMENTS (current_function_decl);
1903 decl != 0; decl = TREE_CHAIN (decl))
1905 SET_DECL_RTL (decl, 0);
1906 DECL_INCOMING_RTL (decl) = 0;
1909 /* Similarly, discard DECL_RTL of the return value. */
1910 SET_DECL_RTL (DECL_RESULT (current_function_decl), 0);
1912 /* But DECL_INITIAL must remain nonzero so we know this
1913 was an actual function definition unless toplev.c decided not
1914 to inline it. */
1915 if (DECL_INITIAL (current_function_decl) != 0)
1916 DECL_INITIAL (current_function_decl) = error_mark_node;
1918 DECL_ARGUMENTS (current_function_decl) = 0;
1921 /* If we are not at the bottom of the function nesting stack, pop up to
1922 the containing function. Otherwise show we aren't in any function. */
1923 if (--function_nesting_depth != 0)
1924 pop_function_context ();
1925 else
1926 current_function_decl = 0;
1929 /* Return a definition for a builtin function named NAME and whose data type
1930 is TYPE. TYPE should be a function type with argument types.
1931 FUNCTION_CODE tells later passes how to compile calls to this function.
1932 See tree.h for its possible values.
1934 If LIBRARY_NAME is nonzero, use that for DECL_ASSEMBLER_NAME,
1935 the name to be called if we can't opencode the function. If
1936 ATTRS is nonzero, use that for the function attribute list. */
1938 tree
1939 builtin_function (const char *name,
1940 tree type,
1941 int function_code,
1942 enum built_in_class class,
1943 const char *library_name,
1944 tree attrs)
1946 tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);
1948 DECL_EXTERNAL (decl) = 1;
1949 TREE_PUBLIC (decl) = 1;
1950 if (library_name)
1951 SET_DECL_ASSEMBLER_NAME (decl, get_identifier (library_name));
1953 pushdecl (decl);
1954 DECL_BUILT_IN_CLASS (decl) = class;
1955 DECL_FUNCTION_CODE (decl) = function_code;
1956 if (attrs)
1957 decl_attributes (&decl, attrs, ATTR_FLAG_BUILT_IN);
1958 return decl;
1961 /* Return an integer type with the number of bits of precision given by
1962 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
1963 it is a signed type. */
1965 tree
1966 gnat_type_for_size (unsigned precision, int unsignedp)
1968 tree t;
1969 char type_name[20];
1971 if (precision <= 2 * MAX_BITS_PER_WORD
1972 && signed_and_unsigned_types[precision][unsignedp] != 0)
1973 return signed_and_unsigned_types[precision][unsignedp];
1975 if (unsignedp)
1976 t = make_unsigned_type (precision);
1977 else
1978 t = make_signed_type (precision);
1980 if (precision <= 2 * MAX_BITS_PER_WORD)
1981 signed_and_unsigned_types[precision][unsignedp] = t;
1983 if (TYPE_NAME (t) == 0)
1985 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
1986 TYPE_NAME (t) = get_identifier (type_name);
1989 return t;
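/* For example (illustrative use):

       tree s32 = gnat_type_for_size (32, 0);    named "SIGNED_32"
       tree u32 = gnat_type_for_size (32, 1);    named "UNSIGNED_32"

   Subsequent calls with the same arguments return the cached nodes from
   signed_and_unsigned_types rather than building new types.  */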
1992 /* Likewise for floating-point types. */
1994 static tree
1995 float_type_for_precision (int precision, enum machine_mode mode)
1997 tree t;
1998 char type_name[20];
2000 if (float_types[(int) mode] != 0)
2001 return float_types[(int) mode];
2003 float_types[(int) mode] = t = make_node (REAL_TYPE);
2004 TYPE_PRECISION (t) = precision;
2005 layout_type (t);
2007 if (TYPE_MODE (t) != mode)
2008 gigi_abort (414);
2010 if (TYPE_NAME (t) == 0)
2012 sprintf (type_name, "FLOAT_%d", precision);
2013 TYPE_NAME (t) = get_identifier (type_name);
2016 return t;
2019 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2020 an unsigned type; otherwise a signed type is returned. */
2022 tree
2023 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2025 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2026 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2027 else
2028 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
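/* E.g. on a target where SImode is 32 bits wide, gnat_type_for_mode
   (SImode, 1) is just gnat_type_for_size (32, 1), while a MODE_FLOAT mode
   such as DFmode goes through float_type_for_precision and yields a
   REAL_TYPE with the matching precision.  */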
2031 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2033 tree
2034 gnat_unsigned_type (tree type_node)
2036 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2038 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2040 type = copy_node (type);
2041 TREE_TYPE (type) = type_node;
2043 else if (TREE_TYPE (type_node) != 0
2044 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2045 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2047 type = copy_node (type);
2048 TREE_TYPE (type) = TREE_TYPE (type_node);
2051 return type;
2054 /* Return the signed version of a TYPE_NODE, a scalar type. */
2056 tree
2057 gnat_signed_type (tree type_node)
2059 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2061 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2063 type = copy_node (type);
2064 TREE_TYPE (type) = type_node;
2066 else if (TREE_TYPE (type_node) != 0
2067 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2068 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2070 type = copy_node (type);
2071 TREE_TYPE (type) = TREE_TYPE (type_node);
2074 return type;
2077 /* Return a type the same as TYPE except unsigned or signed according to
2078 UNSIGNEDP. */
2080 tree
2081 gnat_signed_or_unsigned_type (int unsignedp, tree type)
2083 if (! INTEGRAL_TYPE_P (type) || TREE_UNSIGNED (type) == unsignedp)
2084 return type;
2085 else
2086 return gnat_type_for_size (TYPE_PRECISION (type), unsignedp);
2089 /* EXP is an expression for the size of an object. If this size contains
2090 discriminant references, replace them with the maximum (if MAX_P) or
2091 minimum (if ! MAX_P) possible value of the discriminant. */
2093 tree
2094 max_size (tree exp, int max_p)
2096 enum tree_code code = TREE_CODE (exp);
2097 tree type = TREE_TYPE (exp);
2099 switch (TREE_CODE_CLASS (code))
2101 case 'd':
2102 case 'c':
2103 return exp;
2105 case 'x':
2106 if (code == TREE_LIST)
2107 return tree_cons (TREE_PURPOSE (exp),
2108 max_size (TREE_VALUE (exp), max_p),
2109 TREE_CHAIN (exp) != 0
2110 ? max_size (TREE_CHAIN (exp), max_p) : 0);
2111 break;
2113 case 'r':
2114 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2115 modify. Otherwise, we treat it like a variable. */
2116 if (! CONTAINS_PLACEHOLDER_P (exp))
2117 return exp;
2119 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2120 return
2121 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), 1);
2123 case '<':
2124 return max_p ? size_one_node : size_zero_node;
2126 case '1':
2127 case '2':
2128 case 'e':
2129 switch (TREE_CODE_LENGTH (code))
2131 case 1:
2132 if (code == NON_LVALUE_EXPR)
2133 return max_size (TREE_OPERAND (exp, 0), max_p);
2134 else
2135 return
2136 fold (build1 (code, type,
2137 max_size (TREE_OPERAND (exp, 0),
2138 code == NEGATE_EXPR ? ! max_p : max_p)));
2140 case 2:
2141 if (code == RTL_EXPR)
2142 gigi_abort (407);
2143 else if (code == COMPOUND_EXPR)
2144 return max_size (TREE_OPERAND (exp, 1), max_p);
2145 else if (code == WITH_RECORD_EXPR)
2146 return exp;
2149 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2150 tree rhs = max_size (TREE_OPERAND (exp, 1),
2151 code == MINUS_EXPR ? ! max_p : max_p);
2153 /* Special-case wanting the maximum value of a MIN_EXPR.
2154 In that case, if one side overflows, return the other.
2155 sizetype is signed, but we know sizes are non-negative.
2156 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2157 overflowing or the maximum possible value and the RHS
2158 a variable. */
2159 if (max_p && code == MIN_EXPR && TREE_OVERFLOW (rhs))
2160 return lhs;
2161 else if (max_p && code == MIN_EXPR && TREE_OVERFLOW (lhs))
2162 return rhs;
2163 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2164 && ((TREE_CONSTANT (lhs) && TREE_OVERFLOW (lhs))
2165 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2166 && ! TREE_CONSTANT (rhs))
2167 return lhs;
2168 else
2169 return fold (build (code, type, lhs, rhs));
2172 case 3:
2173 if (code == SAVE_EXPR)
2174 return exp;
2175 else if (code == COND_EXPR)
2176 return fold (build (MAX_EXPR, type,
2177 max_size (TREE_OPERAND (exp, 1), max_p),
2178 max_size (TREE_OPERAND (exp, 2), max_p)));
2179 else if (code == CALL_EXPR && TREE_OPERAND (exp, 1) != 0)
2180 return build (CALL_EXPR, type, TREE_OPERAND (exp, 0),
2181 max_size (TREE_OPERAND (exp, 1), max_p));
2185 gigi_abort (408);
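/* As an illustration, let EXP be D - 1 where D is a reference to a
   discriminant whose subtype ranges over 1 .. 10.  The discriminant
   reference (the 'r' case above) becomes 10 when MAX_P and 1 otherwise,
   so max_size returns 9, respectively 0.  Note that the right operand of
   a MINUS_EXPR is processed with ! MAX_P: the maximum of D1 - D2 is
   max (D1) - min (D2).  */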
2188 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2189 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2190 Return a constructor for the template. */
2192 tree
2193 build_template (tree template_type, tree array_type, tree expr)
2195 tree template_elts = NULL_TREE;
2196 tree bound_list = NULL_TREE;
2197 tree field;
2199 if (TREE_CODE (array_type) == RECORD_TYPE
2200 && (TYPE_IS_PADDING_P (array_type)
2201 || TYPE_LEFT_JUSTIFIED_MODULAR_P (array_type)))
2202 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2204 if (TREE_CODE (array_type) == ARRAY_TYPE
2205 || (TREE_CODE (array_type) == INTEGER_TYPE
2206 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2207 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2209 /* First make the list for a CONSTRUCTOR for the template. Go down the
2210 field list of the template instead of the type chain because this
2211 array might be an Ada array of arrays and we can't tell where the
2212 nested arrays stop being the underlying object. */
2214 for (field = TYPE_FIELDS (template_type); field;
2215 (bound_list != 0
2216 ? (bound_list = TREE_CHAIN (bound_list))
2217 : (array_type = TREE_TYPE (array_type))),
2218 field = TREE_CHAIN (TREE_CHAIN (field)))
2220 tree bounds, min, max;
2222 /* If we have a bound list, get the bounds from there. Likewise
2223 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2224 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2225 This will give us a maximum range. */
2226 if (bound_list != 0)
2227 bounds = TREE_VALUE (bound_list);
2228 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2229 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2230 else if (expr != 0 && TREE_CODE (expr) == PARM_DECL
2231 && DECL_BY_COMPONENT_PTR_P (expr))
2232 bounds = TREE_TYPE (field);
2233 else
2234 gigi_abort (411);
2236 min = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MIN_VALUE (bounds));
2237 max = convert (TREE_TYPE (field), TYPE_MAX_VALUE (bounds));
2239 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2240 surround them with a WITH_RECORD_EXPR giving EXPR as the
2241 OBJECT. */
2242 if (CONTAINS_PLACEHOLDER_P (min))
2243 min = build (WITH_RECORD_EXPR, TREE_TYPE (min), min, expr);
2244 if (CONTAINS_PLACEHOLDER_P (max))
2245 max = build (WITH_RECORD_EXPR, TREE_TYPE (max), max, expr);
2247 template_elts = tree_cons (TREE_CHAIN (field), max,
2248 tree_cons (field, min, template_elts));
2251 return gnat_build_constructor (template_type, nreverse (template_elts));
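/* For a one-dimensional array whose index subtype is 1 .. N, the template
   type has one pair of bound fields and the constructor built here is
   just {1, N}; if N depends on a discriminant, the bound is wrapped in a
   WITH_RECORD_EXPR so that it can later be resolved against EXPR.  */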
2254 /* Build a VMS descriptor from a Mechanism_Type, which must specify
2255 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2256 in the type contains in its DECL_INITIAL the expression to use when
2257 a constructor is made for the type. GNAT_ENTITY is a gnat node used
2258 to print out an error message if the mechanism cannot be applied to
2259 an object of that type and also for the name. */
2261 tree
2262 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2264 tree record_type = make_node (RECORD_TYPE);
2265 tree field_list = 0;
2266 int class;
2267 int dtype = 0;
2268 tree inner_type;
2269 int ndim;
2270 int i;
2271 tree *idx_arr;
2272 tree tem;
2274 /* If TYPE is an unconstrained array, use the underlying array type. */
2275 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2276 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2278 /* If this is an array, compute the number of dimensions in the array,
2279 get the index types, and point to the inner type. */
2280 if (TREE_CODE (type) != ARRAY_TYPE)
2281 ndim = 0;
2282 else
2283 for (ndim = 1, inner_type = type;
2284 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2285 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2286 ndim++, inner_type = TREE_TYPE (inner_type))
2289 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2291 if (mech != By_Descriptor_NCA
2292 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2293 for (i = ndim - 1, inner_type = type;
2294 i >= 0;
2295 i--, inner_type = TREE_TYPE (inner_type))
2296 idx_arr[i] = TYPE_DOMAIN (inner_type);
2297 else
2298 for (i = 0, inner_type = type;
2299 i < ndim;
2300 i++, inner_type = TREE_TYPE (inner_type))
2301 idx_arr[i] = TYPE_DOMAIN (inner_type);
2303 /* Now get the DTYPE value. */
2304 switch (TREE_CODE (type))
2306 case INTEGER_TYPE:
2307 case ENUMERAL_TYPE:
2308 if (TYPE_VAX_FLOATING_POINT_P (type))
2309 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2311 case 6:
2312 dtype = 10;
2313 break;
2314 case 9:
2315 dtype = 11;
2316 break;
2317 case 15:
2318 dtype = 27;
2319 break;
2321 else
2322 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2324 case 8:
2325 dtype = TREE_UNSIGNED (type) ? 2 : 6;
2326 break;
2327 case 16:
2328 dtype = TREE_UNSIGNED (type) ? 3 : 7;
2329 break;
2330 case 32:
2331 dtype = TREE_UNSIGNED (type) ? 4 : 8;
2332 break;
2333 case 64:
2334 dtype = TREE_UNSIGNED (type) ? 5 : 9;
2335 break;
2336 case 128:
2337 dtype = TREE_UNSIGNED (type) ? 25 : 26;
2338 break;
2340 break;
2342 case REAL_TYPE:
2343 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2344 break;
2346 case COMPLEX_TYPE:
2347 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2348 && TYPE_VAX_FLOATING_POINT_P (type))
2349 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2351 case 6:
2352 dtype = 12;
2353 break;
2354 case 9:
2355 dtype = 13;
2356 break;
2357 case 15:
2358 dtype = 29;
2360 else
2361 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2362 break;
2364 case ARRAY_TYPE:
2365 dtype = 14;
2366 break;
2368 default:
2369 break;
2372 /* Get the CLASS value. */
2373 switch (mech)
2375 case By_Descriptor_A:
2376 class = 4;
2377 break;
2378 case By_Descriptor_NCA:
2379 class = 10;
2380 break;
2381 case By_Descriptor_SB:
2382 class = 15;
2383 break;
2384 default:
2385 class = 1;
2388 /* Make the type for a descriptor for VMS. The first four fields
2389 are the same for all types. */
2391 field_list
2392 = chainon (field_list,
2393 make_descriptor_field
2394 ("LENGTH", gnat_type_for_size (16, 1), record_type,
2395 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2397 field_list = chainon (field_list,
2398 make_descriptor_field ("DTYPE",
2399 gnat_type_for_size (8, 1),
2400 record_type, size_int (dtype)));
2401 field_list = chainon (field_list,
2402 make_descriptor_field ("CLASS",
2403 gnat_type_for_size (8, 1),
2404 record_type, size_int (class)));
2406 field_list
2407 = chainon (field_list,
2408 make_descriptor_field ("POINTER",
2409 build_pointer_type (type),
2410 record_type,
2411 build1 (ADDR_EXPR,
2412 build_pointer_type (type),
2413 build (PLACEHOLDER_EXPR,
2414 type))));
2416 switch (mech)
2418 case By_Descriptor:
2419 case By_Descriptor_S:
2420 break;
2422 case By_Descriptor_SB:
2423 field_list
2424 = chainon (field_list,
2425 make_descriptor_field
2426 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2427 TREE_CODE (type) == ARRAY_TYPE
2428 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2429 field_list
2430 = chainon (field_list,
2431 make_descriptor_field
2432 ("SB_L2", gnat_type_for_size (32, 1), record_type,
2433 TREE_CODE (type) == ARRAY_TYPE
2434 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2435 break;
2437 case By_Descriptor_A:
2438 case By_Descriptor_NCA:
2439 field_list = chainon (field_list,
2440 make_descriptor_field ("SCALE",
2441 gnat_type_for_size (8, 1),
2442 record_type,
2443 size_zero_node));
2445 field_list = chainon (field_list,
2446 make_descriptor_field ("DIGITS",
2447 gnat_type_for_size (8, 1),
2448 record_type,
2449 size_zero_node));
2451 field_list
2452 = chainon (field_list,
2453 make_descriptor_field
2454 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2455 size_int (mech == By_Descriptor_NCA
2457 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2458 : (TREE_CODE (type) == ARRAY_TYPE
2459 && TYPE_CONVENTION_FORTRAN_P (type)
2460 ? 224 : 192))));
2462 field_list = chainon (field_list,
2463 make_descriptor_field ("DIMCT",
2464 gnat_type_for_size (8, 1),
2465 record_type,
2466 size_int (ndim)));
2468 field_list = chainon (field_list,
2469 make_descriptor_field ("ARSIZE",
2470 gnat_type_for_size (32, 1),
2471 record_type,
2472 size_in_bytes (type)));
2474 /* Now build a pointer to the 0,0,0... element. */
2475 tem = build (PLACEHOLDER_EXPR, type);
2476 for (i = 0, inner_type = type; i < ndim;
2477 i++, inner_type = TREE_TYPE (inner_type))
2478 tem = build (ARRAY_REF, TREE_TYPE (inner_type), tem,
2479 convert (TYPE_DOMAIN (inner_type), size_zero_node));
2481 field_list
2482 = chainon (field_list,
2483 make_descriptor_field
2484 ("A0", build_pointer_type (inner_type), record_type,
2485 build1 (ADDR_EXPR, build_pointer_type (inner_type), tem)));
2487 /* Next come the addressing coefficients. */
2488 tem = size_int (1);
2489 for (i = 0; i < ndim; i++)
2491 char fname[3];
2492 tree idx_length
2493 = size_binop (MULT_EXPR, tem,
2494 size_binop (PLUS_EXPR,
2495 size_binop (MINUS_EXPR,
2496 TYPE_MAX_VALUE (idx_arr[i]),
2497 TYPE_MIN_VALUE (idx_arr[i])),
2498 size_int (1)));
2500 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
2501 fname[1] = '0' + i, fname[2] = 0;
2502 field_list
2503 = chainon (field_list,
2504 make_descriptor_field (fname,
2505 gnat_type_for_size (32, 1),
2506 record_type, idx_length));
2508 if (mech == By_Descriptor_NCA)
2509 tem = idx_length;
2512 /* Finally here are the bounds. */
2513 for (i = 0; i < ndim; i++)
2515 char fname[3];
2517 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2518 field_list
2519 = chainon (field_list,
2520 make_descriptor_field
2521 (fname, gnat_type_for_size (32, 1), record_type,
2522 TYPE_MIN_VALUE (idx_arr[i])));
2524 fname[0] = 'U';
2525 field_list
2526 = chainon (field_list,
2527 make_descriptor_field
2528 (fname, gnat_type_for_size (32, 1), record_type,
2529 TYPE_MAX_VALUE (idx_arr[i])));
2531 break;
2533 default:
2534 post_error ("unsupported descriptor type for &", gnat_entity);
2537 finish_record_type (record_type, field_list, 0, 1);
2538 pushdecl (build_decl (TYPE_DECL, create_concat_name (gnat_entity, "DESC"),
2539 record_type));
2541 return record_type;
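/* For instance, a By_Descriptor_S descriptor for a 32-bit signed integer
   object is a record with only the four common fields: LENGTH = 4,
   DTYPE = 8, CLASS = 1 and POINTER designating the object.  The array
   mechanisms (By_Descriptor_A and By_Descriptor_NCA) additionally get the
   SCALE, DIGITS, AFLAGS, DIMCT, ARSIZE, A0, multiplier and bound fields
   built above.  */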
2544 /* Utility routine for above code to make a field. */
2546 static tree
2547 make_descriptor_field (const char *name, tree type,
2548 tree rec_type, tree initial)
2550 tree field
2551 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2553 DECL_INITIAL (field) = initial;
2554 return field;
2557 /* Build a type to be used to represent an aliased object whose nominal
2558 type is an unconstrained array. This consists of a RECORD_TYPE containing
2559 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
2560 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
2561 is used to represent an arbitrary unconstrained object. Use NAME
2562 as the name of the record. */
2564 tree
2565 build_unc_object_type (tree template_type, tree object_type, tree name)
2567 tree type = make_node (RECORD_TYPE);
2568 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
2569 template_type, type, 0, 0, 0, 1);
2570 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
2571 type, 0, 0, 0, 1);
2573 TYPE_NAME (type) = name;
2574 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
2575 finish_record_type (type,
2576 chainon (chainon (NULL_TREE, template_field),
2577 array_field),
2578 0, 0);
2580 return type;
2583 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
2584 the normal case this is just two adjustments, but we have more to do
2585 if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
2587 void
2588 update_pointer_to (tree old_type, tree new_type)
2590 tree ptr = TYPE_POINTER_TO (old_type);
2591 tree ref = TYPE_REFERENCE_TO (old_type);
2592 tree type;
2594 /* If this is the main variant, process all the other variants first. */
2595 if (TYPE_MAIN_VARIANT (old_type) == old_type)
2596 for (type = TYPE_NEXT_VARIANT (old_type); type != 0;
2597 type = TYPE_NEXT_VARIANT (type))
2598 update_pointer_to (type, new_type);
2600 /* If no pointer or reference, we are done. */
2601 if (ptr == 0 && ref == 0)
2602 return;
2604 /* Merge the old type qualifiers into the new type.
2606 Each old variant has qualifiers for specific reasons, and the new
2607 designated type as well. Each set of qualifiers represents useful
2608 information grabbed at some point, and merging the two simply unifies
2609 these inputs into the final type description.
2611 Consider for instance a volatile type frozen after an access to constant
2612 type designating it. After the designated type freeze, we get here with a
2613 volatile new_type and a dummy old_type with a readonly variant, created
2614 when the access type was processed. We shall make a volatile and readonly
2615 designated type, because that's what it really is.
2617 We might also get here for a non-dummy old_type variant with different
2618 qualifiers than the new_type ones, for instance in some cases of pointers
2619 to private record type elaboration (see the comments around the call to
2620 this routine from gnat_to_gnu_entity/E_Access_Type). We have to merge the
2621 qualifiers in those cases too, to avoid accidentally discarding the
2622 initial set, and will often end up with old_type == new_type then. */
2623 new_type = build_qualified_type (new_type,
2624 TYPE_QUALS (old_type)
2625 | TYPE_QUALS (new_type));
2627 /* If the new type and the old one are identical, there is nothing to
2628 update. */
2629 if (old_type == new_type)
2630 return;
2632 /* Otherwise, first handle the simple case. */
2633 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
2635 if (ptr != 0)
2636 TREE_TYPE (ptr) = new_type;
2637 TYPE_POINTER_TO (new_type) = ptr;
2639 if (ref != 0)
2640 TREE_TYPE (ref) = new_type;
2641 TYPE_REFERENCE_TO (new_type) = ref;
2643 if (ptr != 0 && TYPE_NAME (ptr) != 0
2644 && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL
2645 && TREE_CODE (new_type) != ENUMERAL_TYPE)
2646 rest_of_decl_compilation (TYPE_NAME (ptr), NULL,
2647 global_bindings_p (), 0);
2648 if (ref != 0 && TYPE_NAME (ref) != 0
2649 && TREE_CODE (TYPE_NAME (ref)) == TYPE_DECL
2650 && TREE_CODE (new_type) != ENUMERAL_TYPE)
2651 rest_of_decl_compilation (TYPE_NAME (ref), NULL,
2652 global_bindings_p (), 0);
2655 /* Now deal with the unconstrained array case. In this case the "pointer"
2656 is actually a RECORD_TYPE where the types of both fields are
2657 pointers to void. In that case, copy the field list from the
2658 old type to the new one and update the fields' context. */
2659 else if (TREE_CODE (ptr) != RECORD_TYPE || ! TYPE_IS_FAT_POINTER_P (ptr))
2660 gigi_abort (412);
2662 else
2664 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
2665 tree ptr_temp_type;
2666 tree new_ref;
2667 tree var;
2669 TYPE_FIELDS (ptr) = TYPE_FIELDS (TYPE_POINTER_TO (new_type));
2670 DECL_CONTEXT (TYPE_FIELDS (ptr)) = ptr;
2671 DECL_CONTEXT (TREE_CHAIN (TYPE_FIELDS (ptr))) = ptr;
2673 /* Rework the PLACEHOLDER_EXPR inside the reference to the
2674 template bounds.
2676 ??? This is now the only use of gnat_substitute_in_type, which
2677 is a very "heavy" routine for this purpose, so it should be replaced
2678 at some point. */
2679 ptr_temp_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (ptr)));
2680 new_ref = build (COMPONENT_REF, ptr_temp_type,
2681 build (PLACEHOLDER_EXPR, ptr),
2682 TREE_CHAIN (TYPE_FIELDS (ptr)));
2684 update_pointer_to
2685 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
2686 gnat_substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
2687 TREE_CHAIN (TYPE_FIELDS (ptr)), new_ref));
2689 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
2690 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
2692 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
2693 = TREE_TYPE (new_type) = ptr;
2695 /* Now handle updating the allocation record, what the thin pointer
2696 points to. Update all pointers from the old record into the new
2697 one, update the types of the fields, and recompute the size. */
2699 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
2701 TREE_TYPE (TYPE_FIELDS (new_obj_rec)) = TREE_TYPE (ptr_temp_type);
2702 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2703 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr)));
2704 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2705 = TYPE_SIZE (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
2706 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2707 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
2709 TYPE_SIZE (new_obj_rec)
2710 = size_binop (PLUS_EXPR,
2711 DECL_SIZE (TYPE_FIELDS (new_obj_rec)),
2712 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
2713 TYPE_SIZE_UNIT (new_obj_rec)
2714 = size_binop (PLUS_EXPR,
2715 DECL_SIZE_UNIT (TYPE_FIELDS (new_obj_rec)),
2716 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
2717 rest_of_type_compilation (ptr, global_bindings_p ());
2721 /* Convert a pointer to a constrained array into a pointer to a fat
2722 pointer. This involves making or finding a template. */
2724 static tree
2725 convert_to_fat_pointer (tree type, tree expr)
2727 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
2728 tree template, template_addr;
2729 tree etype = TREE_TYPE (expr);
2731 /* If EXPR is a constant of zero, we make a fat pointer that has a null
2732 pointer to the template and array. */
2733 if (integer_zerop (expr))
2734 return
2735 gnat_build_constructor
2736 (type,
2737 tree_cons (TYPE_FIELDS (type),
2738 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
2739 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2740 convert (build_pointer_type (template_type),
2741 expr),
2742 NULL_TREE)));
2744 /* If EXPR is a thin pointer, make the template and data from the record. */
2746 else if (TYPE_THIN_POINTER_P (etype))
2748 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
2750 expr = save_expr (expr);
2751 if (TREE_CODE (expr) == ADDR_EXPR)
2752 expr = TREE_OPERAND (expr, 0);
2753 else
2754 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
2756 template = build_component_ref (expr, NULL_TREE, fields, 0);
2757 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
2758 build_component_ref (expr, NULL_TREE,
2759 TREE_CHAIN (fields), 0));
2761 else
2762 /* Otherwise, build the constructor for the template. */
2763 template = build_template (template_type, TREE_TYPE (etype), expr);
2765 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
2767 /* The result is a CONSTRUCTOR for the fat pointer.
2769 If expr is an argument of a foreign convention subprogram, the type it
2770 points to is directly the component type. In this case, the expression
2771 type may not match the corresponding FIELD_DECL type at this point, so we
2772 call "convert" here to fix that up if necessary. This type consistency is
2773 required, for instance because it ensures that possible later folding of
2774 component_refs against this constructor always yields something of the
2775 same type as the initial reference.
2777 Note that the call to "build_template" above is still fine, because it
2778 will only refer to the provided template_type in this case. */
2779 return
2780 gnat_build_constructor
2781 (type, tree_cons (TYPE_FIELDS (type),
2782 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
2783 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2784 template_addr, NULL_TREE)));
2787 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
2788 is something that is a fat pointer, so convert EXPR to a fat pointer
2789 first if it is not already one. */
2791 static tree
2792 convert_to_thin_pointer (tree type, tree expr)
2794 if (! TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
2795 expr
2796 = convert_to_fat_pointer
2797 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
2799 /* We get the pointer to the data and use a NOP_EXPR to make it the
2800 proper GCC type. */
2801 expr
2802 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)), 0);
2803 expr = build1 (NOP_EXPR, type, expr);
2805 return expr;
2808 /* Create an expression whose value is that of EXPR,
2809 converted to type TYPE. The TREE_TYPE of the value
2810 is always TYPE. This function implements all reasonable
2811 conversions; callers should filter out those that are
2812 not permitted by the language being compiled. */
2814 tree
2815 convert (tree type, tree expr)
2817 enum tree_code code = TREE_CODE (type);
2818 tree etype = TREE_TYPE (expr);
2819 enum tree_code ecode = TREE_CODE (etype);
2820 tree tem;
2822 /* If EXPR is already the right type, we are done. */
2823 if (type == etype)
2824 return expr;
2825 /* If we're converting between two aggregate types that have the same main
2826 variant, just make a NOP_EXPR. */
2827 else if (AGGREGATE_TYPE_P (type)
2828 && TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
2829 return build1 (NOP_EXPR, type, expr);
2830 /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
2831 new one. */
2832 else if (TREE_CODE (expr) == WITH_RECORD_EXPR)
2833 return build (WITH_RECORD_EXPR, type,
2834 convert (type, TREE_OPERAND (expr, 0)),
2835 TREE_OPERAND (expr, 1));
2837 /* If the input type has padding, remove it by doing a component reference
2838 to the field. If the output type has padding, make a constructor
2839 to build the record. If both input and output have padding and are
2840 of variable size, do this as an unchecked conversion. */
2841 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
2842 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
2843 && (! TREE_CONSTANT (TYPE_SIZE (type))
2844 || ! TREE_CONSTANT (TYPE_SIZE (etype))))
2846 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
2848 /* If we have just converted to this padded type, just get
2849 the inner expression. */
2850 if (TREE_CODE (expr) == CONSTRUCTOR
2851 && CONSTRUCTOR_ELTS (expr) != 0
2852 && TREE_PURPOSE (CONSTRUCTOR_ELTS (expr)) == TYPE_FIELDS (etype))
2853 return TREE_VALUE (CONSTRUCTOR_ELTS (expr));
2854 else
2855 return convert (type, build_component_ref (expr, NULL_TREE,
2856 TYPE_FIELDS (etype), 0));
2858 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
2860 /* If we previously converted from another type and our type is
2861 of variable size, remove the conversion to avoid the need for
2862 variable-size temporaries. */
2863 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
2864 && ! TREE_CONSTANT (TYPE_SIZE (type)))
2865 expr = TREE_OPERAND (expr, 0);
2867 /* If we are just removing the padding from expr, convert the original
2868 object if we have variable size. That will avoid the need
2869 for some variable-size temporaries. */
2870 if (TREE_CODE (expr) == COMPONENT_REF
2871 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
2872 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
2873 && ! TREE_CONSTANT (TYPE_SIZE (type)))
2874 return convert (type, TREE_OPERAND (expr, 0));
2876 /* If the result type is a padded type with a self-referentially-sized
2877 field and the expression type is a record, do this as an
2878 unchecked conversion. */
2879 else if (TREE_CODE (etype) == RECORD_TYPE
2880 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
2881 return unchecked_convert (type, expr, 0);
2883 else
2884 return
2885 gnat_build_constructor (type,
2886 tree_cons (TYPE_FIELDS (type),
2887 convert (TREE_TYPE
2888 (TYPE_FIELDS (type)),
2889 expr),
2890 NULL_TREE));
2893 /* If the input is a biased type, adjust first. */
2894 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
2895 return convert (type, fold (build (PLUS_EXPR, TREE_TYPE (etype),
2896 fold (build1 (GNAT_NOP_EXPR,
2897 TREE_TYPE (etype), expr)),
2898 TYPE_MIN_VALUE (etype))));
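/* For example, a biased subtype covering 100 .. 107 fits in 3 bits and
   stores VALUE - 100, so a stored 5 denotes 105; the PLUS_EXPR above
   reconstructs 105 in the base type before the outer conversion is
   applied.  */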
2900 /* If the input is a left-justified modular type, we need to extract
2901 the actual object before converting it to any other type with the
2902 exception of an unconstrained array. */
2903 if (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)
2904 && code != UNCONSTRAINED_ARRAY_TYPE)
2905 return convert (type, build_component_ref (expr, NULL_TREE,
2906 TYPE_FIELDS (etype), 0));
2908 /* If converting to a type that contains a template, convert to the data
2909 type and then build the template. */
2910 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
2912 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
2914 /* If the source already has a template, get a reference to the
2915 associated array only, as we are going to rebuild a template
2916 for the target type anyway. */
2917 expr = maybe_unconstrained_array (expr);
2919 return
2920 gnat_build_constructor
2921 (type,
2922 tree_cons (TYPE_FIELDS (type),
2923 build_template (TREE_TYPE (TYPE_FIELDS (type)),
2924 obj_type, NULL_TREE),
2925 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2926 convert (obj_type, expr), NULL_TREE)));
2929 /* Some kinds of expressions require special processing before the
2930 generic conversions below. */
2931 switch (TREE_CODE (expr))
2933 case ERROR_MARK:
2934 return expr;
2936 case TRANSFORM_EXPR:
2937 case NULL_EXPR:
2938 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
2939 conversion in gnat_expand_expr. NULL_EXPR does not represent
2940 an actual value, so no conversion is needed. */
2941 expr = copy_node (expr);
2942 TREE_TYPE (expr) = type;
2943 return expr;
2945 case STRING_CST:
2946 case CONSTRUCTOR:
2947 /* If we are converting a STRING_CST to another constrained array type,
2948 just make a new one in the proper type. Likewise for a
2949 CONSTRUCTOR. */
2950 if (code == ecode && AGGREGATE_TYPE_P (etype)
2951 && ! (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
2952 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
2954 expr = copy_node (expr);
2955 TREE_TYPE (expr) = type;
2956 return expr;
2958 break;
2960 case COMPONENT_REF:
2961 /* If we are converting between two aggregate types of the same
2962 kind, size, mode, and alignment, just make a new COMPONENT_REF.
2963 This avoids unneeded conversions, which make reference computations
2964 more complex. */
2965 if (code == ecode && TYPE_MODE (type) == TYPE_MODE (etype)
2966 && AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
2967 && TYPE_ALIGN (type) == TYPE_ALIGN (etype)
2968 && operand_equal_p (TYPE_SIZE (type), TYPE_SIZE (etype), 0))
2969 return build (COMPONENT_REF, type, TREE_OPERAND (expr, 0),
2970 TREE_OPERAND (expr, 1));
2972 break;
2974 case UNCONSTRAINED_ARRAY_REF:
2975 /* Convert this to the type of the inner array by getting the address of
2976 the array from the template. */
2977 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
2978 build_component_ref (TREE_OPERAND (expr, 0),
2979 get_identifier ("P_ARRAY"),
2980 NULL_TREE, 0));
2981 etype = TREE_TYPE (expr);
2982 ecode = TREE_CODE (etype);
2983 break;
2985 case VIEW_CONVERT_EXPR:
2986 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
2987 && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
2988 return convert (type, TREE_OPERAND (expr, 0));
2989 break;
2991 case INDIRECT_REF:
2992 /* If both types are record types, just convert the pointer and
2993 make a new INDIRECT_REF.
2995 ??? Disable this for now since it causes problems with the
2996 code in build_binary_op for MODIFY_EXPR which wants to
2997 strip off conversions. But that code really is a mess and
2998 we need to do this a much better way some time. */
2999 if (0
3000 && (TREE_CODE (type) == RECORD_TYPE
3001 || TREE_CODE (type) == UNION_TYPE)
3002 && (TREE_CODE (etype) == RECORD_TYPE
3003 || TREE_CODE (etype) == UNION_TYPE)
3004 && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
3005 return build_unary_op (INDIRECT_REF, NULL_TREE,
3006 convert (build_pointer_type (type),
3007 TREE_OPERAND (expr, 0)));
3008 break;
3010 default:
3011 break;
3014 /* Check for converting to a pointer to an unconstrained array. */
3015 if (TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
3016 return convert_to_fat_pointer (type, expr);
3018 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
3019 || (code == INTEGER_CST && ecode == INTEGER_CST
3020 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
3021 return fold (build1 (NOP_EXPR, type, expr));
3023 switch (code)
3025 case VOID_TYPE:
3026 return build1 (CONVERT_EXPR, type, expr);
3028 case INTEGER_TYPE:
3029 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
3030 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
3031 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
3032 return unchecked_convert (type, expr, 0);
3033 else if (TYPE_BIASED_REPRESENTATION_P (type))
3034 return fold (build1 (CONVERT_EXPR, type,
3035 fold (build (MINUS_EXPR, TREE_TYPE (type),
3036 convert (TREE_TYPE (type), expr),
3037 TYPE_MIN_VALUE (type)))));
3039 /* ... fall through ... */
3041 case ENUMERAL_TYPE:
3042 return fold (convert_to_integer (type, expr));
3044 case POINTER_TYPE:
3045 case REFERENCE_TYPE:
3046 /* If converting between two pointers to records denoting
3047 both a template and type, adjust if needed to account
3048 for any differing offsets, since one might be negative. */
3049 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
3051 tree bit_diff
3052 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
3053 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
3054 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
3055 sbitsize_int (BITS_PER_UNIT));
3057 expr = build1 (NOP_EXPR, type, expr);
3058 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
3059 if (integer_zerop (byte_diff))
3060 return expr;
3062 return build_binary_op (PLUS_EXPR, type, expr,
3063 fold (convert_to_pointer (type, byte_diff)));
3066 /* If converting to a thin pointer, handle specially. */
3067 if (TYPE_THIN_POINTER_P (type)
3068 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != 0)
3069 return convert_to_thin_pointer (type, expr);
3071 /* If converting fat pointer to normal pointer, get the pointer to the
3072 array and then convert it. */
3073 else if (TYPE_FAT_POINTER_P (etype))
3074 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
3075 NULL_TREE, 0);
3077 return fold (convert_to_pointer (type, expr));
3079 case REAL_TYPE:
3080 return fold (convert_to_real (type, expr));
3082 case RECORD_TYPE:
3083 if (TYPE_LEFT_JUSTIFIED_MODULAR_P (type) && ! AGGREGATE_TYPE_P (etype))
3084 return
3085 gnat_build_constructor
3086 (type, tree_cons (TYPE_FIELDS (type),
3087 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3088 NULL_TREE));
3090 /* ... fall through ... */
3092 case ARRAY_TYPE:
3093 /* In these cases, assume the front-end has validated the conversion.
3094 If the conversion is valid, it will be a bit-wise conversion, so
3095 it can be viewed as an unchecked conversion. */
3096 return unchecked_convert (type, expr, 0);
3098 case UNION_TYPE:
3099 /* Just validate that the type is indeed that of a field
3100 of the type. Then make the simple conversion. */
3101 for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
3103 if (TREE_TYPE (tem) == etype)
3104 return build1 (CONVERT_EXPR, type, expr);
3105 else if (TREE_CODE (TREE_TYPE (tem)) == RECORD_TYPE
3106 && (TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (tem))
3107 || TYPE_IS_PADDING_P (TREE_TYPE (tem)))
3108 && TREE_TYPE (TYPE_FIELDS (TREE_TYPE (tem))) == etype)
3109 return build1 (CONVERT_EXPR, type,
3110 convert (TREE_TYPE (tem), expr));
3113 gigi_abort (413);
3115 case UNCONSTRAINED_ARRAY_TYPE:
3116 /* If EXPR is a constrained array, take its address, convert it to a
3117 fat pointer, and then dereference it. Likewise if EXPR is a
3118 record containing both a template and a constrained array.
3119 Note that a record representing a left-justified modular type
3120 always represents a packed constrained array. */
3121 if (ecode == ARRAY_TYPE
3122 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
3123 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
3124 || (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)))
3125 return
3126 build_unary_op
3127 (INDIRECT_REF, NULL_TREE,
3128 convert_to_fat_pointer (TREE_TYPE (type),
3129 build_unary_op (ADDR_EXPR,
3130 NULL_TREE, expr)));
3132 /* Do something very similar for converting one unconstrained
3133 array to another. */
3134 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
3135 return
3136 build_unary_op (INDIRECT_REF, NULL_TREE,
3137 convert (TREE_TYPE (type),
3138 build_unary_op (ADDR_EXPR,
3139 NULL_TREE, expr)));
3140 else
3141 gigi_abort (409);
3143 case COMPLEX_TYPE:
3144 return fold (convert_to_complex (type, expr));
3146 default:
3147 gigi_abort (410);
3151 /* Remove all conversions that are done in EXP. This includes converting
3152 from a padded type or to a left-justified modular type. If TRUE_ADDRESS
3153 is nonzero, always return the address of the containing object even if
3154 the address is not bit-aligned. */
3156 tree
3157 remove_conversions (tree exp, int true_address)
3159 switch (TREE_CODE (exp))
3161 case CONSTRUCTOR:
3162 if (true_address
3163 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3164 && TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
3165 return remove_conversions (TREE_VALUE (CONSTRUCTOR_ELTS (exp)), 1);
3166 break;
3168 case COMPONENT_REF:
3169 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3170 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3171 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3172 break;
3174 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
3175 case NOP_EXPR: case CONVERT_EXPR: case GNAT_NOP_EXPR:
3176 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3178 default:
3179 break;
3182 return exp;
3185 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
3186 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
3187 likewise return an expression pointing to the underlying array. */
3189 tree
3190 maybe_unconstrained_array (tree exp)
3192 enum tree_code code = TREE_CODE (exp);
3193 tree new;
3195 switch (TREE_CODE (TREE_TYPE (exp)))
3197 case UNCONSTRAINED_ARRAY_TYPE:
3198 if (code == UNCONSTRAINED_ARRAY_REF)
3201 = build_unary_op (INDIRECT_REF, NULL_TREE,
3202 build_component_ref (TREE_OPERAND (exp, 0),
3203 get_identifier ("P_ARRAY"),
3204 NULL_TREE, 0));
3205 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
3206 return new;
3209 else if (code == NULL_EXPR)
3210 return build1 (NULL_EXPR,
3211 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
3212 (TREE_TYPE (TREE_TYPE (exp))))),
3213 TREE_OPERAND (exp, 0));
3215 else if (code == WITH_RECORD_EXPR
3216 && (TREE_OPERAND (exp, 0)
3217 != (new = maybe_unconstrained_array
3218 (TREE_OPERAND (exp, 0)))))
3219 return build (WITH_RECORD_EXPR, TREE_TYPE (new), new,
3220 TREE_OPERAND (exp, 1));
3222 case RECORD_TYPE:
3223 /* If this is a padded type, convert to the unpadded type and see if
3224 it contains a template. */
3225 if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
3227 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
3228 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
3229 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
3230 return
3231 build_component_ref (new, NULL_TREE,
3232 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
3235 else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
3236 return
3237 build_component_ref (exp, NULL_TREE,
3238 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
3239 break;
3241 default:
3242 break;
3245 return exp;
3248 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
3249 If NOTRUNC_P is set, truncation operations should be suppressed. */
3251 tree
3252 unchecked_convert (tree type, tree expr, int notrunc_p)
3254 tree etype = TREE_TYPE (expr);
3256 /* If the expression is already the right type, we are done. */
3257 if (etype == type)
3258 return expr;
3260 /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
3261 new one. */
3262 if (TREE_CODE (expr) == WITH_RECORD_EXPR)
3263 return build (WITH_RECORD_EXPR, type,
3264 unchecked_convert (type, TREE_OPERAND (expr, 0), notrunc_p),
3265 TREE_OPERAND (expr, 1));
3267 /* If both types are integral, just do a normal conversion.
3268 Likewise for a conversion to an unconstrained array. */
3269 if ((((INTEGRAL_TYPE_P (type)
3270 && ! (TREE_CODE (type) == INTEGER_TYPE
3271 && TYPE_VAX_FLOATING_POINT_P (type)))
3272 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
3273 || (TREE_CODE (type) == RECORD_TYPE
3274 && TYPE_LEFT_JUSTIFIED_MODULAR_P (type)))
3275 && ((INTEGRAL_TYPE_P (etype)
3276 && ! (TREE_CODE (etype) == INTEGER_TYPE
3277 && TYPE_VAX_FLOATING_POINT_P (etype)))
3278 || (POINTER_TYPE_P (etype) && ! TYPE_THIN_POINTER_P (etype))
3279 || (TREE_CODE (etype) == RECORD_TYPE
3280 && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype))))
3281 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3283 tree rtype = type;
3285 if (TREE_CODE (etype) == INTEGER_TYPE
3286 && TYPE_BIASED_REPRESENTATION_P (etype))
3288 tree ntype = copy_type (etype);
3290 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
3291 TYPE_MAIN_VARIANT (ntype) = ntype;
3292 expr = build1 (GNAT_NOP_EXPR, ntype, expr);
3295 if (TREE_CODE (type) == INTEGER_TYPE
3296 && TYPE_BIASED_REPRESENTATION_P (type))
3298 rtype = copy_type (type);
3299 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
3300 TYPE_MAIN_VARIANT (rtype) = rtype;
3303 expr = convert (rtype, expr);
3304 if (type != rtype)
3305 expr = build1 (GNAT_NOP_EXPR, type, expr);
3308 /* If we are converting TO an integral type whose precision is not the
3309 same as its size, first unchecked convert to a record that contains
3310 an object of the output type. Then extract the field. */
3311 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
3312 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3313 GET_MODE_BITSIZE (TYPE_MODE (type))))
3315 tree rec_type = make_node (RECORD_TYPE);
3316 tree field = create_field_decl (get_identifier ("OBJ"), type,
3317 rec_type, 1, 0, 0, 0);
3319 TYPE_FIELDS (rec_type) = field;
3320 layout_type (rec_type);
3322 expr = unchecked_convert (rec_type, expr, notrunc_p);
3323 expr = build_component_ref (expr, NULL_TREE, field, 0);
3326 /* Similarly for an integral input type whose precision is not equal to its
3327 size. */
3328 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype) != 0
3329 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
3330 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3332 tree rec_type = make_node (RECORD_TYPE);
3333 tree field
3334 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
3335 1, 0, 0, 0);
3337 TYPE_FIELDS (rec_type) = field;
3338 layout_type (rec_type);
3340 expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
3341 expr = unchecked_convert (type, expr, notrunc_p);
3344 /* We have a special case when we are converting between two
3345 unconstrained array types. In that case, take the address,
3346 convert the fat pointer types, and dereference. */
3347 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
3348 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3349 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3350 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
3351 build_unary_op (ADDR_EXPR, NULL_TREE,
3352 expr)));
3353 else
3355 expr = maybe_unconstrained_array (expr);
3356 etype = TREE_TYPE (expr);
3357 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
3360 /* If the result is an integral type whose size is not equal to
3361 the size of the underlying machine type, sign- or zero-extend
3362 the result. We need not do this in the case where the input is
3363 an integral type of the same precision and signedness or if the output
3364 is a biased type or if both the input and output are unsigned. */
3365 if (! notrunc_p
3366 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
3367 && ! (TREE_CODE (type) == INTEGER_TYPE
3368 && TYPE_BIASED_REPRESENTATION_P (type))
3369 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3370 GET_MODE_BITSIZE (TYPE_MODE (type)))
3371 && ! (INTEGRAL_TYPE_P (etype)
3372 && TREE_UNSIGNED (type) == TREE_UNSIGNED (etype)
3373 && operand_equal_p (TYPE_RM_SIZE (type),
3374 (TYPE_RM_SIZE (etype) != 0
3375 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
3377 && ! (TREE_UNSIGNED (type) && TREE_UNSIGNED (etype)))
3379 tree base_type = gnat_type_for_mode (TYPE_MODE (type),
3380 TREE_UNSIGNED (type));
3381 tree shift_expr
3382 = convert (base_type,
3383 size_binop (MINUS_EXPR,
3384 bitsize_int
3385 (GET_MODE_BITSIZE (TYPE_MODE (type))),
3386 TYPE_RM_SIZE (type)));
3387 expr
3388 = convert (type,
3389 build_binary_op (RSHIFT_EXPR, base_type,
3390 build_binary_op (LSHIFT_EXPR, base_type,
3391 convert (base_type, expr),
3392 shift_expr),
3393 shift_expr));
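/* Concretely, for a signed target type with an RM size of 24 held in a
   32-bit machine mode, SHIFT_EXPR is 8 and the result is computed as
   (EXPR << 8) >> 8 in the 32-bit base type, which propagates bit 23 into
   the top byte (or clears it when the type is unsigned).  */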
3396 /* An unchecked conversion should never raise Constraint_Error. The code
3397 below assumes that GCC's conversion routines overflow the same way that
3398 the underlying hardware does. This is probably true. In the rare case
3399 when it is false, we can rely on the fact that such conversions are
3400 erroneous anyway. */
3401 if (TREE_CODE (expr) == INTEGER_CST)
3402 TREE_OVERFLOW (expr) = TREE_CONSTANT_OVERFLOW (expr) = 0;
3404 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
3405 mark the expression as no longer constant. */
3406 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3407 && ! operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype), 1))
3408 TREE_CONSTANT (expr) = 0;
3410 return expr;
3413 #include "gt-ada-utils.h"
3414 #include "gtype-ada.h"