gcc/ada/utils.c
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * *
10 * Copyright (C) 1992-2002, Free Software Foundation, Inc. *
11 * *
12 * GNAT is free software; you can redistribute it and/or modify it under *
13 * terms of the GNU General Public License as published by the Free Soft- *
14 * ware Foundation; either version 2, or (at your option) any later ver- *
15 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
16 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
17 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
18 * for more details. You should have received a copy of the GNU General *
19 * Public License distributed with GNAT; see file COPYING. If not, write *
20 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
21 * MA 02111-1307, USA. *
22 * *
23 * GNAT was originally developed by the GNAT team at New York University. *
24 * It is now maintained by Ada Core Technologies Inc (http://www.gnat.com). *
25 * *
26 ****************************************************************************/
28 #include "config.h"
29 #include "system.h"
30 #include "tree.h"
31 #include "flags.h"
32 #include "defaults.h"
33 #include "toplev.h"
34 #include "output.h"
35 #include "ggc.h"
36 #include "convert.h"
38 #include "ada.h"
39 #include "types.h"
40 #include "atree.h"
41 #include "elists.h"
42 #include "namet.h"
43 #include "nlists.h"
44 #include "stringt.h"
45 #include "uintp.h"
46 #include "fe.h"
47 #include "sinfo.h"
48 #include "einfo.h"
49 #include "ada-tree.h"
50 #include "gigi.h"
52 #ifndef MAX_FIXED_MODE_SIZE
53 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
54 #endif
56 #ifndef MAX_BITS_PER_WORD
57 #define MAX_BITS_PER_WORD BITS_PER_WORD
58 #endif
60 /* If nonzero, pretend we are allocating at global level. */
61 int force_global;
63 /* Tree nodes for the various types and decls we create. */
64 tree gnat_std_decls[(int) ADT_LAST];
66 /* Functions to call for each of the possible raise reasons. */
67 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
69 /* Associates a GNAT tree node to a GCC tree node. It is used in
70 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
71 of `save_gnu_tree' for more info. */
72 static tree *associate_gnat_to_gnu;
74 /* This listhead is used to record any global objects that need elaboration.
75 TREE_PURPOSE is the variable to be elaborated and TREE_VALUE is the
76 initial value to assign. */
78 static tree pending_elaborations;
80 /* This stack allows us to momentarily switch to generating elaboration
81 lists for an inner context. */
83 static struct e_stack {struct e_stack *next; tree elab_list; } *elist_stack;
85 /* This variable keeps a table for types for each precision so that we only
86 allocate each of them once. Signed and unsigned types are kept separate.
88 Note that these types are only used when fold-const requests something
89 special. Perhaps we should NOT share these types; we'll see how it
90 goes later. */
91 static tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
93 /* Likewise for float types, but record these by mode. */
94 static tree float_types[NUM_MACHINE_MODES];
96 /* For each binding contour we allocate a binding_level structure which records
97 the entities defined or declared in that contour. Contours include:
99 the global one
100 one for each subprogram definition
101 one for each compound statement (declare block)
103 Binding contours are used to create GCC tree BLOCK nodes. */
105 struct binding_level
107 /* A chain of ..._DECL nodes for all variables, constants, functions,
108 parameters and type declarations. These ..._DECL nodes are chained
109 through the TREE_CHAIN field. Note that these ..._DECL nodes are stored
110 in the reverse of the order supplied to be compatible with the
111 back-end. */
112 tree names;
113 /* For each level (except the global one), a chain of BLOCK nodes for all
114 the levels that were entered and exited one level down from this one. */
115 tree blocks;
116 /* The BLOCK node for this level, if one has been preallocated.
117 If 0, the BLOCK is allocated (if needed) when the level is popped. */
118 tree this_block;
119 /* The binding level containing this one (the enclosing binding level). */
120 struct binding_level *level_chain;
123 /* The binding level currently in effect. */
124 static struct binding_level *current_binding_level = NULL;
126 /* A chain of binding_level structures awaiting reuse. */
127 static struct binding_level *free_binding_level = NULL;
129 /* The outermost binding level. This binding level is created when the
130 compiler is started and it will exist through the entire compilation. */
131 static struct binding_level *global_binding_level;
133 /* Binding level structures are initialized by copying this one. */
134 static struct binding_level clear_binding_level = {NULL, NULL, NULL, NULL};
136 static tree merge_sizes PARAMS ((tree, tree, tree, int, int));
137 static tree compute_related_constant PARAMS ((tree, tree));
138 static tree split_plus PARAMS ((tree, tree *));
139 static int value_zerop PARAMS ((tree));
140 static tree float_type_for_size PARAMS ((int, enum machine_mode));
141 static tree convert_to_fat_pointer PARAMS ((tree, tree));
142 static tree convert_to_thin_pointer PARAMS ((tree, tree));
143 static tree make_descriptor_field PARAMS ((const char *,tree, tree,
144 tree));
145 static void mark_binding_level PARAMS ((PTR));
146 static void mark_e_stack PARAMS ((PTR));
148 /* Initialize the association of GNAT nodes to GCC trees. */
150 void
151 init_gnat_to_gnu ()
153 Node_Id gnat_node;
155 associate_gnat_to_gnu = (tree *) xmalloc (max_gnat_nodes * sizeof (tree));
156 ggc_add_tree_root (associate_gnat_to_gnu, max_gnat_nodes);
158 for (gnat_node = 0; gnat_node < max_gnat_nodes; gnat_node++)
159 associate_gnat_to_gnu[gnat_node] = NULL_TREE;
161 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
162 ggc_add_tree_root (&pending_elaborations, 1);
163 ggc_add_root ((PTR) &elist_stack, 1, sizeof (struct e_stack), mark_e_stack);
164 ggc_add_tree_root (&signed_and_unsigned_types[0][0],
165 (sizeof signed_and_unsigned_types
166 / sizeof signed_and_unsigned_types[0][0]));
167 ggc_add_tree_root (float_types, ARRAY_SIZE (float_types));
169 ggc_add_root (&current_binding_level, 1, sizeof current_binding_level,
170 mark_binding_level);
173 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
174 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
175 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
177 If GNU_DECL is zero, a previous association is to be reset. */
179 void
180 save_gnu_tree (gnat_entity, gnu_decl, no_check)
181 Entity_Id gnat_entity;
182 tree gnu_decl;
183 int no_check;
185 if (gnu_decl
186 && (associate_gnat_to_gnu[gnat_entity - First_Node_Id]
187 || (! no_check && ! DECL_P (gnu_decl))))
188 gigi_abort (401);
190 associate_gnat_to_gnu[gnat_entity - First_Node_Id] = gnu_decl;
193 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
194 Return the ..._DECL node that was associated with it. If there is no tree
195 node associated with GNAT_ENTITY, abort.
197 In some cases, such as delayed elaboration or expressions that need to
198 be elaborated only once, GNAT_ENTITY is really not an entity. */
200 tree
201 get_gnu_tree (gnat_entity)
202 Entity_Id gnat_entity;
204 if (! associate_gnat_to_gnu[gnat_entity - First_Node_Id])
205 gigi_abort (402);
207 return associate_gnat_to_gnu[gnat_entity - First_Node_Id];
210 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
213 present_gnu_tree (gnat_entity)
214 Entity_Id gnat_entity;
216 return (associate_gnat_to_gnu[gnat_entity - First_Node_Id] != NULL_TREE);
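/* Illustrative sketch, not part of the original source: the typical
   look-up-or-create pattern a caller in gigi follows with this table.
   GNAT_ENTITY is an Entity_Id being translated; make_gnu_decl is a
   hypothetical helper standing in for the real translation code.  */
#if 0
  if (present_gnu_tree (gnat_entity))
    gnu_decl = get_gnu_tree (gnat_entity);
  else
    {
      gnu_decl = make_gnu_decl (gnat_entity);
      save_gnu_tree (gnat_entity, gnu_decl, 0);
    }
#endif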
220 /* Return non-zero if we are currently in the global binding level. */
223 global_bindings_p ()
225 return (force_global != 0 || current_binding_level == global_binding_level
226 ? -1 : 0);
229 /* Return the list of declarations in the current level. Note that this list
230 is in reverse order (it has to be so for back-end compatibility). */
232 tree
233 getdecls ()
235 return current_binding_level->names;
238 /* Nonzero if the current level needs to have a BLOCK made. */
241 kept_level_p ()
243 return (current_binding_level->names != 0);
246 /* Enter a new binding level. The input parameter is ignored, but has to be
247 specified for back-end compatibility. */
249 void
250 pushlevel (ignore)
251 int ignore ATTRIBUTE_UNUSED;
253 struct binding_level *newlevel = NULL;
255 /* Reuse a struct for this binding level, if there is one. */
256 if (free_binding_level)
258 newlevel = free_binding_level;
259 free_binding_level = free_binding_level->level_chain;
261 else
262 newlevel
263 = (struct binding_level *) xmalloc (sizeof (struct binding_level));
265 *newlevel = clear_binding_level;
267 /* Add this level to the front of the chain (stack) of levels that are
268 active. */
269 newlevel->level_chain = current_binding_level;
270 current_binding_level = newlevel;
273 /* Exit a binding level.
274 Pop the level off, and restore the state of the identifier-decl mappings
275 that were in effect when this level was entered.
 277 If KEEP is nonzero, this level had explicit declarations, so create
 278 a "block" (a BLOCK node) for the level to record its declarations
 279 and subblocks for symbol table output.
281 If FUNCTIONBODY is nonzero, this level is the body of a function,
282 so create a block as if KEEP were set and also clear out all
283 label names.
285 If REVERSE is nonzero, reverse the order of decls before putting
286 them into the BLOCK. */
288 tree
289 poplevel (keep, reverse, functionbody)
290 int keep;
291 int reverse;
292 int functionbody;
 294 /* Points to a GCC BLOCK tree node. This is the BLOCK node constructed for the
295 binding level that we are about to exit and which is returned by this
296 routine. */
297 tree block = NULL_TREE;
298 tree decl_chain;
299 tree decl_node;
300 tree subblock_chain = current_binding_level->blocks;
301 tree subblock_node;
302 int block_previously_created;
304 /* Reverse the list of XXXX_DECL nodes if desired. Note that the ..._DECL
305 nodes chained through the `names' field of current_binding_level are in
 306 reverse order except for PARM_DECL nodes, which are explicitly stored in
307 the right order. */
308 current_binding_level->names
309 = decl_chain = (reverse) ? nreverse (current_binding_level->names)
310 : current_binding_level->names;
312 /* Output any nested inline functions within this block which must be
313 compiled because their address is needed. */
314 for (decl_node = decl_chain; decl_node; decl_node = TREE_CHAIN (decl_node))
315 if (TREE_CODE (decl_node) == FUNCTION_DECL
316 && ! TREE_ASM_WRITTEN (decl_node) && TREE_ADDRESSABLE (decl_node)
317 && DECL_INITIAL (decl_node) != 0)
319 push_function_context ();
320 output_inline_function (decl_node);
321 pop_function_context ();
324 block = 0;
325 block_previously_created = (current_binding_level->this_block != 0);
326 if (block_previously_created)
327 block = current_binding_level->this_block;
328 else if (keep || functionbody)
329 block = make_node (BLOCK);
330 if (block != 0)
332 BLOCK_VARS (block) = keep ? decl_chain : 0;
333 BLOCK_SUBBLOCKS (block) = subblock_chain;
 336 /* Record the BLOCK node just built as the subblock of its enclosing scope. */
337 for (subblock_node = subblock_chain; subblock_node;
338 subblock_node = TREE_CHAIN (subblock_node))
339 BLOCK_SUPERCONTEXT (subblock_node) = block;
341 /* Clear out the meanings of the local variables of this level. */
343 for (subblock_node = decl_chain; subblock_node;
344 subblock_node = TREE_CHAIN (subblock_node))
345 if (DECL_NAME (subblock_node) != 0)
346 /* If the identifier was used or addressed via a local extern decl,
347 don't forget that fact. */
348 if (DECL_EXTERNAL (subblock_node))
350 if (TREE_USED (subblock_node))
351 TREE_USED (DECL_NAME (subblock_node)) = 1;
352 if (TREE_ADDRESSABLE (subblock_node))
353 TREE_ADDRESSABLE (DECL_ASSEMBLER_NAME (subblock_node)) = 1;
357 /* Pop the current level, and free the structure for reuse. */
358 struct binding_level *level = current_binding_level;
359 current_binding_level = current_binding_level->level_chain;
360 level->level_chain = free_binding_level;
361 free_binding_level = level;
364 if (functionbody)
366 /* This is the top level block of a function. The ..._DECL chain stored
367 in BLOCK_VARS are the function's parameters (PARM_DECL nodes). Don't
368 leave them in the BLOCK because they are found in the FUNCTION_DECL
369 instead. */
370 DECL_INITIAL (current_function_decl) = block;
371 BLOCK_VARS (block) = 0;
373 else if (block)
375 if (!block_previously_created)
376 current_binding_level->blocks
377 = chainon (current_binding_level->blocks, block);
380 /* If we did not make a block for the level just exited, any blocks made for
381 inner levels (since they cannot be recorded as subblocks in that level)
382 must be carried forward so they will later become subblocks of something
383 else. */
384 else if (subblock_chain)
385 current_binding_level->blocks
386 = chainon (current_binding_level->blocks, subblock_chain);
387 if (block)
388 TREE_USED (block) = 1;
390 return block;
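/* Illustrative sketch, not part of the original source: how a declarative
   region is typically bracketed by these routines.  The details of what is
   declared in between are hypothetical.  */
#if 0
  tree gnu_block;

  pushlevel (0);
  /* ... pushdecl () each entity declared in the region ... */
  gnu_block = poplevel (kept_level_p (), 1, 0);
#endif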
393 /* Insert BLOCK at the end of the list of subblocks of the
394 current binding level. This is used when a BIND_EXPR is expanded,
395 to handle the BLOCK node inside the BIND_EXPR. */
397 void
398 insert_block (block)
399 tree block;
401 TREE_USED (block) = 1;
402 current_binding_level->blocks
403 = chainon (current_binding_level->blocks, block);
406 /* Set the BLOCK node for the innermost scope
407 (the one we are currently in). */
409 void
410 set_block (block)
411 tree block;
413 current_binding_level->this_block = block;
414 current_binding_level->names = chainon (current_binding_level->names,
415 BLOCK_VARS (block));
416 current_binding_level->blocks = chainon (current_binding_level->blocks,
417 BLOCK_SUBBLOCKS (block));
420 /* Records a ..._DECL node DECL as belonging to the current lexical scope.
421 Returns the ..._DECL node. */
423 tree
424 pushdecl (decl)
425 tree decl;
427 struct binding_level *b;
429 /* If at top level, there is no context. But PARM_DECLs always go in the
 430 level of their function. */
431 if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
433 b = global_binding_level;
434 DECL_CONTEXT (decl) = 0;
436 else
438 b = current_binding_level;
439 DECL_CONTEXT (decl) = current_function_decl;
442 /* Put the declaration on the list. The list of declarations is in reverse
443 order. The list will be reversed later if necessary. This needs to be
444 this way for compatibility with the back-end.
446 Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into the list. They
447 will cause trouble with the debugger and aren't needed anyway. */
448 if (TREE_CODE (decl) != TYPE_DECL
449 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
451 TREE_CHAIN (decl) = b->names;
452 b->names = decl;
455 /* For the declaration of a type, set its name if it either is not already
456 set, was set to an IDENTIFIER_NODE, indicating an internal name,
457 or if the previous type name was not derived from a source name.
458 We'd rather have the type named with a real name and all the pointer
 459 types to the same object have the same POINTER_TYPE node. The corresponding
 460 code in c-decl.c makes a copy of the type node here, but that may
461 cause us trouble with incomplete types, so let's not try it (at least
462 for now). */
464 if (TREE_CODE (decl) == TYPE_DECL
465 && DECL_NAME (decl) != 0
466 && (TYPE_NAME (TREE_TYPE (decl)) == 0
467 || TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == IDENTIFIER_NODE
468 || (TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == TYPE_DECL
469 && DECL_ARTIFICIAL (TYPE_NAME (TREE_TYPE (decl)))
470 && ! DECL_ARTIFICIAL (decl))))
471 TYPE_NAME (TREE_TYPE (decl)) = decl;
473 return decl;
476 /* Do little here. Set up the standard declarations later after the
477 front end has been run. */
479 void
480 gnat_init_decl_processing ()
482 lineno = 0;
484 /* incomplete_decl_finalize_hook is defined in toplev.c. It needs to be set
485 by each front end to the appropriate routine that handles incomplete
486 VAR_DECL nodes. This routine will be invoked by compile_file when a
487 VAR_DECL node of DECL_SIZE zero is encountered. */
488 incomplete_decl_finalize_hook = finish_incomplete_decl;
490 /* Make the binding_level structure for global names. */
491 current_function_decl = 0;
492 current_binding_level = 0;
493 free_binding_level = 0;
494 pushlevel (0);
495 global_binding_level = current_binding_level;
497 build_common_tree_nodes (0);
499 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
500 corresponding to the size of ptr_mode. Make this here since we need
501 this before we can expand the GNAT types. */
502 set_sizetype (type_for_size (GET_MODE_BITSIZE (ptr_mode), 0));
503 build_common_tree_nodes_2 (0);
505 pushdecl (build_decl (TYPE_DECL, get_identifier (SIZE_TYPE), sizetype));
507 /* We need to make the integer type before doing anything else.
508 We stitch this in to the appropriate GNAT type later. */
509 pushdecl (build_decl (TYPE_DECL, get_identifier ("integer"),
510 integer_type_node));
511 pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned char"),
512 char_type_node));
514 ptr_void_type_node = build_pointer_type (void_type_node);
518 /* Create the predefined scalar types such as `integer_type_node' needed
519 in the gcc back-end and initialize the global binding level. */
521 void
522 init_gigi_decls (long_long_float_type, exception_type)
523 tree long_long_float_type, exception_type;
525 tree endlink, decl;
526 unsigned int i;
528 /* Set the types that GCC and Gigi use from the front end. We would like
529 to do this for char_type_node, but it needs to correspond to the C
530 char type. */
531 if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
533 /* In this case, the builtin floating point types are VAX float,
534 so make up a type for use. */
535 longest_float_type_node = make_node (REAL_TYPE);
536 TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
537 layout_type (longest_float_type_node);
538 pushdecl (build_decl (TYPE_DECL, get_identifier ("longest float type"),
539 longest_float_type_node));
541 else
542 longest_float_type_node = TREE_TYPE (long_long_float_type);
544 except_type_node = TREE_TYPE (exception_type);
546 unsigned_type_node = type_for_size (INT_TYPE_SIZE, 1);
547 pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned int"),
548 unsigned_type_node));
550 void_type_decl_node
551 = pushdecl (build_decl (TYPE_DECL, get_identifier ("void"),
552 void_type_node));
554 void_ftype = build_function_type (void_type_node, NULL_TREE);
555 ptr_void_ftype = build_pointer_type (void_ftype);
557 /* Now declare runtime functions. */
558 endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);
560 /* malloc is a function declaration tree for a function to allocate
561 memory. */
562 malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
563 NULL_TREE,
564 build_function_type (ptr_void_type_node,
565 tree_cons (NULL_TREE,
566 sizetype,
567 endlink)),
568 NULL_TREE, 0, 1, 1, 0);
570 /* free is a function declaration tree for a function to free memory. */
572 free_decl
573 = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
574 build_function_type (void_type_node,
575 tree_cons (NULL_TREE,
576 ptr_void_type_node,
577 endlink)),
578 NULL_TREE, 0, 1, 1, 0);
580 /* Make the types and functions used for exception processing. */
581 jmpbuf_type
582 = build_array_type (type_for_mode (Pmode, 0),
583 build_index_type (build_int_2 (5, 0)));
584 pushdecl (build_decl (TYPE_DECL, get_identifier ("JMPBUF_T"), jmpbuf_type));
585 jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);
587 /* Functions to get and set the jumpbuf pointer for the current thread. */
588 get_jmpbuf_decl
589 = create_subprog_decl
590 (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
591 NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
592 NULL_TREE, 0, 1, 1, 0);
594 set_jmpbuf_decl
595 = create_subprog_decl
596 (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
597 NULL_TREE,
598 build_function_type (void_type_node,
599 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
600 NULL_TREE, 0, 1, 1, 0);
602 /* Function to get the current exception. */
603 get_excptr_decl
604 = create_subprog_decl
605 (get_identifier ("system__soft_links__get_gnat_exception"),
606 NULL_TREE,
607 build_function_type (build_pointer_type (except_type_node), NULL_TREE),
608 NULL_TREE, 0, 1, 1, 0);
610 /* Functions that raise exceptions. */
611 raise_nodefer_decl
612 = create_subprog_decl
613 (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
614 build_function_type (void_type_node,
615 tree_cons (NULL_TREE,
616 build_pointer_type (except_type_node),
617 endlink)),
618 NULL_TREE, 0, 1, 1, 0);
620 /* If in no exception handlers mode, all raise statements are redirected to
621 __gnat_last_chance_handler. No need to redefine raise_nodefer_decl, since
622 this procedure will never be called in this mode. */
623 if (No_Exception_Handlers_Set ())
625 decl
626 = create_subprog_decl
627 (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
628 build_function_type (void_type_node,
629 tree_cons (NULL_TREE,
630 build_pointer_type (char_type_node),
631 tree_cons (NULL_TREE,
632 integer_type_node,
633 endlink))),
634 NULL_TREE, 0, 1, 1, 0);
636 for (i = 0; i < sizeof gnat_raise_decls / sizeof gnat_raise_decls[0];
637 i++)
638 gnat_raise_decls[i] = decl;
640 else
641 /* Otherwise, make one decl for each exception reason. */
642 for (i = 0; i < sizeof gnat_raise_decls / sizeof gnat_raise_decls[0]; i++)
644 char name[17];
646 sprintf (name, "__gnat_rcheck_%.2d", i);
647 gnat_raise_decls[i]
648 = create_subprog_decl
649 (get_identifier (name), NULL_TREE,
650 build_function_type (void_type_node,
651 tree_cons (NULL_TREE,
652 build_pointer_type
653 (char_type_node),
654 tree_cons (NULL_TREE,
655 integer_type_node,
656 endlink))),
657 NULL_TREE, 0, 1, 1, 0);
660 /* Indicate that these never return. */
661 TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
662 TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
663 TREE_TYPE (raise_nodefer_decl)
664 = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
665 TYPE_QUAL_VOLATILE);
667 for (i = 0; i < sizeof gnat_raise_decls / sizeof gnat_raise_decls[0]; i++)
669 TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
670 TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
671 TREE_TYPE (gnat_raise_decls[i])
672 = build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
673 TYPE_QUAL_VOLATILE);
676 /* setjmp returns an integer and has one operand, which is a pointer to
677 a jmpbuf. */
678 setjmp_decl
679 = create_subprog_decl
680 (get_identifier ("setjmp"), NULL_TREE,
681 build_function_type (integer_type_node,
682 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
683 NULL_TREE, 0, 1, 1, 0);
685 DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
686 DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;
688 main_identifier_node = get_identifier ("main");
690 ggc_add_tree_root (gnat_std_decls, ARRAY_SIZE (gnat_std_decls));
691 ggc_add_tree_root (gnat_raise_decls, ARRAY_SIZE (gnat_raise_decls));
694 /* This routine is called in tree.c to print an error message for invalid use
695 of an incomplete type. */
697 void
698 incomplete_type_error (dont_care_1, dont_care_2)
699 tree dont_care_1 ATTRIBUTE_UNUSED;
700 tree dont_care_2 ATTRIBUTE_UNUSED;
702 gigi_abort (404);
705 /* This function is called indirectly from toplev.c to handle incomplete
706 declarations, i.e. VAR_DECL nodes whose DECL_SIZE is zero. To be precise,
707 compile_file in toplev.c makes an indirect call through the function pointer
708 incomplete_decl_finalize_hook which is initialized to this routine in
709 init_decl_processing. */
711 void
712 finish_incomplete_decl (dont_care)
713 tree dont_care ATTRIBUTE_UNUSED;
715 gigi_abort (405);
718 /* Given a record type (RECORD_TYPE) and a chain of FIELD_DECL
719 nodes (FIELDLIST), finish constructing the record or union type.
720 If HAS_REP is nonzero, this record has a rep clause; don't call
721 layout_type but merely set the size and alignment ourselves.
722 If DEFER_DEBUG is nonzero, do not call the debugging routines
723 on this type; it will be done later. */
725 void
726 finish_record_type (record_type, fieldlist, has_rep, defer_debug)
727 tree record_type;
728 tree fieldlist;
729 int has_rep;
730 int defer_debug;
732 enum tree_code code = TREE_CODE (record_type);
733 tree ada_size = bitsize_zero_node;
734 tree size = bitsize_zero_node;
735 tree size_unit = size_zero_node;
736 int var_size = 0;
737 tree field;
739 TYPE_FIELDS (record_type) = fieldlist;
741 if (TYPE_NAME (record_type) != 0
742 && TREE_CODE (TYPE_NAME (record_type)) == TYPE_DECL)
743 TYPE_STUB_DECL (record_type) = TYPE_NAME (record_type);
744 else
745 TYPE_STUB_DECL (record_type)
746 = pushdecl (build_decl (TYPE_DECL, TYPE_NAME (record_type),
747 record_type));
749 /* We don't need both the typedef name and the record name output in
750 the debugging information, since they are the same. */
751 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
753 /* Globally initialize the record first. If this is a rep'ed record,
754 that just means some initializations; otherwise, layout the record. */
756 if (has_rep)
758 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
759 TYPE_MODE (record_type) = BLKmode;
760 if (TYPE_SIZE (record_type) == 0)
762 TYPE_SIZE (record_type) = bitsize_zero_node;
763 TYPE_SIZE_UNIT (record_type) = size_zero_node;
766 else
 768 /* Ensure there isn't a size already set. There can be one in an error
769 case where there is a rep clause but all fields have errors and
770 no longer have a position. */
771 TYPE_SIZE (record_type) = 0;
772 layout_type (record_type);
775 /* At this point, the position and size of each field is known. It was
776 either set before entry by a rep clause, or by laying out the type
777 above. We now make a pass through the fields (in reverse order for
778 QUAL_UNION_TYPEs) to compute the Ada size; the GCC size and alignment
779 (for rep'ed records that are not padding types); and the mode (for
780 rep'ed records). */
782 if (code == QUAL_UNION_TYPE)
783 fieldlist = nreverse (fieldlist);
785 for (field = fieldlist; field; field = TREE_CHAIN (field))
787 tree type = TREE_TYPE (field);
788 tree this_size = DECL_SIZE (field);
789 tree this_size_unit = DECL_SIZE_UNIT (field);
790 tree this_ada_size = DECL_SIZE (field);
792 /* We need to make an XVE/XVU record if any field has variable size,
 793 whether or not the record does. For example, if we have a union,
794 it may be that all fields, rounded up to the alignment, have the
795 same size, in which case we'll use that size. But the debug
796 output routines (except Dwarf2) won't be able to output the fields,
797 so we need to make the special record. */
798 if (TREE_CODE (this_size) != INTEGER_CST)
799 var_size = 1;
801 if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
802 || TREE_CODE (type) == QUAL_UNION_TYPE)
803 && ! TYPE_IS_FAT_POINTER_P (type)
804 && ! TYPE_CONTAINS_TEMPLATE_P (type)
805 && TYPE_ADA_SIZE (type) != 0)
806 this_ada_size = TYPE_ADA_SIZE (type);
808 if (has_rep && ! DECL_BIT_FIELD (field))
809 TYPE_ALIGN (record_type)
810 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
812 switch (code)
814 case UNION_TYPE:
815 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
816 size = size_binop (MAX_EXPR, size, this_size);
817 size_unit = size_binop (MAX_EXPR, size_unit, this_size_unit);
818 break;
820 case QUAL_UNION_TYPE:
821 ada_size
822 = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
823 this_ada_size, ada_size));
824 size = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
825 this_size, size));
826 size_unit = fold (build (COND_EXPR, sizetype, DECL_QUALIFIER (field),
827 this_size_unit, size_unit));
828 break;
830 case RECORD_TYPE:
831 /* Since we know here that all fields are sorted in order of
832 increasing bit position, the size of the record is one
833 higher than the ending bit of the last field processed
834 unless we have a rep clause, since in that case we might
835 have a field outside a QUAL_UNION_TYPE that has a higher ending
836 position. So use a MAX in that case. Also, if this field is a
837 QUAL_UNION_TYPE, we need to take into account the previous size in
838 the case of empty variants. */
839 ada_size
840 = merge_sizes (ada_size, bit_position (field), this_ada_size,
841 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
842 size = merge_sizes (size, bit_position (field), this_size,
843 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
844 size_unit
845 = merge_sizes (size_unit, byte_position (field), this_size_unit,
846 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
847 break;
849 default:
850 abort ();
854 if (code == QUAL_UNION_TYPE)
855 nreverse (fieldlist);
857 /* If this is a padding record, we never want to make the size smaller than
858 what was specified in it, if any. */
859 if (TREE_CODE (record_type) == RECORD_TYPE
860 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type) != 0)
862 size = TYPE_SIZE (record_type);
863 size_unit = TYPE_SIZE_UNIT (record_type);
866 /* Now set any of the values we've just computed that apply. */
867 if (! TYPE_IS_FAT_POINTER_P (record_type)
868 && ! TYPE_CONTAINS_TEMPLATE_P (record_type))
869 TYPE_ADA_SIZE (record_type) = ada_size;
871 #ifdef ROUND_TYPE_SIZE
872 size = ROUND_TYPE_SIZE (record_type, size, TYPE_ALIGN (record_type));
 873 size_unit = ROUND_TYPE_SIZE_UNIT (record_type, size_unit,
874 TYPE_ALIGN (record_type) / BITS_PER_UNIT);
875 #else
876 size = round_up (size, TYPE_ALIGN (record_type));
877 size_unit = round_up (size_unit, TYPE_ALIGN (record_type) / BITS_PER_UNIT);
878 #endif
880 if (has_rep
881 && ! (TREE_CODE (record_type) == RECORD_TYPE
882 && TYPE_IS_PADDING_P (record_type)
883 && TREE_CODE (size) != INTEGER_CST
884 && contains_placeholder_p (size)))
886 TYPE_SIZE (record_type) = size;
887 TYPE_SIZE_UNIT (record_type) = size_unit;
890 if (has_rep)
891 compute_record_mode (record_type);
893 if (! defer_debug)
895 /* If this record is of variable size, rename it so that the
896 debugger knows it is and make a new, parallel, record
897 that tells the debugger how the record is laid out. See
898 exp_dbug.ads. */
899 if (var_size)
901 tree new_record_type
902 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
903 ? UNION_TYPE : TREE_CODE (record_type));
904 tree orig_id = DECL_NAME (TYPE_STUB_DECL (record_type));
905 tree new_id
906 = concat_id_with_name (orig_id,
907 TREE_CODE (record_type) == QUAL_UNION_TYPE
908 ? "XVU" : "XVE");
909 tree last_pos = bitsize_zero_node;
910 tree old_field;
912 TYPE_NAME (new_record_type) = new_id;
913 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
914 TYPE_STUB_DECL (new_record_type)
915 = pushdecl (build_decl (TYPE_DECL, new_id, new_record_type));
916 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
917 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
918 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
919 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
921 /* Now scan all the fields, replacing each field with a new
922 field corresponding to the new encoding. */
923 for (old_field = TYPE_FIELDS (record_type); old_field != 0;
924 old_field = TREE_CHAIN (old_field))
926 tree field_type = TREE_TYPE (old_field);
927 tree field_name = DECL_NAME (old_field);
928 tree new_field;
929 tree curpos = bit_position (old_field);
930 int var = 0;
931 unsigned int align = 0;
932 tree pos;
934 /* See how the position was modified from the last position.
936 There are two basic cases we support: a value was added
937 to the last position or the last position was rounded to
 938 a boundary and then something was added. Check for the
939 first case first. If not, see if there is any evidence
940 of rounding. If so, round the last position and try
941 again.
943 If this is a union, the position can be taken as zero. */
945 if (TREE_CODE (new_record_type) == UNION_TYPE)
946 pos = bitsize_zero_node, align = 0;
947 else
948 pos = compute_related_constant (curpos, last_pos);
950 if (pos == 0 && TREE_CODE (curpos) == MULT_EXPR
951 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST)
953 align = TREE_INT_CST_LOW (TREE_OPERAND (curpos, 1));
954 pos = compute_related_constant (curpos,
955 round_up (last_pos, align));
957 else if (pos == 0 && TREE_CODE (curpos) == PLUS_EXPR
958 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
959 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
960 && host_integerp (TREE_OPERAND
 961 (TREE_OPERAND (curpos, 0), 1),
 962 1))
964 align
965 = tree_low_cst
966 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
967 pos = compute_related_constant (curpos,
968 round_up (last_pos, align));
971 /* If we can't compute a position, set it to zero.
973 ??? We really should abort here, but it's too much work
974 to get this correct for all cases. */
976 if (pos == 0)
977 pos = bitsize_zero_node;
979 /* See if this type is variable-size and make a new type
980 and indicate the indirection if so. */
981 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
983 field_type = build_pointer_type (field_type);
984 var = 1;
987 /* Make a new field name, if necessary. */
988 if (var || align != 0)
990 char suffix[6];
992 if (align != 0)
993 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
994 align / BITS_PER_UNIT);
995 else
996 strcpy (suffix, "XVL");
998 field_name = concat_id_with_name (field_name, suffix);
1001 new_field = create_field_decl (field_name, field_type,
1002 new_record_type, 0,
1003 DECL_SIZE (old_field), pos, 0);
1004 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
1005 TYPE_FIELDS (new_record_type) = new_field;
1007 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1008 zero. The only time it's not the last field of the record
1009 is when there are other components at fixed positions after
1010 it (meaning there was a rep clause for every field) and we
1011 want to be able to encode them. */
1012 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1013 (TREE_CODE (TREE_TYPE (old_field))
1014 == QUAL_UNION_TYPE)
1015 ? bitsize_zero_node
1016 : DECL_SIZE (old_field));
1019 TYPE_FIELDS (new_record_type)
1020 = nreverse (TYPE_FIELDS (new_record_type));
1022 rest_of_type_compilation (new_record_type, global_bindings_p ());
1025 rest_of_type_compilation (record_type, global_bindings_p ());
 1029 /* Utility function of above to merge LAST_SIZE, the previous size of a record,
 1030 with FIRST_BIT and SIZE, which describe a field. SPECIAL is nonzero
1031 if this represents a QUAL_UNION_TYPE in which case we must look for
1032 COND_EXPRs and replace a value of zero with the old size. If HAS_REP
1033 is nonzero, we must take the MAX of the end position of this field
1034 with LAST_SIZE. In all other cases, we use FIRST_BIT plus SIZE.
1036 We return an expression for the size. */
1038 static tree
1039 merge_sizes (last_size, first_bit, size, special, has_rep)
1040 tree last_size;
1041 tree first_bit, size;
1042 int special;
1043 int has_rep;
1045 tree type = TREE_TYPE (last_size);
1047 if (! special || TREE_CODE (size) != COND_EXPR)
1049 tree new = size_binop (PLUS_EXPR, first_bit, size);
1051 if (has_rep)
1052 new = size_binop (MAX_EXPR, last_size, new);
1054 return new;
1057 return fold (build (COND_EXPR, type, TREE_OPERAND (size, 0),
1058 integer_zerop (TREE_OPERAND (size, 1))
1059 ? last_size : merge_sizes (last_size, first_bit,
1060 TREE_OPERAND (size, 1),
1061 1, has_rep),
1062 integer_zerop (TREE_OPERAND (size, 2))
1063 ? last_size : merge_sizes (last_size, first_bit,
1064 TREE_OPERAND (size, 2),
1065 1, has_rep)));
1068 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1069 related by the addition of a constant. Return that constant if so. */
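/* For example (not in the original source): if OP0 is `N + 12' and OP1 is
   `N + 4' for the same variable-sized expression N, the call returns a
   constant node for 8; if no such constant relation can be found, it
   returns 0.  */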
1071 static tree
1072 compute_related_constant (op0, op1)
1073 tree op0, op1;
1075 tree op0_var, op1_var;
1076 tree op0_con = split_plus (op0, &op0_var);
1077 tree op1_con = split_plus (op1, &op1_var);
1078 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1080 if (operand_equal_p (op0_var, op1_var, 0))
1081 return result;
1082 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1083 return result;
1084 else
1085 return 0;
1088 /* Utility function of above to split a tree OP which may be a sum, into a
1089 constant part, which is returned, and a variable part, which is stored
1090 in *PVAR. *PVAR may be size_zero_node. All operations must be of
1091 sizetype. */
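/* For example (not in the original source): splitting `A + 16' stores (an
   expression equivalent to) the variable part A in *PVAR and returns a
   constant 16; splitting a plain INTEGER_CST stores bitsize_zero_node in
   *PVAR and returns the constant itself.  */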
1093 static tree
1094 split_plus (in, pvar)
1095 tree in;
1096 tree *pvar;
1098 tree result = bitsize_zero_node;
1100 while (TREE_CODE (in) == NON_LVALUE_EXPR)
1101 in = TREE_OPERAND (in, 0);
1103 *pvar = in;
1104 if (TREE_CODE (in) == INTEGER_CST)
1106 *pvar = bitsize_zero_node;
1107 return in;
1109 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1111 tree lhs_var, rhs_var;
1112 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1113 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1115 result = size_binop (PLUS_EXPR, result, lhs_con);
1116 result = size_binop (TREE_CODE (in), result, rhs_con);
1118 if (lhs_var == TREE_OPERAND (in, 0)
1119 && rhs_var == TREE_OPERAND (in, 1))
1120 return bitsize_zero_node;
1122 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1123 return result;
1125 else
1126 return bitsize_zero_node;
1129 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1130 subprogram. If it is void_type_node, then we are dealing with a procedure,
1131 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1132 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1133 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1134 RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
1135 object. RETURNS_BY_REF is nonzero if the function returns by reference.
1136 RETURNS_WITH_DSP is nonzero if the function is to return with a
1137 depressed stack pointer. */
1139 tree
1140 create_subprog_type (return_type, param_decl_list, cico_list,
1141 returns_unconstrained, returns_by_ref, returns_with_dsp)
1142 tree return_type;
1143 tree param_decl_list;
1144 tree cico_list;
1145 int returns_unconstrained, returns_by_ref, returns_with_dsp;
1147 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1148 the subprogram formal parameters. This list is generated by traversing the
1149 input list of PARM_DECL nodes. */
1150 tree param_type_list = NULL;
1151 tree param_decl;
1152 tree type;
1154 for (param_decl = param_decl_list; param_decl;
1155 param_decl = TREE_CHAIN (param_decl))
1156 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1157 param_type_list);
1159 /* The list of the function parameter types has to be terminated by the void
1160 type to signal to the back-end that we are not dealing with a variable
1161 parameter subprogram, but that the subprogram has a fixed number of
1162 parameters. */
1163 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1165 /* The list of argument types has been created in reverse
1166 so nreverse it. */
1167 param_type_list = nreverse (param_type_list);
1169 type = build_function_type (return_type, param_type_list);
1171 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
 1172 or the new type should have one, make a copy of TYPE. Likewise for
1173 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1174 if (TYPE_CI_CO_LIST (type) != 0 || cico_list != 0
1175 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1176 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref)
1177 type = copy_type (type);
1179 TYPE_CI_CO_LIST (type) = cico_list;
1180 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1181 TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
1182 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1183 return type;
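/* Illustrative sketch, not part of the original source: a hypothetical call
   building the type of a parameterless procedure with no copy-in/copy-out
   list and none of the special return conventions.  */
#if 0
  gnu_type = create_subprog_type (void_type_node, NULL_TREE, NULL_TREE,
                                  0, 0, 0);
#endif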
1186 /* Return a copy of TYPE but safe to modify in any way. */
1188 tree
1189 copy_type (type)
1190 tree type;
1192 tree new = copy_node (type);
1194 /* copy_node clears this field instead of copying it, because it is
1195 aliased with TREE_CHAIN. */
1196 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
1198 TYPE_POINTER_TO (new) = 0;
1199 TYPE_REFERENCE_TO (new) = 0;
1200 TYPE_MAIN_VARIANT (new) = new;
1201 TYPE_NEXT_VARIANT (new) = 0;
1203 return new;
1206 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1207 TYPE_INDEX_TYPE is INDEX. */
1209 tree
1210 create_index_type (min, max, index)
1211 tree min, max;
1212 tree index;
1214 /* First build a type for the desired range. */
1215 tree type = build_index_2_type (min, max);
1217 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1218 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1219 is set, but not to INDEX, make a copy of this type with the requested
1220 index type. Note that we have no way of sharing these types, but that's
1221 only a small hole. */
1222 if (TYPE_INDEX_TYPE (type) == index)
1223 return type;
1224 else if (TYPE_INDEX_TYPE (type) != 0)
1225 type = copy_type (type);
1227 TYPE_INDEX_TYPE (type) = index;
1228 return type;
1231 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type (a character
1232 string) and TYPE is a ..._TYPE node giving its data type.
1233 ARTIFICIAL_P is nonzero if this is a declaration that was generated
1234 by the compiler. DEBUG_INFO_P is nonzero if we need to write debugging
1235 information about this type. */
1237 tree
1238 create_type_decl (type_name, type, attr_list, artificial_p, debug_info_p)
1239 tree type_name;
1240 tree type;
1241 struct attrib *attr_list;
1242 int artificial_p;
1243 int debug_info_p;
1245 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1246 enum tree_code code = TREE_CODE (type);
1248 DECL_ARTIFICIAL (type_decl) = artificial_p;
1249 pushdecl (type_decl);
1250 process_attributes (type_decl, attr_list);
1252 /* Pass type declaration information to the debugger unless this is an
1253 UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
 1254 an ENUMERAL_TYPE or RECORD_TYPE, which are handled separately,
1255 a dummy type, which will be completed later, or a type for which
1256 debugging information was not requested. */
1257 if (code == UNCONSTRAINED_ARRAY_TYPE || TYPE_IS_DUMMY_P (type)
1258 || ! debug_info_p)
1259 DECL_IGNORED_P (type_decl) = 1;
1260 else if (code != ENUMERAL_TYPE && code != RECORD_TYPE
1261 && ! ((code == POINTER_TYPE || code == REFERENCE_TYPE)
1262 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1263 rest_of_decl_compilation (type_decl, NULL, global_bindings_p (), 0);
1265 return type_decl;
1268 /* Returns a GCC VAR_DECL node. VAR_NAME gives the name of the variable.
1269 ASM_NAME is its assembler name (if provided). TYPE is its data type
1270 (a GCC ..._TYPE node). VAR_INIT is the GCC tree for an optional initial
1271 expression; NULL_TREE if none.
1273 CONST_FLAG is nonzero if this variable is constant.
1275 PUBLIC_FLAG is nonzero if this definition is to be made visible outside of
1276 the current compilation unit. This flag should be set when processing the
1277 variable definitions in a package specification. EXTERN_FLAG is nonzero
1278 when processing an external variable declaration (as opposed to a
1279 definition: no storage is to be allocated for the variable here).
1281 STATIC_FLAG is only relevant when not at top level. In that case
1282 it indicates whether to always allocate storage to the variable. */
1284 tree
1285 create_var_decl (var_name, asm_name, type, var_init, const_flag, public_flag,
1286 extern_flag, static_flag, attr_list)
1287 tree var_name;
1288 tree asm_name;
1289 tree type;
1290 tree var_init;
1291 int const_flag;
1292 int public_flag;
1293 int extern_flag;
1294 int static_flag;
1295 struct attrib *attr_list;
1297 int init_const
 1298 = (var_init == 0
 1299 ? 0
1300 : (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
1301 && (global_bindings_p () || static_flag
1302 ? 0 != initializer_constant_valid_p (var_init,
1303 TREE_TYPE (var_init))
1304 : TREE_CONSTANT (var_init))));
1305 tree var_decl
1306 = build_decl ((const_flag && init_const
1307 /* Only make a CONST_DECL for sufficiently-small objects.
 1308 We consider complex double "sufficiently-small". */
1309 && TYPE_SIZE (type) != 0
1310 && host_integerp (TYPE_SIZE_UNIT (type), 1)
1311 && 0 >= compare_tree_int (TYPE_SIZE_UNIT (type),
1312 GET_MODE_SIZE (DCmode)))
1313 ? CONST_DECL : VAR_DECL, var_name, type);
1314 tree assign_init = 0;
1316 /* If this is external, throw away any initializations unless this is a
1317 CONST_DECL (meaning we have a constant); they will be done elsewhere. If
1318 we are defining a global here, leave a constant initialization and save
1319 any variable elaborations for the elaboration routine. Otherwise, if
1320 the initializing expression is not the same as TYPE, generate the
1321 initialization with an assignment statement, since it knows how
 1322 to do the required adjustments. If we are just annotating types,
1323 throw away the initialization if it isn't a constant. */
1325 if ((extern_flag && TREE_CODE (var_decl) != CONST_DECL)
1326 || (type_annotate_only && var_init != 0 && ! TREE_CONSTANT (var_init)))
1327 var_init = 0;
1329 if (global_bindings_p () && var_init != 0 && ! init_const)
1331 add_pending_elaborations (var_decl, var_init);
1332 var_init = 0;
1335 else if (var_init != 0
1336 && ((TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
1337 != TYPE_MAIN_VARIANT (type))
1338 || (static_flag && ! init_const)))
1339 assign_init = var_init, var_init = 0;
1341 DECL_COMMON (var_decl) = !flag_no_common;
1342 DECL_INITIAL (var_decl) = var_init;
1343 TREE_READONLY (var_decl) = const_flag;
1344 DECL_EXTERNAL (var_decl) = extern_flag;
1345 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1346 TREE_CONSTANT (var_decl) = TREE_CODE (var_decl) == CONST_DECL;
1347 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1348 = TYPE_VOLATILE (type);
1350 /* At the global binding level we need to allocate static storage for the
 1351 variable if and only if it's not external. If we are not at the top level
1352 we allocate automatic storage unless requested not to. */
1353 TREE_STATIC (var_decl) = global_bindings_p () ? !extern_flag : static_flag;
1355 if (asm_name != 0)
1356 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1358 process_attributes (var_decl, attr_list);
1360 /* Add this decl to the current binding level and generate any
1361 needed code and RTL. */
1362 var_decl = pushdecl (var_decl);
1363 expand_decl (var_decl);
1365 if (DECL_CONTEXT (var_decl) != 0)
1366 expand_decl_init (var_decl);
1368 /* If this is volatile, force it into memory. */
1369 if (TREE_SIDE_EFFECTS (var_decl))
1370 mark_addressable (var_decl);
1372 if (TREE_CODE (var_decl) != CONST_DECL)
1373 rest_of_decl_compilation (var_decl, 0, global_bindings_p (), 0);
1375 if (assign_init != 0)
1377 /* If VAR_DECL has a padded type, convert it to the unpadded
1378 type so the assignment is done properly. */
1379 tree lhs = var_decl;
1381 if (TREE_CODE (TREE_TYPE (lhs)) == RECORD_TYPE
1382 && TYPE_IS_PADDING_P (TREE_TYPE (lhs)))
1383 lhs = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (lhs))), lhs);
1385 expand_expr_stmt (build_binary_op (MODIFY_EXPR, NULL_TREE, lhs,
1386 assign_init));
1389 return var_decl;
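/* Illustrative sketch, not part of the original source: a hypothetical call
   creating a public, statically allocated integer variable with no
   initializer, no assembler name and no attributes.  */
#if 0
  gnu_var = create_var_decl (get_identifier ("counter"), NULL_TREE,
                             integer_type_node, NULL_TREE,
                             0, 1, 0, 1, NULL);
#endif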
 1392 /* Returns a FIELD_DECL node. FIELD_NAME is the field name, FIELD_TYPE is its
1393 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1394 this field is in a record type with a "pragma pack". If SIZE is nonzero
1395 it is the specified size for this field. If POS is nonzero, it is the bit
1396 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1397 the address of this field for aliasing purposes. */
1399 tree
1400 create_field_decl (field_name, field_type, record_type, packed, size, pos,
1401 addressable)
1402 tree field_name;
1403 tree field_type;
1404 tree record_type;
1405 int packed;
1406 tree size, pos;
1407 int addressable;
1409 tree field_decl = build_decl (FIELD_DECL, field_name, field_type);
1411 DECL_CONTEXT (field_decl) = record_type;
1412 TREE_READONLY (field_decl) = TREE_READONLY (field_type);
1414 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1415 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1416 If it is a padding type where the inner field is of variable size, it
1417 must be at its natural alignment. Just handle the packed case here; we
1418 will disallow non-aligned rep clauses elsewhere. */
1419 if (packed && TYPE_MODE (field_type) == BLKmode)
1420 DECL_ALIGN (field_decl)
1421 = ((TREE_CODE (field_type) == RECORD_TYPE
1422 && TYPE_IS_PADDING_P (field_type)
1423 && ! TREE_CONSTANT (DECL_SIZE (TYPE_FIELDS (field_type))))
1424 ? TYPE_ALIGN (field_type) : BITS_PER_UNIT);
1426 /* If a size is specified, use it. Otherwise, see if we have a size
1427 to use that may differ from the natural size of the object. */
1428 if (size != 0)
1429 size = convert (bitsizetype, size);
1430 else if (packed)
1432 if (packed == 1 && ! operand_equal_p (rm_size (field_type),
1433 TYPE_SIZE (field_type), 0))
1434 size = rm_size (field_type);
1436 /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
 1437 a byte boundary. */
1438 if (size != 0 && TREE_CODE (size) == INTEGER_CST
1439 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
1440 size = round_up (size, BITS_PER_UNIT);
 1443 /* Make a bitfield if a size is specified, for two reasons: first, if the size
 1444 differs from the natural size; second, if the alignment is insufficient.
1445 There are a number of ways the latter can be true. But never make a
1446 bitfield if the type of the field has a nonconstant size. */
1448 if (size != 0 && TREE_CODE (size) == INTEGER_CST
1449 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1450 && (! operand_equal_p (TYPE_SIZE (field_type), size, 0)
1451 || (pos != 0
1452 && ! value_zerop (size_binop (TRUNC_MOD_EXPR, pos,
1453 bitsize_int (TYPE_ALIGN
1454 (field_type)))))
1455 || packed
1456 || (TYPE_ALIGN (record_type) != 0
1457 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1459 DECL_BIT_FIELD (field_decl) = 1;
1460 DECL_SIZE (field_decl) = size;
1461 if (! packed && pos == 0)
1462 DECL_ALIGN (field_decl)
1463 = (TYPE_ALIGN (record_type) != 0
1464 ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
1465 : TYPE_ALIGN (field_type));
1468 DECL_PACKED (field_decl) = pos != 0 ? DECL_BIT_FIELD (field_decl) : packed;
1469 DECL_ALIGN (field_decl)
1470 = MAX (DECL_ALIGN (field_decl),
1471 DECL_BIT_FIELD (field_decl) ? 1
1472 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT
1473 : TYPE_ALIGN (field_type));
1475 if (pos != 0)
1477 /* We need to pass in the alignment the DECL is known to have.
1478 This is the lowest-order bit set in POS, but no more than
1479 the alignment of the record, if one is specified. Note
1480 that an alignment of 0 is taken as infinite. */
1481 unsigned int known_align;
1483 if (host_integerp (pos, 1))
1484 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1485 else
1486 known_align = BITS_PER_UNIT;
1488 if (TYPE_ALIGN (record_type)
1489 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1490 known_align = TYPE_ALIGN (record_type);
1492 layout_decl (field_decl, known_align);
1493 SET_DECL_OFFSET_ALIGN (field_decl,
1494 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1495 : BITS_PER_UNIT);
1496 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1497 &DECL_FIELD_BIT_OFFSET (field_decl),
1498 DECL_OFFSET_ALIGN (field_decl), pos);
1500 DECL_HAS_REP_P (field_decl) = 1;
1503 /* If the field type is passed by reference, we will have pointers to the
1504 field, so it is addressable. */
1505 if (must_pass_by_ref (field_type) || default_pass_by_ref (field_type))
1506 addressable = 1;
1508 /* Mark the decl as nonaddressable if it either is indicated so semantically
1509 or if it is a bit field. */
1510 DECL_NONADDRESSABLE_P (field_decl)
1511 = ! addressable || DECL_BIT_FIELD (field_decl);
1513 return field_decl;
1516 /* Subroutine of previous function: return nonzero if EXP, ignoring any side
1517 effects, has the value of zero. */
1519 static int
1520 value_zerop (exp)
1521 tree exp;
1523 if (TREE_CODE (exp) == COMPOUND_EXPR)
1524 return value_zerop (TREE_OPERAND (exp, 1));
1526 return integer_zerop (exp);
1529 /* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1530 PARAM_TYPE is its type. READONLY is nonzero if the parameter is
1531 readonly (either an IN parameter or an address of a pass-by-ref
1532 parameter). */
1534 tree
1535 create_param_decl (param_name, param_type, readonly)
1536 tree param_name;
1537 tree param_type;
1538 int readonly;
1540 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
1542 DECL_ARG_TYPE (param_decl) = param_type;
1543 DECL_ARG_TYPE_AS_WRITTEN (param_decl) = param_type;
1544 TREE_READONLY (param_decl) = readonly;
1545 return param_decl;
1548 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1550 void
1551 process_attributes (decl, attr_list)
1552 tree decl;
1553 struct attrib *attr_list;
1555 for (; attr_list; attr_list = attr_list->next)
1556 switch (attr_list->type)
1558 case ATTR_MACHINE_ATTRIBUTE:
1559 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->arg,
1560 NULL_TREE),
1561 ATTR_FLAG_TYPE_IN_PLACE);
1562 break;
1564 case ATTR_LINK_ALIAS:
1565 TREE_STATIC (decl) = 1;
1566 assemble_alias (decl, attr_list->name);
1567 break;
1569 case ATTR_WEAK_EXTERNAL:
1570 if (SUPPORTS_WEAK)
1571 declare_weak (decl);
1572 else
1573 post_error ("?weak declarations not supported on this target",
1574 attr_list->error_point);
1575 break;
1577 case ATTR_LINK_SECTION:
1578 #ifdef ASM_OUTPUT_SECTION_NAME
1579 DECL_SECTION_NAME (decl)
1580 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1581 IDENTIFIER_POINTER (attr_list->name));
1582 DECL_COMMON (decl) = 0;
1583 #else
1584 post_error ("?section attributes are not supported for this target",
1585 attr_list->error_point);
1586 #endif
1587 break;
1591 /* Add some pending elaborations on the list. */
1593 void
1594 add_pending_elaborations (var_decl, var_init)
1595 tree var_decl;
1596 tree var_init;
1598 if (var_init != 0)
1599 Check_Elaboration_Code_Allowed (error_gnat_node);
1601 pending_elaborations
1602 = chainon (pending_elaborations, build_tree_list (var_decl, var_init));
1605 /* Obtain any pending elaborations and clear the old list. */
1607 tree
1608 get_pending_elaborations ()
 1610 /* Each thing added to the list went on the end; we want it at the
1611 beginning. */
1612 tree result = TREE_CHAIN (pending_elaborations);
1614 TREE_CHAIN (pending_elaborations) = 0;
1615 return result;
1618 /* Mark the binding level stack. */
1620 static void
1621 mark_binding_level (arg)
1622 PTR arg;
1624 struct binding_level *level = *(struct binding_level **) arg;
1626 for (; level != 0; level = level->level_chain)
1628 ggc_mark_tree (level->names);
1629 ggc_mark_tree (level->blocks);
1630 ggc_mark_tree (level->this_block);
1634 /* Mark the pending elaboration list. */
1636 static void
1637 mark_e_stack (data)
1638 PTR data;
1640 struct e_stack *p = *((struct e_stack **) data);
1642 if (p != 0)
1644 ggc_mark_tree (p->elab_list);
1645 mark_e_stack (&p->next);
1649 /* Return nonzero if there are pending elaborations. */
1652 pending_elaborations_p ()
1654 return TREE_CHAIN (pending_elaborations) != 0;
1657 /* Save a copy of the current pending elaboration list and make a new
1658 one. */
1660 void
1661 push_pending_elaborations ()
1663 struct e_stack *p = (struct e_stack *) xmalloc (sizeof (struct e_stack));
1665 p->next = elist_stack;
1666 p->elab_list = pending_elaborations;
1667 elist_stack = p;
1668 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
1671 /* Pop the stack of pending elaborations. */
1673 void
1674 pop_pending_elaborations ()
1676 struct e_stack *p = elist_stack;
1678 pending_elaborations = p->elab_list;
1679 elist_stack = p->next;
1680 free (p);
1683 /* Return the current position in pending_elaborations so we can insert
1684 elaborations after that point. */
1686 tree
1687 get_elaboration_location ()
1689 return tree_last (pending_elaborations);
1692 /* Insert the current elaborations after ELAB, which is in some elaboration
1693 list. */
1695 void
1696 insert_elaboration_list (elab)
1697 tree elab;
1699 tree next = TREE_CHAIN (elab);
1701 if (TREE_CHAIN (pending_elaborations))
1703 TREE_CHAIN (elab) = TREE_CHAIN (pending_elaborations);
1704 TREE_CHAIN (tree_last (pending_elaborations)) = next;
1705 TREE_CHAIN (pending_elaborations) = 0;
1709 /* Returns a LABEL_DECL node for LABEL_NAME. */
1711 tree
1712 create_label_decl (label_name)
1713 tree label_name;
1715 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1717 DECL_CONTEXT (label_decl) = current_function_decl;
1718 DECL_MODE (label_decl) = VOIDmode;
1719 DECL_SOURCE_LINE (label_decl) = lineno;
1720 DECL_SOURCE_FILE (label_decl) = input_filename;
1722 return label_decl;
1725 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1726 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1727 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1728 PARM_DECL nodes chained through the TREE_CHAIN field).
1730 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1731 appropriate fields in the FUNCTION_DECL. */
1733 tree
1734 create_subprog_decl (subprog_name, asm_name, subprog_type, param_decl_list,
1735 inline_flag, public_flag, extern_flag, attr_list)
1736 tree subprog_name;
1737 tree asm_name;
1738 tree subprog_type;
1739 tree param_decl_list;
1740 int inline_flag;
1741 int public_flag;
1742 int extern_flag;
1743 struct attrib *attr_list;
1745 tree return_type = TREE_TYPE (subprog_type);
1746 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1748 /* If this is a function nested inside an inlined external function, it
1749 means we aren't going to compile the outer function unless it is
1750 actually inlined, so do the same for us. */
1751 if (current_function_decl != 0 && DECL_INLINE (current_function_decl)
1752 && DECL_EXTERNAL (current_function_decl))
1753 extern_flag = 1;
1755 DECL_EXTERNAL (subprog_decl) = extern_flag;
1756 TREE_PUBLIC (subprog_decl) = public_flag;
1757 DECL_INLINE (subprog_decl) = inline_flag;
1758 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1759 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1760 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1761 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1762 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1764 if (asm_name != 0)
1765 DECL_ASSEMBLER_NAME (subprog_decl) = asm_name;
1767 process_attributes (subprog_decl, attr_list);
1769 /* Add this decl to the current binding level. */
1770 subprog_decl = pushdecl (subprog_decl);
1772 /* Output the assembler code and/or RTL for the declaration. */
1773 rest_of_decl_compilation (subprog_decl, 0, global_bindings_p (), 0);
1775 return subprog_decl;
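/* Illustrative sketch (not from the GNAT sources): declaring an imported
   function "int sketch_func (int)".  The PARM_DECL is built directly with
   build_decl, and "sketch_func" is a made-up name.  */

static tree
declare_sketch_func ()
{
  tree param = build_decl (PARM_DECL, get_identifier ("X"),
                           integer_type_node);
  tree ftype
    = build_function_type (integer_type_node,
                           tree_cons (NULL_TREE, integer_type_node,
                                      tree_cons (NULL_TREE, void_type_node,
                                                 NULL_TREE)));

  DECL_ARG_TYPE (param) = integer_type_node;

  /* Not inlined, public, external, no attributes.  */
  return create_subprog_decl (get_identifier ("sketch_func"), NULL_TREE,
                              ftype, param, 0, 1, 1, 0);
}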
1778 /* Count how deep we are into nested functions. This is because
1779 we shouldn't call the backend function context routines unless we
1780 are in a nested function. */
1782 static int function_nesting_depth;
1784 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1785 body. This routine needs to be invoked before processing the declarations
1786 appearing in the subprogram. */
1788 void
1789 begin_subprog_body (subprog_decl)
1790 tree subprog_decl;
1792 tree param_decl_list;
1793 tree param_decl;
1794 tree next_param;
1796 if (function_nesting_depth++ != 0)
1797 push_function_context ();
1799 announce_function (subprog_decl);
1801 /* Make this field nonzero so further routines know that this is not
1802 tentative. error_mark_node is replaced below (in poplevel) with the
1803 appropriate BLOCK. */
1804 DECL_INITIAL (subprog_decl) = error_mark_node;
1806 /* This function exists in static storage. This does not mean `static' in
1807 the C sense! */
1808 TREE_STATIC (subprog_decl) = 1;
1810 /* Enter a new binding level. */
1811 current_function_decl = subprog_decl;
1812 pushlevel (0);
1814 /* Push all the PARM_DECL nodes onto the current scope (i.e. the scope of the
1815 subprogram body) so that they can be recognized as local variables in the
1816 subprogram.
1818 The list of PARM_DECL nodes is stored in the right order in
1819 DECL_ARGUMENTS. Since ..._DECL nodes get stored in the reverse order in
1820 which they are transmitted to `pushdecl' we need to reverse the list of
1821 PARM_DECLs if we want it to be stored in the right order. The reason why
1822 we want to make sure the PARM_DECLs are stored in the correct order is
1823 that this list will be retrieved in a few lines with a call to `getdecls'
1824 to store it back into the DECL_ARGUMENTS field. */
1825 param_decl_list = nreverse (DECL_ARGUMENTS (subprog_decl));
1827 for (param_decl = param_decl_list; param_decl; param_decl = next_param)
1829 next_param = TREE_CHAIN (param_decl);
1830 TREE_CHAIN (param_decl) = NULL;
1831 pushdecl (param_decl);
1834 /* Store back the PARM_DECL nodes. They appear in the right order. */
1835 DECL_ARGUMENTS (subprog_decl) = getdecls ();
1837 init_function_start (subprog_decl, input_filename, lineno);
1838 expand_function_start (subprog_decl, 0);
1840 /* If this function is `main', emit a call to `__main'
1841 to run global initializers, etc. */
1842 if (DECL_ASSEMBLER_NAME (subprog_decl) != 0
1843 && MAIN_NAME_P (DECL_ASSEMBLER_NAME (subprog_decl))
1844 && DECL_CONTEXT (subprog_decl) == NULL_TREE)
1845 expand_main_function ();
1848 /* Finish the definition of the current subprogram and compile it all the way
1849 to assembler language output. */
1851 void
1852 end_subprog_body ()
1854 tree decl;
1855 tree cico_list;
1857 poplevel (1, 0, 1);
1858 BLOCK_SUPERCONTEXT (DECL_INITIAL (current_function_decl))
1859 = current_function_decl;
1861 /* Mark the RESULT_DECL as being in this subprogram. */
1862 DECL_CONTEXT (DECL_RESULT (current_function_decl)) = current_function_decl;
1864 expand_function_end (input_filename, lineno, 0);
1866 /* If this is a nested function, push a new GC context. That will keep
1867 local variables on the stack from being collected while we're doing
1868 the compilation of this function. */
1869 if (function_nesting_depth > 1)
1870 ggc_push_context ();
1872 rest_of_compilation (current_function_decl);
1874 if (function_nesting_depth > 1)
1875 ggc_pop_context ();
1877 #if 0
1878 /* If we're sure this function is defined in this file then mark it
1879 as such */
1880 if (TREE_ASM_WRITTEN (current_function_decl))
1881 mark_fn_defined_in_this_file (current_function_decl);
1882 #endif
1884 /* Throw away any VAR_DECLs we made for OUT parameters; they must
1885 not be seen when we call this function and will be in
1886 unallocated memory anyway. */
1887 for (cico_list = TYPE_CI_CO_LIST (TREE_TYPE (current_function_decl));
1888 cico_list != 0; cico_list = TREE_CHAIN (cico_list))
1889 TREE_VALUE (cico_list) = 0;
1891 if (DECL_SAVED_INSNS (current_function_decl) == 0)
1893 /* Throw away DECL_RTL in any PARM_DECLs unless this function
1894 was saved for inline, in which case the DECL_RTLs are in
1895 preserved memory. */
1896 for (decl = DECL_ARGUMENTS (current_function_decl);
1897 decl != 0; decl = TREE_CHAIN (decl))
1899 SET_DECL_RTL (decl, 0);
1900 DECL_INCOMING_RTL (decl) = 0;
1903 /* Similarly, discard DECL_RTL of the return value. */
1904 SET_DECL_RTL (DECL_RESULT (current_function_decl), 0);
1906 /* But DECL_INITIAL must remain nonzero so we know this
1907 was an actual function definition unless toplev.c decided not
1908 to inline it. */
1909 if (DECL_INITIAL (current_function_decl) != 0)
1910 DECL_INITIAL (current_function_decl) = error_mark_node;
1912 DECL_ARGUMENTS (current_function_decl) = 0;
1915 /* If we are not at the bottom of the function nesting stack, pop up to
1916 the containing function. Otherwise show we aren't in any function. */
1917 if (--function_nesting_depth != 0)
1918 pop_function_context ();
1919 else
1920 current_function_decl = 0;
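/* Illustrative sketch (not from the GNAT sources): the pairing used when a
   subprogram body is compiled.  SUBPROG_DECL is a FUNCTION_DECL obtained
   from create_subprog_decl; the body's declarations and statements would
   be expanded between the two calls.  */

static void
compile_body_sketch (subprog_decl)
     tree subprog_decl;
{
  begin_subprog_body (subprog_decl);

  /* ... expand the declarations and statements of the body here ...  */

  end_subprog_body ();
}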
1923 /* Return a definition for a builtin function named NAME and whose data type
1924 is TYPE. TYPE should be a function type with argument types.
1925 FUNCTION_CODE tells later passes how to compile calls to this function.
1926 See tree.h for its possible values.
1928 If LIBRARY_NAME is nonzero, use that for DECL_ASSEMBLER_NAME,
1929 the name to be called if we can't opencode the function. */
1931 tree
1932 builtin_function (name, type, function_code, class, library_name)
1933 const char *name;
1934 tree type;
1935 int function_code;
1936 enum built_in_class class;
1937 const char *library_name;
1939 tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);
1941 DECL_EXTERNAL (decl) = 1;
1942 TREE_PUBLIC (decl) = 1;
1943 if (library_name)
1944 DECL_ASSEMBLER_NAME (decl) = get_identifier (library_name);
1946 pushdecl (decl);
1947 DECL_BUILT_IN_CLASS (decl) = class;
1948 DECL_FUNCTION_CODE (decl) = function_code;
1949 return decl;
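/* Illustrative sketch (not from the GNAT sources): registering a memcpy
   builtin.  The argument types are simplified here (the real memcpy
   prototype uses size_t and const-qualified pointers).  */

static tree
register_memcpy_sketch ()
{
  tree ftype
    = build_function_type
      (ptr_type_node,
       tree_cons (NULL_TREE, ptr_type_node,
                  tree_cons (NULL_TREE, ptr_type_node,
                             tree_cons (NULL_TREE, sizetype,
                                        tree_cons (NULL_TREE, void_type_node,
                                                   NULL_TREE)))));

  return builtin_function ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
                           BUILT_IN_NORMAL, "memcpy");
}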
1952 /* Return an integer type with the number of bits of precision given by
1953 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
1954 it is a signed type. */
1956 tree
1957 type_for_size (precision, unsignedp)
1958 unsigned precision;
1959 int unsignedp;
1961 tree t;
1962 char type_name[20];
1964 if (precision <= 2 * MAX_BITS_PER_WORD
1965 && signed_and_unsigned_types[precision][unsignedp] != 0)
1966 return signed_and_unsigned_types[precision][unsignedp];
1968 if (unsignedp)
1969 t = make_unsigned_type (precision);
1970 else
1971 t = make_signed_type (precision);
1973 if (precision <= 2 * MAX_BITS_PER_WORD)
1974 signed_and_unsigned_types[precision][unsignedp] = t;
1976 if (TYPE_NAME (t) == 0)
1978 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
1979 TYPE_NAME (t) = get_identifier (type_name);
1982 return t;
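/* Illustrative sketch (not from the GNAT sources): the integer types
   returned by type_for_size are cached, so asking twice for the same
   precision and signedness yields the same node.  */

static void
type_for_size_sketch ()
{
  tree u8  = type_for_size (8, 1);   /* typically named "UNSIGNED_8" */
  tree s32 = type_for_size (32, 0);  /* typically named "SIGNED_32"  */

  if (u8 != type_for_size (8, 1) || s32 != type_for_size (32, 0))
    abort ();
}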
1985 /* Likewise for floating-point types. */
1987 static tree
1988 float_type_for_size (precision, mode)
1989 int precision;
1990 enum machine_mode mode;
1992 tree t;
1993 char type_name[20];
1995 if (float_types[(int) mode] != 0)
1996 return float_types[(int) mode];
1998 float_types[(int) mode] = t = make_node (REAL_TYPE);
1999 TYPE_PRECISION (t) = precision;
2000 layout_type (t);
2002 if (TYPE_MODE (t) != mode)
2003 gigi_abort (414);
2005 if (TYPE_NAME (t) == 0)
2007 sprintf (type_name, "FLOAT_%d", precision);
2008 TYPE_NAME (t) = get_identifier (type_name);
2011 return t;
2014 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2015 an unsigned type; otherwise a signed type is returned. */
2017 tree
2018 type_for_mode (mode, unsignedp)
2019 enum machine_mode mode;
2020 int unsignedp;
2022 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2023 return float_type_for_size (GET_MODE_BITSIZE (mode), mode);
2024 else
2025 return type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2028 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2030 tree
2031 unsigned_type (type_node)
2032 tree type_node;
2034 tree type = type_for_size (TYPE_PRECISION (type_node), 1);
2036 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2038 type = copy_node (type);
2039 TREE_TYPE (type) = type_node;
2041 else if (TREE_TYPE (type_node) != 0
2042 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2043 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2045 type = copy_node (type);
2046 TREE_TYPE (type) = TREE_TYPE (type_node);
2049 return type;
2052 /* Return the signed version of a TYPE_NODE, a scalar type. */
2054 tree
2055 signed_type (type_node)
2056 tree type_node;
2058 tree type = type_for_size (TYPE_PRECISION (type_node), 0);
2060 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2062 type = copy_node (type);
2063 TREE_TYPE (type) = type_node;
2065 else if (TREE_TYPE (type_node) != 0
2066 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2067 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2069 type = copy_node (type);
2070 TREE_TYPE (type) = TREE_TYPE (type_node);
2073 return type;
2076 /* Return a type the same as TYPE except unsigned or signed according to
2077 UNSIGNEDP. */
2079 tree
2080 signed_or_unsigned_type (unsignedp, type)
2081 int unsignedp;
2082 tree type;
2084 if (! INTEGRAL_TYPE_P (type) || TREE_UNSIGNED (type) == unsignedp)
2085 return type;
2086 else
2087 return type_for_size (TYPE_PRECISION (type), unsignedp);
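/* Illustrative sketch (not from the GNAT sources): the signedness helpers
   above agree with each other on a plain integer type.  */

static void
signedness_sketch ()
{
  tree u = unsigned_type (integer_type_node);
  tree s = signed_type (u);

  if (! TREE_UNSIGNED (u) || TREE_UNSIGNED (s)
      || signed_or_unsigned_type (1, s) != u)
    abort ();
}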
2090 /* EXP is an expression for the size of an object. If this size contains
2091 discriminant references, replace them with the maximum (if MAX_P) or
2092 minimum (if ! MAX_P) possible value of the discriminant. */
2094 tree
2095 max_size (exp, max_p)
2096 tree exp;
2097 int max_p;
2099 enum tree_code code = TREE_CODE (exp);
2100 tree type = TREE_TYPE (exp);
2102 switch (TREE_CODE_CLASS (code))
2104 case 'd':
2105 case 'c':
2106 return exp;
2108 case 'x':
2109 if (code == TREE_LIST)
2110 return tree_cons (TREE_PURPOSE (exp),
2111 max_size (TREE_VALUE (exp), max_p),
2112 TREE_CHAIN (exp) != 0
2113 ? max_size (TREE_CHAIN (exp), max_p) : 0);
2114 break;
2116 case 'r':
2117 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2118 modify. Otherwise, we abort since it is something we can't
2119 handle. */
2120 if (! contains_placeholder_p (exp))
2121 gigi_abort (406);
2123 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2124 return
2125 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), 1);
2127 case '<':
2128 return max_p ? size_one_node : size_zero_node;
2130 case '1':
2131 case '2':
2132 case 'e':
2133 switch (TREE_CODE_LENGTH (code))
2135 case 1:
2136 if (code == NON_LVALUE_EXPR)
2137 return max_size (TREE_OPERAND (exp, 0), max_p);
2138 else
2139 return
2140 fold (build1 (code, type,
2141 max_size (TREE_OPERAND (exp, 0),
2142 code == NEGATE_EXPR ? ! max_p : max_p)));
2144 case 2:
2145 if (code == RTL_EXPR)
2146 gigi_abort (407);
2147 else if (code == COMPOUND_EXPR)
2148 return max_size (TREE_OPERAND (exp, 1), max_p);
2149 else if (code == WITH_RECORD_EXPR)
2150 return exp;
2153 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2154 tree rhs = max_size (TREE_OPERAND (exp, 1),
2155 code == MINUS_EXPR ? ! max_p : max_p);
2157 /* Special-case wanting the maximum value of a MIN_EXPR.
2158 In that case, if one side overflows, return the other.
2159 sizetype is signed, but we know sizes are non-negative.
2160 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2161 overflowing or the maximum possible value and the RHS
2162 a variable. */
2163 if (max_p && code == MIN_EXPR && TREE_OVERFLOW (rhs))
2164 return lhs;
2165 else if (max_p && code == MIN_EXPR && TREE_OVERFLOW (lhs))
2166 return rhs;
2167 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2168 && (TREE_OVERFLOW (lhs)
2169 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2170 && ! TREE_CONSTANT (rhs))
2171 return lhs;
2172 else
2173 return fold (build (code, type, lhs, rhs));
2176 case 3:
2177 if (code == SAVE_EXPR)
2178 return exp;
2179 else if (code == COND_EXPR)
2180 return fold (build (MAX_EXPR, type,
2181 max_size (TREE_OPERAND (exp, 1), max_p),
2182 max_size (TREE_OPERAND (exp, 2), max_p)));
2183 else if (code == CALL_EXPR && TREE_OPERAND (exp, 1) != 0)
2184 return build (CALL_EXPR, type, TREE_OPERAND (exp, 0),
2185 max_size (TREE_OPERAND (exp, 1), max_p));
2189 gigi_abort (408);
2192 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2193 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2194 Return a constructor for the template. */
2196 tree
2197 build_template (template_type, array_type, expr)
2198 tree template_type;
2199 tree array_type;
2200 tree expr;
2202 tree template_elts = NULL_TREE;
2203 tree bound_list = NULL_TREE;
2204 tree field;
2206 if (TREE_CODE (array_type) == RECORD_TYPE
2207 && (TYPE_IS_PADDING_P (array_type)
2208 || TYPE_LEFT_JUSTIFIED_MODULAR_P (array_type)))
2209 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2211 if (TREE_CODE (array_type) == ARRAY_TYPE
2212 || (TREE_CODE (array_type) == INTEGER_TYPE
2213 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2214 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2216 /* First make the list for a CONSTRUCTOR for the template. Go down the
2217 field list of the template instead of the type chain because this
2218 array might be an Ada array of arrays and we can't tell where the
2219 nested arrays stop being the underlying object. */
2221 for (field = TYPE_FIELDS (template_type); field;
2222 (bound_list != 0
2223 ? (bound_list = TREE_CHAIN (bound_list))
2224 : (array_type = TREE_TYPE (array_type))),
2225 field = TREE_CHAIN (TREE_CHAIN (field)))
2227 tree bounds, min, max;
2229 /* If we have a bound list, get the bounds from there. Likewise
2230 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2231 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2232 This will give us a maximum range. */
2233 if (bound_list != 0)
2234 bounds = TREE_VALUE (bound_list);
2235 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2236 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2237 else if (expr != 0 && TREE_CODE (expr) == PARM_DECL
2238 && DECL_BY_COMPONENT_PTR_P (expr))
2239 bounds = TREE_TYPE (field);
2240 else
2241 gigi_abort (411);
2243 min = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MIN_VALUE (bounds));
2244 max = convert (TREE_TYPE (field), TYPE_MAX_VALUE (bounds));
2246 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2247 surround them with a WITH_RECORD_EXPR giving EXPR as the
2248 OBJECT. */
2249 if (! TREE_CONSTANT (min) && contains_placeholder_p (min))
2250 min = build (WITH_RECORD_EXPR, TREE_TYPE (min), min, expr);
2251 if (! TREE_CONSTANT (max) && contains_placeholder_p (max))
2252 max = build (WITH_RECORD_EXPR, TREE_TYPE (max), max, expr);
2254 template_elts = tree_cons (TREE_CHAIN (field), max,
2255 tree_cons (field, min, template_elts));
2258 return build_constructor (template_type, nreverse (template_elts));
2261 /* Build a VMS descriptor from a Mechanism_Type, which must specify
2262 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2263 in the type contains in its DECL_INITIAL the expression to use when
2264 a constructor is made for the type. GNAT_ENTITY is a gnat node used
2265 to print out an error message if the mechanism cannot be applied to
2266 an object of that type and also for the name. */
2268 tree
2269 build_vms_descriptor (type, mech, gnat_entity)
2270 tree type;
2271 Mechanism_Type mech;
2272 Entity_Id gnat_entity;
2274 tree record_type = make_node (RECORD_TYPE);
2275 tree field_list = 0;
2276 int class;
2277 int dtype = 0;
2278 tree inner_type;
2279 int ndim;
2280 int i;
2281 tree *idx_arr;
2282 tree tem;
2284 /* If TYPE is an unconstrained array, use the underlying array type. */
2285 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2286 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2288 /* If this is an array, compute the number of dimensions in the array,
2289 get the index types, and point to the inner type. */
2290 if (TREE_CODE (type) != ARRAY_TYPE)
2291 ndim = 0;
2292 else
2293 for (ndim = 1, inner_type = type;
2294 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2295 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2296 ndim++, inner_type = TREE_TYPE (inner_type))
2299 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2301 if (mech != By_Descriptor_NCA
2302 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2303 for (i = ndim - 1, inner_type = type;
2304 i >= 0;
2305 i--, inner_type = TREE_TYPE (inner_type))
2306 idx_arr[i] = TYPE_DOMAIN (inner_type);
2307 else
2308 for (i = 0, inner_type = type;
2309 i < ndim;
2310 i++, inner_type = TREE_TYPE (inner_type))
2311 idx_arr[i] = TYPE_DOMAIN (inner_type);
2313 /* Now get the DTYPE value. */
2314 switch (TREE_CODE (type))
2316 case INTEGER_TYPE:
2317 case ENUMERAL_TYPE:
2318 if (TYPE_VAX_FLOATING_POINT_P (type))
2319 switch ((int) TYPE_DIGITS_VALUE (type))
2321 case 6:
2322 dtype = 10;
2323 break;
2324 case 9:
2325 dtype = 11;
2326 break;
2327 case 15:
2328 dtype = 27;
2329 break;
2331 else
2332 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2334 case 8:
2335 dtype = TREE_UNSIGNED (type) ? 2 : 6;
2336 break;
2337 case 16:
2338 dtype = TREE_UNSIGNED (type) ? 3 : 7;
2339 break;
2340 case 32:
2341 dtype = TREE_UNSIGNED (type) ? 4 : 8;
2342 break;
2343 case 64:
2344 dtype = TREE_UNSIGNED (type) ? 5 : 9;
2345 break;
2346 case 128:
2347 dtype = TREE_UNSIGNED (type) ? 25 : 26;
2348 break;
2350 break;
2352 case REAL_TYPE:
2353 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2354 break;
2356 case COMPLEX_TYPE:
2357 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2358 && TYPE_VAX_FLOATING_POINT_P (type))
2359 switch ((int) TYPE_DIGITS_VALUE (type))
2361 case 6:
2362 dtype = 12;
2363 break;
2364 case 9:
2365 dtype = 13;
2366 break;
2367 case 15:
2368 dtype = 29;
2370 else
2371 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2372 break;
2374 case ARRAY_TYPE:
2375 dtype = 14;
2376 break;
2378 default:
2379 break;
2382 /* Get the CLASS value. */
2383 switch (mech)
2385 case By_Descriptor_A:
2386 class = 4;
2387 break;
2388 case By_Descriptor_NCA:
2389 class = 10;
2390 break;
2391 case By_Descriptor_SB:
2392 class = 15;
2393 break;
2394 default:
2395 class = 1;
2398 /* Make the type for a descriptor for VMS. The first four fields
2399 are the same for all types. */
2401 field_list
2402 = chainon (field_list,
2403 make_descriptor_field
2404 ("LENGTH", type_for_size (16, 1), record_type,
2405 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2407 field_list = chainon (field_list,
2408 make_descriptor_field ("DTYPE", type_for_size (8, 1),
2409 record_type, size_int (dtype)));
2410 field_list = chainon (field_list,
2411 make_descriptor_field ("CLASS", type_for_size (8, 1),
2412 record_type, size_int (class)));
2414 field_list
2415 = chainon (field_list,
2416 make_descriptor_field ("POINTER",
2417 build_pointer_type (type),
2418 record_type,
2419 build1 (ADDR_EXPR,
2420 build_pointer_type (type),
2421 build (PLACEHOLDER_EXPR,
2422 type))));
2424 switch (mech)
2426 case By_Descriptor:
2427 case By_Descriptor_S:
2428 break;
2430 case By_Descriptor_SB:
2431 field_list
2432 = chainon (field_list,
2433 make_descriptor_field
2434 ("SB_L1", type_for_size (32, 1), record_type,
2435 TREE_CODE (type) == ARRAY_TYPE
2436 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2437 field_list
2438 = chainon (field_list,
2439 make_descriptor_field
2440 ("SB_L2", type_for_size (32, 1), record_type,
2441 TREE_CODE (type) == ARRAY_TYPE
2442 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2443 break;
2445 case By_Descriptor_A:
2446 case By_Descriptor_NCA:
2447 field_list = chainon (field_list,
2448 make_descriptor_field ("SCALE",
2449 type_for_size (8, 1),
2450 record_type,
2451 size_zero_node));
2453 field_list = chainon (field_list,
2454 make_descriptor_field ("DIGITS",
2455 type_for_size (8, 1),
2456 record_type,
2457 size_zero_node));
2459 field_list
2460 = chainon (field_list,
2461 make_descriptor_field
2462 ("AFLAGS", type_for_size (8, 1), record_type,
2463 size_int (mech == By_Descriptor_NCA
2465 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2466 : (TREE_CODE (type) == ARRAY_TYPE
2467 && TYPE_CONVENTION_FORTRAN_P (type)
2468 ? 224 : 192))));
2470 field_list = chainon (field_list,
2471 make_descriptor_field ("DIMCT",
2472 type_for_size (8, 1),
2473 record_type,
2474 size_int (ndim)));
2476 field_list = chainon (field_list,
2477 make_descriptor_field ("ARSIZE",
2478 type_for_size (32, 1),
2479 record_type,
2480 size_in_bytes (type)));
2482 /* Now build a pointer to the 0,0,0... element. */
2483 tem = build (PLACEHOLDER_EXPR, type);
2484 for (i = 0, inner_type = type; i < ndim;
2485 i++, inner_type = TREE_TYPE (inner_type))
2486 tem = build (ARRAY_REF, TREE_TYPE (inner_type), tem,
2487 convert (TYPE_DOMAIN (inner_type), size_zero_node));
2489 field_list
2490 = chainon (field_list,
2491 make_descriptor_field
2492 ("A0", build_pointer_type (inner_type), record_type,
2493 build1 (ADDR_EXPR, build_pointer_type (inner_type), tem)));
2495 /* Next come the addressing coefficients. */
2496 tem = size_int (1);
2497 for (i = 0; i < ndim; i++)
2499 char fname[3];
2500 tree idx_length
2501 = size_binop (MULT_EXPR, tem,
2502 size_binop (PLUS_EXPR,
2503 size_binop (MINUS_EXPR,
2504 TYPE_MAX_VALUE (idx_arr[i]),
2505 TYPE_MIN_VALUE (idx_arr[i])),
2506 size_int (1)));
2508 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
2509 fname[1] = '0' + i, fname[2] = 0;
2510 field_list = chainon (field_list,
2511 make_descriptor_field (fname,
2512 type_for_size (32, 1),
2513 record_type,
2514 idx_length));
2516 if (mech == By_Descriptor_NCA)
2517 tem = idx_length;
2520 /* Finally here are the bounds. */
2521 for (i = 0; i < ndim; i++)
2523 char fname[3];
2525 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2526 field_list
2527 = chainon (field_list,
2528 make_descriptor_field
2529 (fname, type_for_size (32, 1), record_type,
2530 TYPE_MIN_VALUE (idx_arr[i])));
2532 fname[0] = 'U';
2533 field_list
2534 = chainon (field_list,
2535 make_descriptor_field
2536 (fname, type_for_size (32, 1), record_type,
2537 TYPE_MAX_VALUE (idx_arr[i])));
2539 break;
2541 default:
2542 post_error ("unsupported descriptor type for &", gnat_entity);
2545 finish_record_type (record_type, field_list, 0, 1);
2546 pushdecl (build_decl (TYPE_DECL, create_concat_name (gnat_entity, "DESC"),
2547 record_type));
2549 return record_type;
2552 /* Utility routine for above code to make a field. */
2554 static tree
2555 make_descriptor_field (name, type, rec_type, initial)
2556 const char *name;
2557 tree type;
2558 tree rec_type;
2559 tree initial;
2561 tree field
2562 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2564 DECL_INITIAL (field) = initial;
2565 return field;
2568 /* Build a type to be used to represent an aliased object whose nominal
2569 type is an unconstrained array. This consists of a RECORD_TYPE containing
2570 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
2571 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
2572 is used to represent an arbitrary unconstrained object. Use NAME
2573 as the name of the record. */
2575 tree
2576 build_unc_object_type (template_type, object_type, name)
2577 tree template_type;
2578 tree object_type;
2579 tree name;
2581 tree type = make_node (RECORD_TYPE);
2582 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
2583 template_type, type, 0, 0, 0, 1);
2584 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
2585 type, 0, 0, 0, 1);
2587 TYPE_NAME (type) = name;
2588 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
2589 finish_record_type (type,
2590 chainon (chainon (NULL_TREE, template_field),
2591 array_field),
2592 0, 0);
2594 return type;
2597 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
2598 the normal case this is just two adjustments, but we have more to do
2599 if NEW is an UNCONSTRAINED_ARRAY_TYPE. */
2601 void
2602 update_pointer_to (old_type, new_type)
2603 tree old_type;
2604 tree new_type;
2606 tree ptr = TYPE_POINTER_TO (old_type);
2607 tree ref = TYPE_REFERENCE_TO (old_type);
2608 tree type;
2610 /* If this is the main variant, process all the other variants first. */
2611 if (TYPE_MAIN_VARIANT (old_type) == old_type)
2612 for (type = TYPE_NEXT_VARIANT (old_type); type != 0;
2613 type = TYPE_NEXT_VARIANT (type))
2614 update_pointer_to (type, new_type);
2616 /* If no pointer or reference, we are done. Otherwise, get the new type with
2617 the same qualifiers as the old type and see if it is the same as the old
2618 type. */
2619 if (ptr == 0 && ref == 0)
2620 return;
2622 new_type = build_qualified_type (new_type, TYPE_QUALS (old_type));
2623 if (old_type == new_type)
2624 return;
2626 /* First handle the simple case. */
2627 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
2629 if (ptr != 0)
2630 TREE_TYPE (ptr) = new_type;
2631 TYPE_POINTER_TO (new_type) = ptr;
2633 if (ref != 0)
2634 TREE_TYPE (ref) = new_type;
2635 TYPE_REFERENCE_TO (new_type) = ref;
2637 if (ptr != 0 && TYPE_NAME (ptr) != 0
2638 && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL
2639 && TREE_CODE (new_type) != ENUMERAL_TYPE)
2640 rest_of_decl_compilation (TYPE_NAME (ptr), NULL,
2641 global_bindings_p (), 0);
2642 if (ref != 0 && TYPE_NAME (ref) != 0
2643 && TREE_CODE (TYPE_NAME (ref)) == TYPE_DECL
2644 && TREE_CODE (new_type) != ENUMERAL_TYPE)
2645 rest_of_decl_compilation (TYPE_NAME (ref), NULL,
2646 global_bindings_p (), 0);
2649 /* Now deal with the unconstrained array case. In this case the "pointer"
2650 is actually a RECORD_TYPE where the types of both fields are
2651 pointers to void. In that case, copy the field list from the
2652 old type to the new one and update the fields' context. */
2653 else if (TREE_CODE (ptr) != RECORD_TYPE || ! TYPE_IS_FAT_POINTER_P (ptr))
2654 gigi_abort (412);
2656 else
2658 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
2659 tree ptr_temp_type;
2660 tree new_ref;
2661 tree var;
2663 TYPE_FIELDS (ptr) = TYPE_FIELDS (TYPE_POINTER_TO (new_type));
2664 DECL_CONTEXT (TYPE_FIELDS (ptr)) = ptr;
2665 DECL_CONTEXT (TREE_CHAIN (TYPE_FIELDS (ptr))) = ptr;
2667 /* Rework the PLACEHOLDER_EXPR inside the reference to the
2668 template bounds.
2670 ??? This is now the only use of gnat_substitute_in_type, which
2671 is now a very "heavy" routine to do this, so it should be replaced
2672 at some point. */
2673 ptr_temp_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (ptr)));
2674 new_ref = build (COMPONENT_REF, ptr_temp_type,
2675 build (PLACEHOLDER_EXPR, ptr),
2676 TREE_CHAIN (TYPE_FIELDS (ptr)));
2678 update_pointer_to
2679 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
2680 gnat_substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
2681 TREE_CHAIN (TYPE_FIELDS (ptr)), new_ref));
2683 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
2684 TYPE_UNCONSTRAINED_ARRAY (var) = new_type;
2686 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
2687 = TREE_TYPE (new_type) = ptr;
2689 /* Now handle updating the allocation record, what the thin pointer
2690 points to. Update all pointers from the old record into the new
2691 one, update the types of the fields, and recompute the size. */
2693 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
2695 TREE_TYPE (TYPE_FIELDS (new_obj_rec)) = TREE_TYPE (ptr_temp_type);
2696 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2697 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr)));
2698 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2699 = TYPE_SIZE (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
2700 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2701 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
2703 TYPE_SIZE (new_obj_rec)
2704 = size_binop (PLUS_EXPR,
2705 DECL_SIZE (TYPE_FIELDS (new_obj_rec)),
2706 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
2707 TYPE_SIZE_UNIT (new_obj_rec)
2708 = size_binop (PLUS_EXPR,
2709 DECL_SIZE_UNIT (TYPE_FIELDS (new_obj_rec)),
2710 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
2711 rest_of_type_compilation (ptr, global_bindings_p ());
2715 /* Convert a pointer to a constrained array into a pointer to a fat
2716 pointer. This involves making or finding a template. */
2718 static tree
2719 convert_to_fat_pointer (type, expr)
2720 tree type;
2721 tree expr;
2723 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
2724 tree template, template_addr;
2725 tree etype = TREE_TYPE (expr);
2727 /* If EXPR is a constant of zero, we make a fat pointer that has a null
2728 pointer to the template and array. */
2729 if (integer_zerop (expr))
2730 return
2731 build_constructor
2732 (type,
2733 tree_cons (TYPE_FIELDS (type),
2734 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
2735 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2736 convert (build_pointer_type (template_type),
2737 expr),
2738 NULL_TREE)));
2740 /* If EXPR is a thin pointer, make the template and data from the record. */
2742 else if (TYPE_THIN_POINTER_P (etype))
2744 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
2746 expr = save_expr (expr);
2747 if (TREE_CODE (expr) == ADDR_EXPR)
2748 expr = TREE_OPERAND (expr, 0);
2749 else
2750 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
2752 template = build_component_ref (expr, NULL_TREE, fields);
2753 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
2754 build_component_ref (expr, NULL_TREE,
2755 TREE_CHAIN (fields)));
2757 else
2758 /* Otherwise, build the constructor for the template. */
2759 template = build_template (template_type, TREE_TYPE (etype), expr);
2761 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
2763 /* The result is a CONSTRUCTOR for the fat pointer. */
2764 return
2765 build_constructor (type,
2766 tree_cons (TYPE_FIELDS (type), expr,
2767 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2768 template_addr, NULL_TREE)));
2771 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
2772 is something that is a fat pointer, so convert to it first if EXPR
2773 is not already a fat pointer. */
2775 static tree
2776 convert_to_thin_pointer (type, expr)
2777 tree type;
2778 tree expr;
2780 if (! TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
2781 expr
2782 = convert_to_fat_pointer
2783 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
2785 /* We get the pointer to the data and use a NOP_EXPR to make it the
2786 proper GCC type. */
2787 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)));
2788 expr = build1 (NOP_EXPR, type, expr);
2790 return expr;
2793 /* Create an expression whose value is that of EXPR,
2794 converted to type TYPE. The TREE_TYPE of the value
2795 is always TYPE. This function implements all reasonable
2796 conversions; callers should filter out those that are
2797 not permitted by the language being compiled. */
2799 tree
2800 convert (type, expr)
2801 tree type, expr;
2803 enum tree_code code = TREE_CODE (type);
2804 tree etype = TREE_TYPE (expr);
2805 enum tree_code ecode = TREE_CODE (etype);
2806 tree tem;
2808 /* If EXPR is already the right type, we are done. */
2809 if (type == etype)
2810 return expr;
2812 /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
2813 new one. */
2814 if (TREE_CODE (expr) == WITH_RECORD_EXPR)
2815 return build (WITH_RECORD_EXPR, type,
2816 convert (type, TREE_OPERAND (expr, 0)),
2817 TREE_OPERAND (expr, 1));
2819 /* If the input type has padding, remove it by doing a component reference
2820 to the field. If the output type has padding, make a constructor
2821 to build the record. If both input and output have padding and are
2822 of variable size, do this as an unchecked conversion. */
2823 if (ecode == RECORD_TYPE && code == RECORD_TYPE
2824 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
2825 && (! TREE_CONSTANT (TYPE_SIZE (type))
2826 || ! TREE_CONSTANT (TYPE_SIZE (etype))))
2828 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
2830 /* If we have just converted to this padded type, just get
2831 the inner expression. */
2832 if (TREE_CODE (expr) == CONSTRUCTOR
2833 && CONSTRUCTOR_ELTS (expr) != 0
2834 && TREE_PURPOSE (CONSTRUCTOR_ELTS (expr)) == TYPE_FIELDS (etype))
2835 return TREE_VALUE (CONSTRUCTOR_ELTS (expr));
2836 else
2837 return convert (type, build_component_ref (expr, NULL_TREE,
2838 TYPE_FIELDS (etype)));
2840 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
2842 /* If we previously converted from another type and our type is
2843 of variable size, remove the conversion to avoid the need for
2844 variable-size temporaries. */
2845 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
2846 && ! TREE_CONSTANT (TYPE_SIZE (type)))
2847 expr = TREE_OPERAND (expr, 0);
2849 /* If we are just removing the padding from expr, convert the original
2850 object if we have variable size. That will avoid the need
2851 for some variable-size temporaries. */
2852 if (TREE_CODE (expr) == COMPONENT_REF
2853 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
2854 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
2855 && ! TREE_CONSTANT (TYPE_SIZE (type)))
2856 return convert (type, TREE_OPERAND (expr, 0));
2858 /* If the result type is a padded type with a self-referentially-sized
2859 field and the expression type is a record, do this as an
2860 unchecked conversion. */
2861 else if (TREE_CODE (DECL_SIZE (TYPE_FIELDS (type))) != INTEGER_CST
2862 && contains_placeholder_p (DECL_SIZE (TYPE_FIELDS (type)))
2863 && TREE_CODE (etype) == RECORD_TYPE)
2864 return unchecked_convert (type, expr);
2866 else
2867 return
2868 build_constructor (type,
2869 tree_cons (TYPE_FIELDS (type),
2870 convert (TREE_TYPE
2871 (TYPE_FIELDS (type)),
2872 expr),
2873 NULL_TREE));
2876 /* If the input is a biased type, adjust first. */
2877 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
2878 return convert (type, fold (build (PLUS_EXPR, TREE_TYPE (etype),
2879 fold (build1 (GNAT_NOP_EXPR,
2880 TREE_TYPE (etype), expr)),
2881 TYPE_MIN_VALUE (etype))));
2883 /* If the input is a left-justified modular type, we need to extract
2884 the actual object before converting it to any other type with the
2885 exception of an unconstrained array. */
2886 if (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)
2887 && code != UNCONSTRAINED_ARRAY_TYPE)
2888 return convert (type, build_component_ref (expr, NULL_TREE,
2889 TYPE_FIELDS (etype)));
2891 /* If converting a type that does not contain a template into one
2892 that does, convert to the data type and then build the template. */
2893 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type)
2894 && ! (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype)))
2896 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
2898 return
2899 build_constructor
2900 (type,
2901 tree_cons (TYPE_FIELDS (type),
2902 build_template (TREE_TYPE (TYPE_FIELDS (type)),
2903 obj_type, NULL_TREE),
2904 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2905 convert (obj_type, expr), NULL_TREE)));
2908 /* There are some special cases of expressions that we process
2909 specially. */
2910 switch (TREE_CODE (expr))
2912 case ERROR_MARK:
2913 return expr;
2915 case TRANSFORM_EXPR:
2916 case NULL_EXPR:
2917 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
2918 conversion in gnat_expand_expr. NULL_EXPR does not represent
2919 an actual value, so no conversion is needed. */
2920 TREE_TYPE (expr) = type;
2921 return expr;
2923 case STRING_CST:
2924 case CONSTRUCTOR:
2925 /* If we are converting a STRING_CST to another constrained array type,
2926 just make a new one in the proper type. Likewise for a
2927 CONSTRUCTOR. But if the mode of the type is different, we must
2928 ensure a new RTL is made for the constant. */
2929 if (code == ecode && AGGREGATE_TYPE_P (etype)
2930 && ! (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
2931 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
2933 expr = copy_node (expr);
2934 TREE_TYPE (expr) = type;
2936 if (TYPE_MODE (type) != TYPE_MODE (etype))
2937 TREE_CST_RTL (expr) = 0;
2939 return expr;
2941 break;
2943 case COMPONENT_REF:
2944 /* If we are converting between two aggregate types of the same
2945 kind, size, mode, and alignment, just make a new COMPONENT_REF.
2946 This avoids unneeded conversions, which make reference computations
2947 more complex. */
2948 if (code == ecode && TYPE_MODE (type) == TYPE_MODE (etype)
2949 && AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
2950 && TYPE_ALIGN (type) == TYPE_ALIGN (etype)
2951 && operand_equal_p (TYPE_SIZE (type), TYPE_SIZE (etype), 0))
2952 return build (COMPONENT_REF, type, TREE_OPERAND (expr, 0),
2953 TREE_OPERAND (expr, 1));
2955 break;
2957 case UNCONSTRAINED_ARRAY_REF:
2958 /* Convert this to the type of the inner array by getting the address of
2959 the array from the template. */
2960 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
2961 build_component_ref (TREE_OPERAND (expr, 0),
2962 get_identifier ("P_ARRAY"),
2963 NULL_TREE));
2964 etype = TREE_TYPE (expr);
2965 ecode = TREE_CODE (etype);
2966 break;
2968 case VIEW_CONVERT_EXPR:
2969 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
2970 && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
2971 return convert (type, TREE_OPERAND (expr, 0));
2972 break;
2974 case INDIRECT_REF:
2975 /* If both types are record types, just convert the pointer and
2976 make a new INDIRECT_REF.
2978 ??? Disable this for now since it causes problems with the
2979 code in build_binary_op for MODIFY_EXPR which wants to
2980 strip off conversions. But that code really is a mess and
2981 we need to do this a much better way some time. */
2982 if (0
2983 && (TREE_CODE (type) == RECORD_TYPE
2984 || TREE_CODE (type) == UNION_TYPE)
2985 && (TREE_CODE (etype) == RECORD_TYPE
2986 || TREE_CODE (etype) == UNION_TYPE)
2987 && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
2988 return build_unary_op (INDIRECT_REF, NULL_TREE,
2989 convert (build_pointer_type (type),
2990 TREE_OPERAND (expr, 0)));
2991 break;
2993 default:
2994 break;
2997 /* Check for converting to a pointer to an unconstrained array. */
2998 if (TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
2999 return convert_to_fat_pointer (type, expr);
3001 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
3002 || (code == INTEGER_CST && ecode == INTEGER_CST
3003 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
3004 return fold (build1 (NOP_EXPR, type, expr));
3006 switch (code)
3008 case VOID_TYPE:
3009 return build1 (CONVERT_EXPR, type, expr);
3011 case INTEGER_TYPE:
3012 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
3013 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE))
3014 return unchecked_convert (type, expr);
3015 else if (TYPE_BIASED_REPRESENTATION_P (type))
3016 return fold (build1 (CONVERT_EXPR, type,
3017 fold (build (MINUS_EXPR, TREE_TYPE (type),
3018 convert (TREE_TYPE (type), expr),
3019 TYPE_MIN_VALUE (type)))));
3021 /* ... fall through ... */
3023 case ENUMERAL_TYPE:
3024 return fold (convert_to_integer (type, expr));
3026 case POINTER_TYPE:
3027 case REFERENCE_TYPE:
3028 /* If converting between two pointers to records denoting
3029 both a template and type, adjust if needed to account
3030 for any differing offsets, since one might be negative. */
3031 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
3033 tree bit_diff
3034 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
3035 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
3036 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
3037 sbitsize_int (BITS_PER_UNIT));
3039 expr = build1 (NOP_EXPR, type, expr);
3040 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
3041 if (integer_zerop (byte_diff))
3042 return expr;
3044 return build_binary_op (PLUS_EXPR, type, expr,
3045 fold (convert_to_pointer (type, byte_diff)));
3048 /* If converting to a thin pointer, handle specially. */
3049 if (TYPE_THIN_POINTER_P (type)
3050 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != 0)
3051 return convert_to_thin_pointer (type, expr);
3053 /* If converting fat pointer to normal pointer, get the pointer to the
3054 array and then convert it. */
3055 else if (TYPE_FAT_POINTER_P (etype))
3056 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
3057 NULL_TREE);
3059 return fold (convert_to_pointer (type, expr));
3061 case REAL_TYPE:
3062 return fold (convert_to_real (type, expr));
3064 case RECORD_TYPE:
3065 if (TYPE_LEFT_JUSTIFIED_MODULAR_P (type) && ! AGGREGATE_TYPE_P (etype))
3066 return
3067 build_constructor
3068 (type, tree_cons (TYPE_FIELDS (type),
3069 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3070 NULL_TREE));
3072 /* ... fall through ... */
3074 case ARRAY_TYPE:
3075 /* In these cases, assume the front-end has validated the conversion.
3076 If the conversion is valid, it will be a bit-wise conversion, so
3077 it can be viewed as an unchecked conversion. */
3078 return unchecked_convert (type, expr);
3080 case UNION_TYPE:
3081 /* Just validate that the expression's type is indeed the type of a
3082 field of the union type. Then make the simple conversion. */
3083 for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
3084 if (TREE_TYPE (tem) == etype)
3085 return build1 (CONVERT_EXPR, type, expr);
3087 gigi_abort (413);
3089 case UNCONSTRAINED_ARRAY_TYPE:
3090 /* If EXPR is a constrained array, take its address, convert it to a
3091 fat pointer, and then dereference it. Likewise if EXPR is a
3092 record containing both a template and a constrained array.
3093 Note that a record representing a left-justified modular type
3094 always represents a packed constrained array. */
3095 if (ecode == ARRAY_TYPE
3096 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
3097 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
3098 || (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)))
3099 return
3100 build_unary_op
3101 (INDIRECT_REF, NULL_TREE,
3102 convert_to_fat_pointer (TREE_TYPE (type),
3103 build_unary_op (ADDR_EXPR,
3104 NULL_TREE, expr)));
3106 /* Do something very similar for converting one unconstrained
3107 array to another. */
3108 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
3109 return
3110 build_unary_op (INDIRECT_REF, NULL_TREE,
3111 convert (TREE_TYPE (type),
3112 build_unary_op (ADDR_EXPR,
3113 NULL_TREE, expr)));
3114 else
3115 gigi_abort (409);
3117 case COMPLEX_TYPE:
3118 return fold (convert_to_complex (type, expr));
3120 default:
3121 gigi_abort (410);
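/* Illustrative sketch (not from the GNAT sources): a trivial use of
   convert, turning a constant into the 16-bit unsigned type made by
   type_for_size.  Scalar cases like this one end up in convert_to_integer;
   the special cases above handle padded records, fat and thin pointers,
   and unconstrained arrays.  */

static tree
convert_sketch ()
{
  return convert (type_for_size (16, 1), integer_zero_node);
}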
3125 /* Remove all conversions that are done in EXP. This includes converting
3126 from a padded type or to a left-justified modular type. If TRUE_ADDRESS
3127 is nonzero, always return the address of the containing object even if
3128 the address is not bit-aligned. */
3130 tree
3131 remove_conversions (exp, true_address)
3132 tree exp;
3133 int true_address;
3135 switch (TREE_CODE (exp))
3137 case CONSTRUCTOR:
3138 if (true_address
3139 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3140 && TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
3141 return remove_conversions (TREE_VALUE (CONSTRUCTOR_ELTS (exp)), 1);
3142 break;
3144 case COMPONENT_REF:
3145 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3146 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3147 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3148 break;
3150 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
3151 case NOP_EXPR: case CONVERT_EXPR: case GNAT_NOP_EXPR:
3152 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3154 default:
3155 break;
3158 return exp;
3161 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
3162 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
3163 likewise return an expression pointing to the underlying array. */
3165 tree
3166 maybe_unconstrained_array (exp)
3167 tree exp;
3169 enum tree_code code = TREE_CODE (exp);
3170 tree new;
3172 switch (TREE_CODE (TREE_TYPE (exp)))
3174 case UNCONSTRAINED_ARRAY_TYPE:
3175 if (code == UNCONSTRAINED_ARRAY_REF)
3177 new
3178 = build_unary_op (INDIRECT_REF, NULL_TREE,
3179 build_component_ref (TREE_OPERAND (exp, 0),
3180 get_identifier ("P_ARRAY"),
3181 NULL_TREE));
3182 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
3183 return new;
3186 else if (code == NULL_EXPR)
3187 return build1 (NULL_EXPR,
3188 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
3189 (TREE_TYPE (TREE_TYPE (exp))))),
3190 TREE_OPERAND (exp, 0));
3192 else if (code == WITH_RECORD_EXPR
3193 && (TREE_OPERAND (exp, 0)
3194 != (new = maybe_unconstrained_array
3195 (TREE_OPERAND (exp, 0)))))
3196 return build (WITH_RECORD_EXPR, TREE_TYPE (new), new,
3197 TREE_OPERAND (exp, 1));
3199 case RECORD_TYPE:
3200 if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
3202 new
3203 = build_component_ref (exp, NULL_TREE,
3204 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))));
3205 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
3206 && TYPE_IS_PADDING_P (TREE_TYPE (new)))
3207 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (new))), new);
3209 return new;
3211 break;
3213 default:
3214 break;
3217 return exp;
3220 /* Return an expression that does an unchecked conversion of EXPR to TYPE. */
3222 tree
3223 unchecked_convert (type, expr)
3224 tree type;
3225 tree expr;
3227 tree etype = TREE_TYPE (expr);
3229 /* If the expression is already the right type, we are done. */
3230 if (etype == type)
3231 return expr;
3233 /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
3234 new one. */
3235 if (TREE_CODE (expr) == WITH_RECORD_EXPR)
3236 return build (WITH_RECORD_EXPR, type,
3237 unchecked_convert (type, TREE_OPERAND (expr, 0)),
3238 TREE_OPERAND (expr, 1));
3240 /* If both types are integral, just do a normal conversion.
3241 Likewise for a conversion to an unconstrained array. */
3242 if ((((INTEGRAL_TYPE_P (type)
3243 && ! (TREE_CODE (type) == INTEGER_TYPE
3244 && TYPE_VAX_FLOATING_POINT_P (type)))
3245 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
3246 || (TREE_CODE (type) == RECORD_TYPE
3247 && TYPE_LEFT_JUSTIFIED_MODULAR_P (type)))
3248 && ((INTEGRAL_TYPE_P (etype)
3249 && ! (TREE_CODE (etype) == INTEGER_TYPE
3250 && TYPE_VAX_FLOATING_POINT_P (etype)))
3251 || (POINTER_TYPE_P (etype) && ! TYPE_THIN_POINTER_P (etype))
3252 || (TREE_CODE (etype) == RECORD_TYPE
3253 && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype))))
3254 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3256 tree rtype = type;
3258 if (TREE_CODE (etype) == INTEGER_TYPE
3259 && TYPE_BIASED_REPRESENTATION_P (etype))
3261 tree ntype = copy_type (etype);
3263 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
3264 TYPE_MAIN_VARIANT (ntype) = ntype;
3265 expr = build1 (GNAT_NOP_EXPR, ntype, expr);
3268 if (TREE_CODE (type) == INTEGER_TYPE
3269 && TYPE_BIASED_REPRESENTATION_P (type))
3271 rtype = copy_type (type);
3272 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
3273 TYPE_MAIN_VARIANT (rtype) = rtype;
3276 expr = convert (rtype, expr);
3277 if (type != rtype)
3278 expr = build1 (GNAT_NOP_EXPR, type, expr);
3281 /* If we are converting TO an integral type whose precision is not the
3282 same as its size, first unchecked convert to a record that contains
3283 an object of the output type. Then extract the field. */
3284 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
3285 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3286 GET_MODE_BITSIZE (TYPE_MODE (type))))
3288 tree rec_type = make_node (RECORD_TYPE);
3289 tree field = create_field_decl (get_identifier ("OBJ"), type,
3290 rec_type, 1, 0, 0, 0);
3292 TYPE_FIELDS (rec_type) = field;
3293 layout_type (rec_type);
3295 expr = unchecked_convert (rec_type, expr);
3296 expr = build_component_ref (expr, NULL_TREE, field);
3299 /* Similarly for integral input type whose precision is not equal to its
3300 size. */
3301 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype) != 0
3302 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
3303 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3305 tree rec_type = make_node (RECORD_TYPE);
3306 tree field
3307 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
3308 1, 0, 0, 0);
3310 TYPE_FIELDS (rec_type) = field;
3311 layout_type (rec_type);
3313 expr = build_constructor (rec_type, build_tree_list (field, expr));
3314 expr = unchecked_convert (type, expr);
3317 /* We have a special case when we are converting between two
3318 unconstrained array types. In that case, take the address,
3319 convert the fat pointer types, and dereference. */
3320 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
3321 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3322 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3323 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
3324 build_unary_op (ADDR_EXPR, NULL_TREE,
3325 expr)));
3326 else
3328 expr = maybe_unconstrained_array (expr);
3329 etype = TREE_TYPE (expr);
3330 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
3333 /* If the result is an integral type whose size is not equal to
3334 the size of the underlying machine type, sign- or zero-extend
3335 the result. We need not do this in the case where the input is
3336 an integral type of the same precision and signedness or if the output
3337 is a biased type or if both the input and output are unsigned. */
3338 if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
3339 && ! (TREE_CODE (type) == INTEGER_TYPE
3340 && TYPE_BIASED_REPRESENTATION_P (type))
3341 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3342 GET_MODE_BITSIZE (TYPE_MODE (type)))
3343 && ! (INTEGRAL_TYPE_P (etype)
3344 && TREE_UNSIGNED (type) == TREE_UNSIGNED (etype)
3345 && operand_equal_p (TYPE_RM_SIZE (type),
3346 (TYPE_RM_SIZE (etype) != 0
3347 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
3349 && ! (TREE_UNSIGNED (type) && TREE_UNSIGNED (etype)))
3351 tree base_type = type_for_mode (TYPE_MODE (type), TREE_UNSIGNED (type));
3352 tree shift_expr
3353 = convert (base_type,
3354 size_binop (MINUS_EXPR,
3355 bitsize_int
3356 (GET_MODE_BITSIZE (TYPE_MODE (type))),
3357 TYPE_RM_SIZE (type)));
3358 expr
3359 = convert (type,
3360 build_binary_op (RSHIFT_EXPR, base_type,
3361 build_binary_op (LSHIFT_EXPR, base_type,
3362 convert (base_type, expr),
3363 shift_expr),
3364 shift_expr));
3367 /* An unchecked conversion should never raise Constraint_Error. The code
3368 below assumes that GCC's conversion routines overflow the same way that
3369 the underlying hardware does. This is probably true. In the rare case
3370 when it is false, we can rely on the fact that such conversions are
3371 erroneous anyway. */
3372 if (TREE_CODE (expr) == INTEGER_CST)
3373 TREE_OVERFLOW (expr) = TREE_CONSTANT_OVERFLOW (expr) = 0;
3375 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
3376 mark the result as no longer constant. */
3377 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3378 && ! operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype), 1))
3379 TREE_CONSTANT (expr) = 0;
3381 return expr;
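/* Illustrative sketch (not from the GNAT sources): the typical
   bit-preserving reinterpretation requested by Ada's Unchecked_Conversion,
   here from a 32-bit signed expression EXPR to a 32-bit unsigned type.
   No range check is made, so no Constraint_Error can be raised.  */

static tree
unchecked_convert_sketch (expr)
     tree expr;
{
  return unchecked_convert (type_for_size (32, 1), expr);
}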