1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * $Revision: 1.8 $
10 * *
11 * Copyright (C) 1992-2001, Free Software Foundation, Inc. *
12 * *
13 * GNAT is free software; you can redistribute it and/or modify it under *
14 * terms of the GNU General Public License as published by the Free Soft- *
15 * ware Foundation; either version 2, or (at your option) any later ver- *
16 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
17 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
18 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
19 * for more details. You should have received a copy of the GNU General *
20 * Public License distributed with GNAT; see file COPYING. If not, write *
21 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
22 * MA 02111-1307, USA. *
23 * *
24 * GNAT was originally developed by the GNAT team at New York University. *
25 * Extensive contributions were provided by Ada Core Technologies Inc. *
26 * *
27 ****************************************************************************/
29 #include "config.h"
30 #include "system.h"
31 #include "tree.h"
32 #include "flags.h"
33 #include "defaults.h"
34 #include "toplev.h"
35 #include "output.h"
36 #include "ggc.h"
37 #include "convert.h"
39 #include "ada.h"
40 #include "types.h"
41 #include "atree.h"
42 #include "elists.h"
43 #include "namet.h"
44 #include "nlists.h"
45 #include "stringt.h"
46 #include "uintp.h"
47 #include "fe.h"
48 #include "sinfo.h"
49 #include "einfo.h"
50 #include "ada-tree.h"
51 #include "gigi.h"
53 #ifndef MAX_FIXED_MODE_SIZE
54 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
55 #endif
57 #ifndef MAX_BITS_PER_WORD
58 #define MAX_BITS_PER_WORD BITS_PER_WORD
59 #endif
61 /* If nonzero, pretend we are allocating at global level. */
62 int force_global;
64 /* Global Variables for the various types we create. */
65 tree gnat_std_decls[(int) ADT_LAST];
67 /* Associates a GNAT tree node to a GCC tree node. It is used in
68 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
69 of `save_gnu_tree' for more info. */
70 static tree *associate_gnat_to_gnu;
72 /* This listhead is used to record any global objects that need elaboration.
73 TREE_PURPOSE is the variable to be elaborated and TREE_VALUE is the
74 initial value to assign. */
76 static tree pending_elaborations;
78 /* This stack allows us to momentarily switch to generating elaboration
79 lists for an inner context. */
81 static struct e_stack {struct e_stack *next; tree elab_list; } *elist_stack;
83 /* This variable keeps a table for types for each precision so that we only
84 allocate each of them once. Signed and unsigned types are kept separate.
86 Note that these types are only used when fold-const requests something
87 special. Perhaps we should NOT share these types; we'll see how it
88 goes later. */
89 static tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
91 /* Likewise for float types, but record these by mode. */
92 static tree float_types[NUM_MACHINE_MODES];
94 /* For each binding contour we allocate a binding_level structure which records
95 the entities defined or declared in that contour. Contours include:
97 the global one
98 one for each subprogram definition
99 one for each compound statement (declare block)
101 Binding contours are used to create GCC tree BLOCK nodes. */
103 struct binding_level
105 /* A chain of ..._DECL nodes for all variables, constants, functions,
106 parameters and type declarations. These ..._DECL nodes are chained
107 through the TREE_CHAIN field. Note that these ..._DECL nodes are stored
108 in the reverse of the order supplied to be compatible with the
109 back-end. */
110 tree names;
111 /* For each level (except the global one), a chain of BLOCK nodes for all
112 the levels that were entered and exited one level down from this one. */
113 tree blocks;
114 /* The BLOCK node for this level, if one has been preallocated.
115 If 0, the BLOCK is allocated (if needed) when the level is popped. */
116 tree this_block;
117 /* The binding level containing this one (the enclosing binding level). */
118 struct binding_level *level_chain;
121 /* The binding level currently in effect. */
122 static struct binding_level *current_binding_level = NULL;
124 /* A chain of binding_level structures awaiting reuse. */
125 static struct binding_level *free_binding_level = NULL;
127 /* The outermost binding level. This binding level is created when the
128 compiler is started and it will exist through the entire compilation. */
129 static struct binding_level *global_binding_level;
131 /* Binding level structures are initialized by copying this one. */
132 static struct binding_level clear_binding_level = {NULL, NULL, NULL, NULL};
135 static tree merge_sizes PARAMS ((tree, tree, tree, int, int));
136 static tree compute_related_constant PARAMS ((tree, tree));
137 static tree split_plus PARAMS ((tree, tree *));
138 static int value_zerop PARAMS ((tree));
139 static tree float_type_for_size PARAMS ((int, enum machine_mode));
140 static tree convert_to_fat_pointer PARAMS ((tree, tree));
141 static tree convert_to_thin_pointer PARAMS ((tree, tree));
142 static tree make_descriptor_field PARAMS ((const char *,tree, tree,
143 tree));
144 static void mark_binding_level PARAMS((PTR));
145 static void mark_e_stack PARAMS((PTR));
147 /* Initialize the association of GNAT nodes to GCC trees. */
149 void
150 init_gnat_to_gnu ()
152 Node_Id gnat_node;
154 associate_gnat_to_gnu = (tree *) xmalloc (max_gnat_nodes * sizeof (tree));
155 ggc_add_tree_root (associate_gnat_to_gnu, max_gnat_nodes);
157 for (gnat_node = 0; gnat_node < max_gnat_nodes; gnat_node++)
158 associate_gnat_to_gnu [gnat_node] = NULL_TREE;
160 associate_gnat_to_gnu -= First_Node_Id;
162 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
163 ggc_add_tree_root (&pending_elaborations, 1);
164 ggc_add_root ((PTR) &elist_stack, 1, sizeof (struct e_stack), mark_e_stack);
165 ggc_add_tree_root (&signed_and_unsigned_types[0][0],
166 (sizeof signed_and_unsigned_types
167 / sizeof signed_and_unsigned_types[0][0]));
168 ggc_add_tree_root (float_types, sizeof float_types / sizeof float_types[0]);
170 ggc_add_root (&current_binding_level, 1, sizeof current_binding_level,
171 mark_binding_level);
174 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
175 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
176 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
178 If GNU_DECL is zero, a previous association is to be reset. */
180 void
181 save_gnu_tree (gnat_entity, gnu_decl, no_check)
182 Entity_Id gnat_entity;
183 tree gnu_decl;
184 int no_check;
186 if (gnu_decl
187 && (associate_gnat_to_gnu [gnat_entity]
188 || (! no_check && ! DECL_P (gnu_decl))))
189 gigi_abort (401);
191 associate_gnat_to_gnu [gnat_entity] = gnu_decl;
194 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
195 Return the ..._DECL node that was associated with it. If there is no tree
196 node associated with GNAT_ENTITY, abort.
198 In some cases, such as delayed elaboration or expressions that need to
199 be elaborated only once, GNAT_ENTITY is really not an entity. */
201 tree
202 get_gnu_tree (gnat_entity)
203 Entity_Id gnat_entity;
205 if (! associate_gnat_to_gnu [gnat_entity])
206 gigi_abort (402);
208 return associate_gnat_to_gnu [gnat_entity];
211 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
214 present_gnu_tree (gnat_entity)
215 Entity_Id gnat_entity;
217 return (associate_gnat_to_gnu [gnat_entity] != NULL_TREE);
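/* Illustrative usage sketch for the association table above (a hedged
   example; gnat_entity and gnu_decl are hypothetical names, not from this
   file):

     if (! present_gnu_tree (gnat_entity))
       save_gnu_tree (gnat_entity, gnu_decl, 0);
     ...
     gnu_decl = get_gnu_tree (gnat_entity);

   Callers are expected to test with present_gnu_tree before reading, since
   get_gnu_tree calls gigi_abort (402) when no association exists.  */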
221 /* Return non-zero if we are currently in the global binding level. */
224 global_bindings_p ()
226 return (force_global != 0 || current_binding_level == global_binding_level
227 ? -1 : 0);
230 /* Return the list of declarations in the current level. Note that this list
231 is in reverse order (it has to be so for back-end compatibility). */
233 tree
234 getdecls ()
236 return current_binding_level->names;
239 /* Nonzero if the current level needs to have a BLOCK made. */
242 kept_level_p ()
244 return (current_binding_level->names != 0);
247 /* Enter a new binding level. The input parameter is ignored, but has to be
248 specified for back-end compatibility. */
250 void
251 pushlevel (ignore)
252 int ignore ATTRIBUTE_UNUSED;
254 struct binding_level *newlevel = NULL;
256 /* Reuse a struct for this binding level, if there is one. */
257 if (free_binding_level)
259 newlevel = free_binding_level;
260 free_binding_level = free_binding_level->level_chain;
262 else
263 newlevel
264 = (struct binding_level *) xmalloc (sizeof (struct binding_level));
266 *newlevel = clear_binding_level;
268 /* Add this level to the front of the chain (stack) of levels that are
269 active. */
270 newlevel->level_chain = current_binding_level;
271 current_binding_level = newlevel;
274 /* Exit a binding level.
275 Pop the level off, and restore the state of the identifier-decl mappings
276 that were in effect when this level was entered.
278 If KEEP is nonzero, this level had explicit declarations, so
 279          create a "block" (a BLOCK node) for the level
280 to record its declarations and subblocks for symbol table output.
282 If FUNCTIONBODY is nonzero, this level is the body of a function,
283 so create a block as if KEEP were set and also clear out all
284 label names.
286 If REVERSE is nonzero, reverse the order of decls before putting
287 them into the BLOCK. */
289 tree
290 poplevel (keep, reverse, functionbody)
291 int keep;
292 int reverse;
293 int functionbody;
 295   /* Points to a GCC BLOCK tree node. This is the BLOCK node constructed for the
296 binding level that we are about to exit and which is returned by this
297 routine. */
298 tree block = NULL_TREE;
299 tree decl_chain;
300 tree decl_node;
301 tree subblock_chain = current_binding_level->blocks;
302 tree subblock_node;
303 int block_previously_created;
305 /* Reverse the list of XXXX_DECL nodes if desired. Note that the ..._DECL
306 nodes chained through the `names' field of current_binding_level are in
 307      reverse order except for PARM_DECL nodes, which are explicitly stored in
308 the right order. */
309 current_binding_level->names
310 = decl_chain = (reverse) ? nreverse (current_binding_level->names)
311 : current_binding_level->names;
313 /* Output any nested inline functions within this block which must be
314 compiled because their address is needed. */
315 for (decl_node = decl_chain; decl_node; decl_node = TREE_CHAIN (decl_node))
316 if (TREE_CODE (decl_node) == FUNCTION_DECL
317 && ! TREE_ASM_WRITTEN (decl_node) && TREE_ADDRESSABLE (decl_node)
318 && DECL_INITIAL (decl_node) != 0)
320 push_function_context ();
321 output_inline_function (decl_node);
322 pop_function_context ();
325 block = 0;
326 block_previously_created = (current_binding_level->this_block != 0);
327 if (block_previously_created)
328 block = current_binding_level->this_block;
329 else if (keep || functionbody)
330 block = make_node (BLOCK);
331 if (block != 0)
333 BLOCK_VARS (block) = keep ? decl_chain : 0;
334 BLOCK_SUBBLOCKS (block) = subblock_chain;
 337   /* Record the BLOCK node just built as the subblock of its enclosing scope. */
338 for (subblock_node = subblock_chain; subblock_node;
339 subblock_node = TREE_CHAIN (subblock_node))
340 BLOCK_SUPERCONTEXT (subblock_node) = block;
342 /* Clear out the meanings of the local variables of this level. */
344 for (subblock_node = decl_chain; subblock_node;
345 subblock_node = TREE_CHAIN (subblock_node))
346 if (DECL_NAME (subblock_node) != 0)
347 /* If the identifier was used or addressed via a local extern decl,
348 don't forget that fact. */
349 if (DECL_EXTERNAL (subblock_node))
351 if (TREE_USED (subblock_node))
352 TREE_USED (DECL_NAME (subblock_node)) = 1;
353 if (TREE_ADDRESSABLE (subblock_node))
354 TREE_ADDRESSABLE (DECL_ASSEMBLER_NAME (subblock_node)) = 1;
358 /* Pop the current level, and free the structure for reuse. */
359 struct binding_level *level = current_binding_level;
360 current_binding_level = current_binding_level->level_chain;
361 level->level_chain = free_binding_level;
362 free_binding_level = level;
365 if (functionbody)
367 /* This is the top level block of a function. The ..._DECL chain stored
368 in BLOCK_VARS are the function's parameters (PARM_DECL nodes). Don't
369 leave them in the BLOCK because they are found in the FUNCTION_DECL
370 instead. */
371 DECL_INITIAL (current_function_decl) = block;
372 BLOCK_VARS (block) = 0;
374 else if (block)
376 if (!block_previously_created)
377 current_binding_level->blocks
378 = chainon (current_binding_level->blocks, block);
381 /* If we did not make a block for the level just exited, any blocks made for
382 inner levels (since they cannot be recorded as subblocks in that level)
383 must be carried forward so they will later become subblocks of something
384 else. */
385 else if (subblock_chain)
386 current_binding_level->blocks
387 = chainon (current_binding_level->blocks, subblock_chain);
388 if (block)
389 TREE_USED (block) = 1;
391 return block;
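/* Hedged sketch of the expected pairing (not from the original source): a
   declarative region is typically bracketed as

     pushlevel (0);
     ... pushdecl each local ..._DECL ...
     gnu_block = poplevel (1, 1, 0);

   which keeps the decls, reverses them into source order and returns the
   BLOCK; a subprogram body instead uses poplevel (1, 0, 1) so the BLOCK is
   stored in DECL_INITIAL of current_function_decl as described above.  */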
394 /* Insert BLOCK at the end of the list of subblocks of the
395 current binding level. This is used when a BIND_EXPR is expanded,
396 to handle the BLOCK node inside the BIND_EXPR. */
398 void
399 insert_block (block)
400 tree block;
402 TREE_USED (block) = 1;
403 current_binding_level->blocks
404 = chainon (current_binding_level->blocks, block);
407 /* Set the BLOCK node for the innermost scope
408 (the one we are currently in). */
410 void
411 set_block (block)
412 tree block;
414 current_binding_level->this_block = block;
415 current_binding_level->names = chainon (current_binding_level->names,
416 BLOCK_VARS (block));
417 current_binding_level->blocks = chainon (current_binding_level->blocks,
418 BLOCK_SUBBLOCKS (block));
421 /* Records a ..._DECL node DECL as belonging to the current lexical scope.
422 Returns the ..._DECL node. */
424 tree
425 pushdecl (decl)
426 tree decl;
428 struct binding_level *b;
430 /* If at top level, there is no context. But PARM_DECLs always go in the
 431      level of their function. */
432 if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
434 b = global_binding_level;
435 DECL_CONTEXT (decl) = 0;
437 else
439 b = current_binding_level;
440 DECL_CONTEXT (decl) = current_function_decl;
443 /* Put the declaration on the list. The list of declarations is in reverse
444 order. The list will be reversed later if necessary. This needs to be
445 this way for compatibility with the back-end.
447 Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into the list. They
448 will cause trouble with the debugger and aren't needed anyway. */
449 if (TREE_CODE (decl) != TYPE_DECL
450 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
452 TREE_CHAIN (decl) = b->names;
453 b->names = decl;
456 /* For the declaration of a type, set its name if it either is not already
457 set, was set to an IDENTIFIER_NODE, indicating an internal name,
458 or if the previous type name was not derived from a source name.
459 We'd rather have the type named with a real name and all the pointer
 460      types to the same object have the same POINTER_TYPE node. Code in the
 461      corresponding function in c-decl.c makes a copy of the type node here,
 462      but that may cause us trouble with incomplete types, so let's not try
 463      it (at least for now). */
465 if (TREE_CODE (decl) == TYPE_DECL
466 && DECL_NAME (decl) != 0
467 && (TYPE_NAME (TREE_TYPE (decl)) == 0
468 || TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == IDENTIFIER_NODE
469 || (TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == TYPE_DECL
470 && DECL_ARTIFICIAL (TYPE_NAME (TREE_TYPE (decl)))
471 && ! DECL_ARTIFICIAL (decl))))
472 TYPE_NAME (TREE_TYPE (decl)) = decl;
474 return decl;
477 /* Do little here. Set up the standard declarations later after the
478 front end has been run. */
480 void
481 gnat_init_decl_processing ()
483 lineno = 0;
485 /* incomplete_decl_finalize_hook is defined in toplev.c. It needs to be set
486 by each front end to the appropriate routine that handles incomplete
487 VAR_DECL nodes. This routine will be invoked by compile_file when a
488 VAR_DECL node of DECL_SIZE zero is encountered. */
489 incomplete_decl_finalize_hook = finish_incomplete_decl;
491 /* Make the binding_level structure for global names. */
492 current_function_decl = 0;
493 current_binding_level = 0;
494 free_binding_level = 0;
495 pushlevel (0);
496 global_binding_level = current_binding_level;
498 build_common_tree_nodes (0);
500 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
501 corresponding to the size of ptr_mode. Make this here since we need
502 this before we can expand the GNAT types. */
503 set_sizetype (type_for_size (GET_MODE_BITSIZE (ptr_mode), 0));
504 build_common_tree_nodes_2 (0);
506 pushdecl (build_decl (TYPE_DECL, get_identifier (SIZE_TYPE), sizetype));
508 /* We need to make the integer type before doing anything else.
509 We stitch this in to the appropriate GNAT type later. */
510 pushdecl (build_decl (TYPE_DECL, get_identifier ("integer"),
511 integer_type_node));
512 pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned char"),
513 char_type_node));
515 ptr_void_type_node = build_pointer_type (void_type_node);
519 /* Create the predefined scalar types such as `integer_type_node' needed
520 in the gcc back-end and initialize the global binding level. */
522 void
523 init_gigi_decls (long_long_float_type, exception_type)
524 tree long_long_float_type, exception_type;
526 tree endlink;
528 /* Set the types that GCC and Gigi use from the front end. We would like
529 to do this for char_type_node, but it needs to correspond to the C
530 char type. */
531 if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
533 /* In this case, the builtin floating point types are VAX float,
534 so make up a type for use. */
535 longest_float_type_node = make_node (REAL_TYPE);
536 TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
537 layout_type (longest_float_type_node);
538 pushdecl (build_decl (TYPE_DECL, get_identifier ("longest float type"),
539 longest_float_type_node));
541 else
542 longest_float_type_node = TREE_TYPE (long_long_float_type);
544 except_type_node = TREE_TYPE (exception_type);
546 unsigned_type_node = type_for_size (INT_TYPE_SIZE, 1);
547 pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned int"),
548 unsigned_type_node));
550 void_type_decl_node
551 = pushdecl (build_decl (TYPE_DECL, get_identifier ("void"),
552 void_type_node));
554 void_ftype = build_function_type (void_type_node, NULL_TREE);
555 ptr_void_ftype = build_pointer_type (void_ftype);
557 /* Now declare runtime functions. */
558 endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);
560 /* malloc is a function declaration tree for a function to allocate
561 memory. */
562 malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
563 NULL_TREE,
564 build_function_type (ptr_void_type_node,
565 tree_cons (NULL_TREE,
566 sizetype,
567 endlink)),
568 NULL_TREE, 0, 1, 1, 0);
570 /* free is a function declaration tree for a function to free memory. */
572 free_decl
573 = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
574 build_function_type (void_type_node,
575 tree_cons (NULL_TREE,
576 ptr_void_type_node,
577 endlink)),
578 NULL_TREE, 0, 1, 1, 0);
580 /* Make the types and functions used for exception processing. */
581 jmpbuf_type
582 = build_array_type (type_for_mode (Pmode, 0),
583 build_index_type (build_int_2 (5, 0)));
584 pushdecl (build_decl (TYPE_DECL, get_identifier ("JMPBUF_T"), jmpbuf_type));
585 jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);
587 /* Functions to get and set the jumpbuf pointer for the current thread. */
588 get_jmpbuf_decl
589 = create_subprog_decl
590 (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
591 NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
592 NULL_TREE, 0, 1, 1, 0);
594 set_jmpbuf_decl
595 = create_subprog_decl
596 (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
597 NULL_TREE,
598 build_function_type (void_type_node,
599 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
600 NULL_TREE, 0, 1, 1, 0);
602 /* Function to get the current exception. */
603 get_excptr_decl
604 = create_subprog_decl
605 (get_identifier ("system__soft_links__get_gnat_exception"),
606 NULL_TREE,
607 build_function_type (build_pointer_type (except_type_node), NULL_TREE),
608 NULL_TREE, 0, 1, 1, 0);
 610   /* Function that raises exceptions. */
611 raise_nodefer_decl
612 = create_subprog_decl
613 (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
614 build_function_type (void_type_node,
615 tree_cons (NULL_TREE,
616 build_pointer_type (except_type_node),
617 endlink)),
618 NULL_TREE, 0, 1, 1, 0);
 621   /* __gnat_raise_constraint_error takes a string and an integer, and
 622      never returns. */
623 raise_constraint_error_decl
624 = create_subprog_decl
625 (get_identifier ("__gnat_raise_constraint_error"), NULL_TREE,
626 build_function_type (void_type_node,
627 tree_cons (NULL_TREE,
628 build_pointer_type (char_type_node),
629 tree_cons (NULL_TREE,
630 integer_type_node,
631 endlink))),
632 NULL_TREE, 0, 1, 1, 0);
634 /* Likewise for __gnat_raise_program_error. */
635 raise_program_error_decl
636 = create_subprog_decl
637 (get_identifier ("__gnat_raise_program_error"), NULL_TREE,
638 build_function_type (void_type_node,
639 tree_cons (NULL_TREE,
640 build_pointer_type (char_type_node),
641 tree_cons (NULL_TREE,
642 integer_type_node,
643 endlink))),
644 NULL_TREE, 0, 1, 1, 0);
646 /* Likewise for __gnat_raise_storage_error. */
647 raise_storage_error_decl
648 = create_subprog_decl
649 (get_identifier ("__gnat_raise_storage_error"), NULL_TREE,
650 build_function_type (void_type_node,
651 tree_cons (NULL_TREE,
652 build_pointer_type (char_type_node),
653 tree_cons (NULL_TREE,
654 integer_type_node,
655 endlink))),
656 NULL_TREE, 0, 1, 1, 0);
658 /* Indicate that these never return. */
660 TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
661 TREE_THIS_VOLATILE (raise_constraint_error_decl) = 1;
662 TREE_THIS_VOLATILE (raise_program_error_decl) = 1;
663 TREE_THIS_VOLATILE (raise_storage_error_decl) = 1;
665 TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
666 TREE_SIDE_EFFECTS (raise_constraint_error_decl) = 1;
667 TREE_SIDE_EFFECTS (raise_program_error_decl) = 1;
668 TREE_SIDE_EFFECTS (raise_storage_error_decl) = 1;
670 TREE_TYPE (raise_nodefer_decl)
671 = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
672 TYPE_QUAL_VOLATILE);
673 TREE_TYPE (raise_constraint_error_decl)
674 = build_qualified_type (TREE_TYPE (raise_constraint_error_decl),
675 TYPE_QUAL_VOLATILE);
676 TREE_TYPE (raise_program_error_decl)
677 = build_qualified_type (TREE_TYPE (raise_program_error_decl),
678 TYPE_QUAL_VOLATILE);
679 TREE_TYPE (raise_storage_error_decl)
680 = build_qualified_type (TREE_TYPE (raise_storage_error_decl),
681 TYPE_QUAL_VOLATILE);
683 /* setjmp returns an integer and has one operand, which is a pointer to
684 a jmpbuf. */
685 setjmp_decl
686 = create_subprog_decl
687 (get_identifier ("setjmp"), NULL_TREE,
688 build_function_type (integer_type_node,
689 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
690 NULL_TREE, 0, 1, 1, 0);
692 DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
693 DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;
695 ggc_add_tree_root (gnat_std_decls,
696 sizeof gnat_std_decls / sizeof gnat_std_decls[0]);
699 /* This routine is called in tree.c to print an error message for invalid use
700 of an incomplete type. */
702 void
703 incomplete_type_error (dont_care_1, dont_care_2)
704 tree dont_care_1 ATTRIBUTE_UNUSED;
705 tree dont_care_2 ATTRIBUTE_UNUSED;
707 gigi_abort (404);
710 /* This function is called indirectly from toplev.c to handle incomplete
711 declarations, i.e. VAR_DECL nodes whose DECL_SIZE is zero. To be precise,
712 compile_file in toplev.c makes an indirect call through the function pointer
713 incomplete_decl_finalize_hook which is initialized to this routine in
714 init_decl_processing. */
716 void
717 finish_incomplete_decl (dont_care)
718 tree dont_care ATTRIBUTE_UNUSED;
720 gigi_abort (405);
723 /* Given a record type (RECORD_TYPE) and a chain of FIELD_DECL
724 nodes (FIELDLIST), finish constructing the record or union type.
725 If HAS_REP is nonzero, this record has a rep clause; don't call
726 layout_type but merely set the size and alignment ourselves.
727 If DEFER_DEBUG is nonzero, do not call the debugging routines
728 on this type; it will be done later. */
730 void
731 finish_record_type (record_type, fieldlist, has_rep, defer_debug)
732 tree record_type;
733 tree fieldlist;
734 int has_rep;
735 int defer_debug;
737 enum tree_code code = TREE_CODE (record_type);
738 tree ada_size = bitsize_zero_node;
739 tree size = bitsize_zero_node;
740 tree size_unit = size_zero_node;
741 tree field;
743 TYPE_FIELDS (record_type) = fieldlist;
745 if (TYPE_NAME (record_type) != 0
746 && TREE_CODE (TYPE_NAME (record_type)) == TYPE_DECL)
747 TYPE_STUB_DECL (record_type) = TYPE_NAME (record_type);
748 else
749 TYPE_STUB_DECL (record_type)
750 = pushdecl (build_decl (TYPE_DECL, TYPE_NAME (record_type),
751 record_type));
753 /* We don't need both the typedef name and the record name output in
754 the debugging information, since they are the same. */
755 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
757 /* Globally initialize the record first. If this is a rep'ed record,
758 that just means some initializations; otherwise, layout the record. */
760 if (has_rep)
762 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
763 TYPE_MODE (record_type) = BLKmode;
764 if (TYPE_SIZE (record_type) == 0)
766 TYPE_SIZE (record_type) = bitsize_zero_node;
767 TYPE_SIZE_UNIT (record_type) = size_zero_node;
770 else
772 /* Ensure there isn't a size already set. There can be in an error
773 case where there is a rep clause but all fields have errors and
774 no longer have a position. */
775 TYPE_SIZE (record_type) = 0;
776 layout_type (record_type);
779 /* At this point, the position and size of each field is known. It was
780 either set before entry by a rep clause, or by laying out the type
781 above. We now make a pass through the fields (in reverse order for
782 QUAL_UNION_TYPEs) to compute the Ada size; the GCC size and alignment
783 (for rep'ed records that are not padding types); and the mode (for
784 rep'ed records). */
786 if (code == QUAL_UNION_TYPE)
787 fieldlist = nreverse (fieldlist);
789 for (field = fieldlist; field; field = TREE_CHAIN (field))
791 tree type = TREE_TYPE (field);
792 tree this_size = DECL_SIZE (field);
793 tree this_size_unit = DECL_SIZE_UNIT (field);
794 tree this_ada_size = DECL_SIZE (field);
796 if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
797 || TREE_CODE (type) == QUAL_UNION_TYPE)
798 && ! TYPE_IS_FAT_POINTER_P (type)
799 && ! TYPE_CONTAINS_TEMPLATE_P (type)
800 && TYPE_ADA_SIZE (type) != 0)
801 this_ada_size = TYPE_ADA_SIZE (type);
803 if (has_rep && ! DECL_BIT_FIELD (field))
804 TYPE_ALIGN (record_type)
805 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
807 switch (code)
809 case UNION_TYPE:
810 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
811 size = size_binop (MAX_EXPR, size, this_size);
812 size_unit = size_binop (MAX_EXPR, size_unit, this_size_unit);
813 break;
815 case QUAL_UNION_TYPE:
816 ada_size
817 = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
818 this_ada_size, ada_size));
819 size = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
820 this_size, size));
821 size_unit = fold (build (COND_EXPR, sizetype, DECL_QUALIFIER (field),
822 this_size_unit, size_unit));
823 break;
825 case RECORD_TYPE:
826 /* Since we know here that all fields are sorted in order of
827 increasing bit position, the size of the record is one
828 higher than the ending bit of the last field processed
829 unless we have a rep clause, since in that case we might
830 have a field outside a QUAL_UNION_TYPE that has a higher ending
831 position. So use a MAX in that case. Also, if this field is a
832 QUAL_UNION_TYPE, we need to take into account the previous size in
833 the case of empty variants. */
834 ada_size
835 = merge_sizes (ada_size, bit_position (field), this_ada_size,
836 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
837 size = merge_sizes (size, bit_position (field), this_size,
838 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
839 size_unit
840 = merge_sizes (size_unit, byte_position (field), this_size_unit,
841 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
842 break;
844 default:
845 abort ();
849 if (code == QUAL_UNION_TYPE)
850 nreverse (fieldlist);
852 /* If this is a padding record, we never want to make the size smaller than
853 what was specified in it, if any. */
854 if (TREE_CODE (record_type) == RECORD_TYPE
855 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type) != 0)
857 size = TYPE_SIZE (record_type);
858 size_unit = TYPE_SIZE_UNIT (record_type);
861 /* Now set any of the values we've just computed that apply. */
862 if (! TYPE_IS_FAT_POINTER_P (record_type)
863 && ! TYPE_CONTAINS_TEMPLATE_P (record_type))
864 TYPE_ADA_SIZE (record_type) = ada_size;
866 #ifdef ROUND_TYPE_SIZE
867 size = ROUND_TYPE_SIZE (record_type, size, TYPE_ALIGN (record_type));
 868   size_unit = ROUND_TYPE_SIZE_UNIT (record_type, size_unit,
869 TYPE_ALIGN (record_type) / BITS_PER_UNIT);
870 #else
871 size = round_up (size, TYPE_ALIGN (record_type));
872 size_unit = round_up (size_unit, TYPE_ALIGN (record_type) / BITS_PER_UNIT);
873 #endif
875 if (has_rep
876 && ! (TREE_CODE (record_type) == RECORD_TYPE
877 && TYPE_IS_PADDING_P (record_type)
878 && TREE_CODE (size) != INTEGER_CST
879 && contains_placeholder_p (size)))
881 TYPE_SIZE (record_type) = size;
882 TYPE_SIZE_UNIT (record_type) = size_unit;
885 if (has_rep)
886 compute_record_mode (record_type);
888 if (! defer_debug)
890 /* If this record is of variable size, rename it so that the
891 debugger knows it is and make a new, parallel, record
892 that tells the debugger how the record is laid out. See
893 exp_dbug.ads. */
894 if (TREE_CODE (TYPE_SIZE (record_type)) != INTEGER_CST)
896 tree new_record_type
897 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
898 ? UNION_TYPE : TREE_CODE (record_type));
899 tree orig_id = DECL_NAME (TYPE_STUB_DECL (record_type));
900 tree new_id
901 = concat_id_with_name (orig_id,
902 TREE_CODE (record_type) == QUAL_UNION_TYPE
903 ? "XVU" : "XVE");
904 tree last_pos = bitsize_zero_node;
905 tree old_field;
907 TYPE_NAME (new_record_type) = new_id;
908 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
909 TYPE_STUB_DECL (new_record_type)
910 = pushdecl (build_decl (TYPE_DECL, new_id, new_record_type));
911 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
912 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
913 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
914 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
916 /* Now scan all the fields, replacing each field with a new
917 field corresponding to the new encoding. */
918 for (old_field = TYPE_FIELDS (record_type); old_field != 0;
919 old_field = TREE_CHAIN (old_field))
921 tree field_type = TREE_TYPE (old_field);
922 tree field_name = DECL_NAME (old_field);
923 tree new_field;
924 tree curpos = bit_position (old_field);
925 int var = 0;
926 unsigned int align = 0;
927 tree pos;
929 /* See how the position was modified from the last position.
931 There are two basic cases we support: a value was added
932 to the last position or the last position was rounded to
 933              a boundary and then something was added. Check for the
934 first case first. If not, see if there is any evidence
935 of rounding. If so, round the last position and try
936 again.
938 If this is a union, the position can be taken as zero. */
940 if (TREE_CODE (new_record_type) == UNION_TYPE)
941 pos = bitsize_zero_node, align = 0;
942 else
943 pos = compute_related_constant (curpos, last_pos);
945 if (pos == 0 && TREE_CODE (curpos) == MULT_EXPR
946 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST)
948 align = TREE_INT_CST_LOW (TREE_OPERAND (curpos, 1));
949 pos = compute_related_constant (curpos,
950 round_up (last_pos, align));
952 else if (pos == 0 && TREE_CODE (curpos) == PLUS_EXPR
953 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
954 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
955 && host_integerp (TREE_OPERAND
 956                                     (TREE_OPERAND (curpos, 0), 1),
 957                                     1))
959 align
960 = tree_low_cst
961 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
962 pos = compute_related_constant (curpos,
963 round_up (last_pos, align));
966 /* If we can't compute a position, set it to zero.
968 ??? We really should abort here, but it's too much work
969 to get this correct for all cases. */
971 if (pos == 0)
972 pos = bitsize_zero_node;
974 /* See if this type is variable-size and make a new type
975 and indicate the indirection if so. */
976 if (TREE_CODE (TYPE_SIZE (field_type)) != INTEGER_CST)
978 field_type = build_pointer_type (field_type);
979 var = 1;
982 /* Make a new field name, if necessary. */
983 if (var || align != 0)
985 char suffix[6];
987 if (align != 0)
988 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
989 align / BITS_PER_UNIT);
990 else
991 strcpy (suffix, "XVL");
993 field_name = concat_id_with_name (field_name, suffix);
996 new_field = create_field_decl (field_name, field_type,
997 new_record_type, 0,
998 TYPE_SIZE (field_type), pos, 0);
999 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
1000 TYPE_FIELDS (new_record_type) = new_field;
1002 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1003 zero. The only time it's not the last field of the record
1004 is when there are other components at fixed positions after
1005 it (meaning there was a rep clause for every field) and we
1006 want to be able to encode them. */
1007 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1008 (TREE_CODE (TREE_TYPE (old_field))
1009 == QUAL_UNION_TYPE)
1010 ? bitsize_zero_node
1011 : TYPE_SIZE (TREE_TYPE (old_field)));
1014 TYPE_FIELDS (new_record_type)
1015 = nreverse (TYPE_FIELDS (new_record_type));
1017 rest_of_type_compilation (new_record_type, global_bindings_p ());
1020 rest_of_type_compilation (record_type, global_bindings_p ());
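/* Hedged sketch of the usual calling sequence (gnu_type and the field name
   are hypothetical):

     tree gnu_type  = make_node (RECORD_TYPE);
     tree gnu_field = create_field_decl (get_identifier ("f"),
                                         integer_type_node, gnu_type,
                                         0, NULL_TREE, NULL_TREE, 1);
     finish_record_type (gnu_type, gnu_field, 0, 0);

   i.e. the FIELD_DECLs are built first (chained through TREE_CHAIN), then
   finish_record_type lays the record out, computes TYPE_ADA_SIZE and emits
   the debugging information unless DEFER_DEBUG is set.  */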
1024 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1025 with FIRST_BIT and SIZE that describe a field. SPECIAL is nonzero
1026 if this represents a QUAL_UNION_TYPE in which case we must look for
1027 COND_EXPRs and replace a value of zero with the old size. If HAS_REP
1028 is nonzero, we must take the MAX of the end position of this field
1029 with LAST_SIZE. In all other cases, we use FIRST_BIT plus SIZE.
1031 We return an expression for the size. */
1033 static tree
1034 merge_sizes (last_size, first_bit, size, special, has_rep)
1035 tree last_size;
1036 tree first_bit, size;
1037 int special;
1038 int has_rep;
1040 tree type = TREE_TYPE (last_size);
1042 if (! special || TREE_CODE (size) != COND_EXPR)
1044 tree new = size_binop (PLUS_EXPR, first_bit, size);
1046 if (has_rep)
1047 new = size_binop (MAX_EXPR, last_size, new);
1049 return new;
1052 return fold (build (COND_EXPR, type, TREE_OPERAND (size, 0),
1053 integer_zerop (TREE_OPERAND (size, 1))
1054 ? last_size : merge_sizes (last_size, first_bit,
1055 TREE_OPERAND (size, 1),
1056 1, has_rep),
1057 integer_zerop (TREE_OPERAND (size, 2))
1058 ? last_size : merge_sizes (last_size, first_bit,
1059 TREE_OPERAND (size, 2),
1060 1, has_rep)));
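/* Worked example with hypothetical constant sizes (not from the source):
   for a record with no rep clause, a 16-bit field starting at bit 32 merged
   into a previous size of 32 gives

     merge_sizes (bitsize_int (32), bitsize_int (32), bitsize_int (16), 0, 0)

   which is FIRST_BIT + SIZE = 48 bits.  With HAS_REP nonzero the result is
   instead the MAX of that sum and LAST_SIZE, so a field placed before the
   current end of the record cannot shrink the size.  */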
1063 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1064 related by the addition of a constant. Return that constant if so. */
1066 static tree
1067 compute_related_constant (op0, op1)
1068 tree op0, op1;
1070 tree op0_var, op1_var;
1071 tree op0_con = split_plus (op0, &op0_var);
1072 tree op1_con = split_plus (op1, &op1_var);
1073 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1075 if (operand_equal_p (op0_var, op1_var, 0))
1076 return result;
1077 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1078 return result;
1079 else
1080 return 0;
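/* Hedged example (hypothetical operands): if OP0 is N + 16 and OP1 is N + 8
   for the same variable part N, compute_related_constant returns the
   constant 8; if the variable parts cannot be matched it returns 0, meaning
   the two positions are not known to differ by a constant.  */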
1083 /* Utility function of above to split a tree OP which may be a sum, into a
1084 constant part, which is returned, and a variable part, which is stored
1085 in *PVAR. *PVAR may be size_zero_node. All operations must be of
1086 sizetype. */
1088 static tree
1089 split_plus (in, pvar)
1090 tree in;
1091 tree *pvar;
1093 tree result = bitsize_zero_node;
1095 while (TREE_CODE (in) == NON_LVALUE_EXPR)
1096 in = TREE_OPERAND (in, 0);
1098 *pvar = in;
1099 if (TREE_CODE (in) == INTEGER_CST)
1101 *pvar = bitsize_zero_node;
1102 return in;
1104 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1106 tree lhs_var, rhs_var;
1107 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1108 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1110 result = size_binop (PLUS_EXPR, result, lhs_con);
1111 result = size_binop (TREE_CODE (in), result, rhs_con);
1113 if (lhs_var == TREE_OPERAND (in, 0)
1114 && rhs_var == TREE_OPERAND (in, 1))
1115 return bitsize_zero_node;
1117 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1118 return result;
1120 else
1121 return bitsize_zero_node;
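/* Hedged example (N is a hypothetical non-constant sizetype expression):
   split_plus applied to N + 8 returns the constant 8 and stores the variable
   part N in *PVAR, while applied to a plain INTEGER_CST it returns that
   constant and sets *PVAR to bitsize_zero_node.  */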
1124 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1125 subprogram. If it is void_type_node, then we are dealing with a procedure,
1126 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1127 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1128 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1129 RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
1130 object. RETURNS_BY_REF is nonzero if the function returns by reference.
1131 RETURNS_WITH_DSP is nonzero if the function is to return with a
1132 depressed stack pointer. */
1134 tree
1135 create_subprog_type (return_type, param_decl_list, cico_list,
1136 returns_unconstrained, returns_by_ref, returns_with_dsp)
1137 tree return_type;
1138 tree param_decl_list;
1139 tree cico_list;
1140 int returns_unconstrained, returns_by_ref, returns_with_dsp;
1142 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1143 the subprogram formal parameters. This list is generated by traversing the
1144 input list of PARM_DECL nodes. */
1145 tree param_type_list = NULL;
1146 tree param_decl;
1147 tree type;
1149 for (param_decl = param_decl_list; param_decl;
1150 param_decl = TREE_CHAIN (param_decl))
1151 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1152 param_type_list);
1154 /* The list of the function parameter types has to be terminated by the void
1155 type to signal to the back-end that we are not dealing with a variable
1156 parameter subprogram, but that the subprogram has a fixed number of
1157 parameters. */
1158 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1160 /* The list of argument types has been created in reverse
1161 so nreverse it. */
1162 param_type_list = nreverse (param_type_list);
1164 type = build_function_type (return_type, param_type_list);
 1166   /* TYPE may have been shared since GCC hashes types. If it already has a
 1167      CICO_LIST, or the new type should have one, make a copy of TYPE.
 1168      Likewise for RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1169 if (TYPE_CI_CO_LIST (type) != 0 || cico_list != 0
1170 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1171 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref)
1172 type = copy_type (type);
1174 TYPE_CI_CO_LIST (type) = cico_list;
1175 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1176 TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
1177 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1178 return type;
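/* Hedged usage sketch (the parameter name is hypothetical): the
   FUNCTION_TYPE for a procedure with one Integer IN parameter and no
   copy-in/copy-out list would be built roughly as

     tree gnu_param = create_param_decl (get_identifier ("x"),
                                         integer_type_node, 1);
     tree gnu_type  = create_subprog_type (void_type_node, gnu_param,
                                           NULL_TREE, 0, 0, 0);

   with void_type_node as RETURN_TYPE marking it as a procedure.  */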
1181 /* Return a copy of TYPE but safe to modify in any way. */
1183 tree
1184 copy_type (type)
1185 tree type;
1187 tree new = copy_node (type);
1189 /* copy_node clears this field instead of copying it, because it is
1190 aliased with TREE_CHAIN. */
1191 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
1193 TYPE_POINTER_TO (new) = 0;
1194 TYPE_REFERENCE_TO (new) = 0;
1195 TYPE_MAIN_VARIANT (new) = new;
1196 TYPE_NEXT_VARIANT (new) = 0;
1198 return new;
1201 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1202 TYPE_INDEX_TYPE is INDEX. */
1204 tree
1205 create_index_type (min, max, index)
1206 tree min, max;
1207 tree index;
1209 /* First build a type for the desired range. */
1210 tree type = build_index_2_type (min, max);
1212 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1213 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1214 is set, but not to INDEX, make a copy of this type with the requested
1215 index type. Note that we have no way of sharing these types, but that's
1216 only a small hole. */
1217 if (TYPE_INDEX_TYPE (type) == index)
1218 return type;
1219 else if (TYPE_INDEX_TYPE (type) != 0)
1220 type = copy_type (type);
1222 TYPE_INDEX_TYPE (type) = index;
1223 return type;
1226 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type (a character
1227 string) and TYPE is a ..._TYPE node giving its data type.
1228 ARTIFICIAL_P is nonzero if this is a declaration that was generated
1229 by the compiler. DEBUG_INFO_P is nonzero if we need to write debugging
1230 information about this type. */
1232 tree
1233 create_type_decl (type_name, type, attr_list, artificial_p, debug_info_p)
1234 tree type_name;
1235 tree type;
1236 struct attrib *attr_list;
1237 int artificial_p;
1238 int debug_info_p;
1240 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1241 enum tree_code code = TREE_CODE (type);
1243 DECL_ARTIFICIAL (type_decl) = artificial_p;
1244 pushdecl (type_decl);
1245 process_attributes (type_decl, attr_list);
1247 /* Pass type declaration information to the debugger unless this is an
1248 UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
 1249      an ENUMERAL_TYPE or RECORD_TYPE, which are handled separately,
1250 a dummy type, which will be completed later, or a type for which
1251 debugging information was not requested. */
1252 if (code == UNCONSTRAINED_ARRAY_TYPE || TYPE_IS_DUMMY_P (type)
1253 || ! debug_info_p)
1254 DECL_IGNORED_P (type_decl) = 1;
1255 else if (code != ENUMERAL_TYPE && code != RECORD_TYPE
1256 && ! ((code == POINTER_TYPE || code == REFERENCE_TYPE)
1257 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1258 rest_of_decl_compilation (type_decl, NULL, global_bindings_p (), 0);
1260 return type_decl;
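/* Hedged sketch (gnu_type is hypothetical): a compiler-generated type for
   which no debug information is wanted would be declared as

     create_type_decl (get_identifier ("T"), gnu_type, NULL, 1, 0);

   which pushes the TYPE_DECL, applies no attributes, marks it artificial
   and sets DECL_IGNORED_P so it is not passed to the debugger.  */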
1263 /* Returns a GCC VAR_DECL node. VAR_NAME gives the name of the variable.
1264 ASM_NAME is its assembler name (if provided). TYPE is its data type
1265 (a GCC ..._TYPE node). VAR_INIT is the GCC tree for an optional initial
1266 expression; NULL_TREE if none.
1268 CONST_FLAG is nonzero if this variable is constant.
1270 PUBLIC_FLAG is nonzero if this definition is to be made visible outside of
1271 the current compilation unit. This flag should be set when processing the
1272 variable definitions in a package specification. EXTERN_FLAG is nonzero
1273 when processing an external variable declaration (as opposed to a
1274 definition: no storage is to be allocated for the variable here).
1276 STATIC_FLAG is only relevant when not at top level. In that case
1277 it indicates whether to always allocate storage to the variable. */
1279 tree
1280 create_var_decl (var_name, asm_name, type, var_init, const_flag, public_flag,
1281 extern_flag, static_flag, attr_list)
1282 tree var_name;
1283 tree asm_name;
1284 tree type;
1285 tree var_init;
1286 int const_flag;
1287 int public_flag;
1288 int extern_flag;
1289 int static_flag;
1290 struct attrib *attr_list;
1292 int init_const
 1293     = (var_init == 0
 1294        ? 0
 1295        : (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
1296 && (global_bindings_p () || static_flag
1297 ? 0 != initializer_constant_valid_p (var_init,
1298 TREE_TYPE (var_init))
1299 : TREE_CONSTANT (var_init))));
1300 tree var_decl
1301 = build_decl ((const_flag && init_const
1302 /* Only make a CONST_DECL for sufficiently-small objects.
 1303                       We consider complex double "sufficiently-small".  */
1304 && TYPE_SIZE (type) != 0
1305 && host_integerp (TYPE_SIZE_UNIT (type), 1)
1306 && 0 >= compare_tree_int (TYPE_SIZE_UNIT (type),
1307 GET_MODE_SIZE (DCmode)))
1308 ? CONST_DECL : VAR_DECL, var_name, type);
1309 tree assign_init = 0;
1311 /* If this is external, throw away any initializations unless this is a
1312 CONST_DECL (meaning we have a constant); they will be done elsewhere. If
1313 we are defining a global here, leave a constant initialization and save
1314 any variable elaborations for the elaboration routine. Otherwise, if
1315 the initializing expression is not the same as TYPE, generate the
1316 initialization with an assignment statement, since it knows how
 1317      to do the required adjustments. If we are just annotating types,
1318 throw away the initialization if it isn't a constant. */
1320 if ((extern_flag && TREE_CODE (var_decl) != CONST_DECL)
1321 || (type_annotate_only && var_init != 0 && ! TREE_CONSTANT (var_init)))
1322 var_init = 0;
1324 if (global_bindings_p () && var_init != 0 && ! init_const)
1326 add_pending_elaborations (var_decl, var_init);
1327 var_init = 0;
1330 else if (var_init != 0
1331 && ((TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
1332 != TYPE_MAIN_VARIANT (type))
1333 || (static_flag && ! init_const)))
1334 assign_init = var_init, var_init = 0;
1336 DECL_COMMON (var_decl) = !flag_no_common;
1337 DECL_INITIAL (var_decl) = var_init;
1338 TREE_READONLY (var_decl) = const_flag;
1339 DECL_EXTERNAL (var_decl) = extern_flag;
1340 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1341 TREE_CONSTANT (var_decl) = TREE_CODE (var_decl) == CONST_DECL;
1342 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1343 = TYPE_VOLATILE (type);
1345 /* At the global binding level we need to allocate static storage for the
 1346      variable if and only if it's not external. If we are not at the top level
1347 we allocate automatic storage unless requested not to. */
1348 TREE_STATIC (var_decl) = global_bindings_p () ? !extern_flag : static_flag;
1350 if (asm_name != 0)
1351 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1353 process_attributes (var_decl, attr_list);
1355 /* Add this decl to the current binding level and generate any
1356 needed code and RTL. */
1357 var_decl = pushdecl (var_decl);
1358 expand_decl (var_decl);
1360 if (DECL_CONTEXT (var_decl) != 0)
1361 expand_decl_init (var_decl);
1363 /* If this is volatile, force it into memory. */
1364 if (TREE_SIDE_EFFECTS (var_decl))
1365 mark_addressable (var_decl);
1367 if (TREE_CODE (var_decl) != CONST_DECL)
1368 rest_of_decl_compilation (var_decl, 0, global_bindings_p (), 0);
1370 if (assign_init != 0)
1372 /* If VAR_DECL has a padded type, convert it to the unpadded
1373 type so the assignment is done properly. */
1374 tree lhs = var_decl;
1376 if (TREE_CODE (TREE_TYPE (lhs)) == RECORD_TYPE
1377 && TYPE_IS_PADDING_P (TREE_TYPE (lhs)))
1378 lhs = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (lhs))), lhs);
1380 expand_expr_stmt (build_binary_op (MODIFY_EXPR, NULL_TREE, lhs,
1381 assign_init));
1384 return var_decl;
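/* Hedged usage sketch (names and values are hypothetical): a library-level
   constant with a static initializer might be created as

     create_var_decl (get_identifier ("c"), NULL_TREE, integer_type_node,
                      integer_one_node, 1, 1, 0, 0, NULL);

   At the global binding level this typically yields a CONST_DECL, since the
   object is small and the initializer is constant; a non-constant
   initializer would instead be queued via add_pending_elaborations and
   assigned by the elaboration routine.  */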
 1387 /* Returns a FIELD_DECL node. FIELD_NAME is the field name, FIELD_TYPE is its
1388 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1389 this field is in a record type with a "pragma pack". If SIZE is nonzero
1390 it is the specified size for this field. If POS is nonzero, it is the bit
1391 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1392 the address of this field for aliasing purposes. */
1394 tree
1395 create_field_decl (field_name, field_type, record_type, packed, size, pos,
1396 addressable)
1397 tree field_name;
1398 tree field_type;
1399 tree record_type;
1400 int packed;
1401 tree size, pos;
1402 int addressable;
1404 tree field_decl = build_decl (FIELD_DECL, field_name, field_type);
1406 DECL_CONTEXT (field_decl) = record_type;
1407 TREE_READONLY (field_decl) = TREE_READONLY (field_type);
1409 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1410 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1411 If it is a padding type where the inner field is of variable size, it
1412 must be at its natural alignment. Just handle the packed case here; we
1413 will disallow non-aligned rep clauses elsewhere. */
1414 if (packed && TYPE_MODE (field_type) == BLKmode)
1415 DECL_ALIGN (field_decl)
1416 = ((TREE_CODE (field_type) == RECORD_TYPE
1417 && TYPE_IS_PADDING_P (field_type)
1418 && ! TREE_CONSTANT (DECL_SIZE (TYPE_FIELDS (field_type))))
1419 ? TYPE_ALIGN (field_type) : BITS_PER_UNIT);
1421 /* If a size is specified, use it. Otherwise, see if we have a size
1422 to use that may differ from the natural size of the object. */
1423 if (size != 0)
1424 size = convert (bitsizetype, size);
1425 else if (packed)
1427 if (packed == 1 && ! operand_equal_p (rm_size (field_type),
1428 TYPE_SIZE (field_type), 0))
1429 size = rm_size (field_type);
1431 /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
 1432      a byte boundary. */
1433 if (size != 0 && TREE_CODE (size) == INTEGER_CST
1434 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
1435 size = round_up (size, BITS_PER_UNIT);
1438 /* Make a bitfield if a size is specified for two reasons: first if the size
1439 differs from the natural size. Second, if the alignment is insufficient.
1440 There are a number of ways the latter can be true. But never make a
1441 bitfield if the type of the field has a nonconstant size. */
1443 if (size != 0 && TREE_CODE (size) == INTEGER_CST
1444 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1445 && (! operand_equal_p (TYPE_SIZE (field_type), size, 0)
1446 || (pos != 0
1447 && ! value_zerop (size_binop (TRUNC_MOD_EXPR, pos,
1448 bitsize_int (TYPE_ALIGN
1449 (field_type)))))
1450 || packed
1451 || (TYPE_ALIGN (record_type) != 0
1452 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1454 DECL_BIT_FIELD (field_decl) = 1;
1455 DECL_SIZE (field_decl) = size;
1456 if (! packed && pos == 0)
1457 DECL_ALIGN (field_decl)
1458 = (TYPE_ALIGN (record_type) != 0
1459 ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
1460 : TYPE_ALIGN (field_type));
1463 DECL_PACKED (field_decl) = pos != 0 ? DECL_BIT_FIELD (field_decl) : packed;
1464 DECL_ALIGN (field_decl)
1465 = MAX (DECL_ALIGN (field_decl),
1466 DECL_BIT_FIELD (field_decl) ? 1
1467 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT
1468 : TYPE_ALIGN (field_type));
1470 if (pos != 0)
1472 /* We need to pass in the alignment the DECL is known to have.
1473 This is the lowest-order bit set in POS, but no more than
1474 the alignment of the record, if one is specified. Note
1475 that an alignment of 0 is taken as infinite. */
1476 unsigned int known_align;
1478 if (host_integerp (pos, 1))
1479 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1480 else
1481 known_align = BITS_PER_UNIT;
1483 if (TYPE_ALIGN (record_type)
1484 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1485 known_align = TYPE_ALIGN (record_type);
1487 layout_decl (field_decl, known_align);
1488 SET_DECL_OFFSET_ALIGN (field_decl, BIGGEST_ALIGNMENT);
1489 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1490 &DECL_FIELD_BIT_OFFSET (field_decl),
1491 BIGGEST_ALIGNMENT, pos);
1493 DECL_HAS_REP_P (field_decl) = 1;
1496 /* Mark the decl as nonaddressable if it either is indicated so semantically
1497 or if it is a bit field. */
1498 DECL_NONADDRESSABLE_P (field_decl)
1499 = ! addressable || DECL_BIT_FIELD (field_decl);
1501 return field_decl;
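/* Hedged example (gnu_record and the field name are hypothetical): a
   component clause putting an 8-bit field at bit 3 of a rep'ed record would
   be reflected roughly as

     create_field_decl (get_identifier ("b"), char_type_node, gnu_record,
                        0, bitsize_int (8), bitsize_int (3), 0);

   Because the position is not a multiple of the type's alignment, the field
   becomes a DECL_BIT_FIELD and is placed at exactly that offset through
   layout_decl and pos_from_bit, with DECL_HAS_REP_P set.  */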
1504 /* Subroutine of previous function: return nonzero if EXP, ignoring any side
1505 effects, has the value of zero. */
1507 static int
1508 value_zerop (exp)
1509 tree exp;
1511 if (TREE_CODE (exp) == COMPOUND_EXPR)
1512 return value_zerop (TREE_OPERAND (exp, 1));
1514 return integer_zerop (exp);
1517 /* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1518 PARAM_TYPE is its type. READONLY is nonzero if the parameter is
1519 readonly (either an IN parameter or an address of a pass-by-ref
1520 parameter). */
1522 tree
1523 create_param_decl (param_name, param_type, readonly)
1524 tree param_name;
1525 tree param_type;
1526 int readonly;
1528 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
1530 DECL_ARG_TYPE (param_decl) = param_type;
1531 DECL_ARG_TYPE_AS_WRITTEN (param_decl) = param_type;
1532 TREE_READONLY (param_decl) = readonly;
1533 return param_decl;
1536 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1538 void
1539 process_attributes (decl, attr_list)
1540 tree decl;
1541 struct attrib *attr_list;
1543 for (; attr_list; attr_list = attr_list->next)
1544 switch (attr_list->type)
1546 case ATTR_MACHINE_ATTRIBUTE:
1547 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->arg,
1548 NULL_TREE),
1549 ATTR_FLAG_TYPE_IN_PLACE);
1550 break;
1552 case ATTR_LINK_ALIAS:
1553 TREE_STATIC (decl) = 1;
1554 assemble_alias (decl, attr_list->name);
1555 break;
1557 case ATTR_WEAK_EXTERNAL:
1558 if (SUPPORTS_WEAK)
1559 declare_weak (decl);
1560 else
1561 post_error ("?weak declarations not supported on this target",
1562 attr_list->error_point);
1563 break;
1565 case ATTR_LINK_SECTION:
1566 #ifdef ASM_OUTPUT_SECTION_NAME
1567 DECL_SECTION_NAME (decl)
1568 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1569 IDENTIFIER_POINTER (attr_list->name));
1570 DECL_COMMON (decl) = 0;
1571 #else
1572 post_error ("?section attributes are not supported for this target",
1573 attr_list->error_point);
1574 #endif
1575 break;
1579 /* Add some pending elaborations on the list. */
1581 void
1582 add_pending_elaborations (var_decl, var_init)
1583 tree var_decl;
1584 tree var_init;
1586 if (var_init != 0)
1587 Check_Elaboration_Code_Allowed (error_gnat_node);
1589 pending_elaborations
1590 = chainon (pending_elaborations, build_tree_list (var_decl, var_init));
1593 /* Obtain any pending elaborations and clear the old list. */
1595 tree
1596 get_pending_elaborations ()
 1598   /* Each thing added to the list went on the end; we want it at the
 1599      beginning. */
1600 tree result = TREE_CHAIN (pending_elaborations);
1602 TREE_CHAIN (pending_elaborations) = 0;
1603 return result;
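/* Hedged sketch of the elaboration-list protocol (identifiers are
   hypothetical): global initializations are queued with

     add_pending_elaborations (gnu_var, gnu_init);

   and the unit's elaboration code later drains them with

     tree gnu_list = get_pending_elaborations ();
     for (; gnu_list; gnu_list = TREE_CHAIN (gnu_list))
       ... emit an assignment of TREE_VALUE (gnu_list)
           to TREE_PURPOSE (gnu_list) ...

   push_pending_elaborations and pop_pending_elaborations below bracket any
   nested context that needs its own list.  */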
1606 /* Mark the binding level stack. */
1608 static void
1609 mark_binding_level (arg)
1610 PTR arg;
1612 struct binding_level *level = *(struct binding_level **) arg;
1614 for (; level != 0; level = level->level_chain)
1616 ggc_mark_tree (level->names);
1617 ggc_mark_tree (level->blocks);
1618 ggc_mark_tree (level->this_block);
1622 /* Mark the pending elaboration list. */
1624 static void
1625 mark_e_stack (data)
1626 PTR data;
1628 struct e_stack *p = *((struct e_stack **) data);
1630 if (p != 0)
1632 ggc_mark_tree (p->elab_list);
1633 mark_e_stack (&p->next);
1637 /* Return nonzero if there are pending elaborations. */
1640 pending_elaborations_p ()
1642 return TREE_CHAIN (pending_elaborations) != 0;
1645 /* Save a copy of the current pending elaboration list and make a new
1646 one. */
1648 void
1649 push_pending_elaborations ()
1651 struct e_stack *p = (struct e_stack *) xmalloc (sizeof (struct e_stack));
1653 p->next = elist_stack;
1654 p->elab_list = pending_elaborations;
1655 elist_stack = p;
1656 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
1659 /* Pop the stack of pending elaborations. */
1661 void
1662 pop_pending_elaborations ()
1664 struct e_stack *p = elist_stack;
1666 pending_elaborations = p->elab_list;
1667 elist_stack = p->next;
1668 free (p);
1671 /* Return the current position in pending_elaborations so we can insert
1672 elaborations after that point. */
1674 tree
1675 get_elaboration_location ()
1677 return tree_last (pending_elaborations);
1680 /* Insert the current elaborations after ELAB, which is in some elaboration
1681 list. */
1683 void
1684 insert_elaboration_list (elab)
1685 tree elab;
1687 tree next = TREE_CHAIN (elab);
1689 if (TREE_CHAIN (pending_elaborations))
1691 TREE_CHAIN (elab) = TREE_CHAIN (pending_elaborations);
1692 TREE_CHAIN (tree_last (pending_elaborations)) = next;
1693 TREE_CHAIN (pending_elaborations) = 0;
1697 /* Returns a LABEL_DECL node for LABEL_NAME. */
1699 tree
1700 create_label_decl (label_name)
1701 tree label_name;
1703 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1705 DECL_CONTEXT (label_decl) = current_function_decl;
1706 DECL_MODE (label_decl) = VOIDmode;
1707 DECL_SOURCE_LINE (label_decl) = lineno;
1708 DECL_SOURCE_FILE (label_decl) = input_filename;
1710 return label_decl;
1713 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1714 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1715 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1716 PARM_DECL nodes chained through the TREE_CHAIN field).
1718 INLINE_FLAG, PUBLIC_FLAG, and EXTERN_FLAG are used to set the appropriate
1719 fields in the FUNCTION_DECL. */
1721 tree
1722 create_subprog_decl (subprog_name, asm_name, subprog_type, param_decl_list,
1723 inline_flag, public_flag, extern_flag, attr_list)
1724 tree subprog_name;
1725 tree asm_name;
1726 tree subprog_type;
1727 tree param_decl_list;
1728 int inline_flag;
1729 int public_flag;
1730 int extern_flag;
1731 struct attrib *attr_list;
1733 tree return_type = TREE_TYPE (subprog_type);
1734 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1736 /* If this is a function nested inside an inlined external function, it
1737 means we aren't going to compile the outer function unless it is
1738 actually inlined, so do the same for us. */
1739 if (current_function_decl != 0 && DECL_INLINE (current_function_decl)
1740 && DECL_EXTERNAL (current_function_decl))
1741 extern_flag = 1;
1743 DECL_EXTERNAL (subprog_decl) = extern_flag;
1744 TREE_PUBLIC (subprog_decl) = public_flag;
1745 DECL_INLINE (subprog_decl) = inline_flag;
1746 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1747 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1748 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1749 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1750 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1752 if (asm_name != 0)
1753 DECL_ASSEMBLER_NAME (subprog_decl) = asm_name;
1755 process_attributes (subprog_decl, attr_list);
1757 /* Add this decl to the current binding level. */
1758 subprog_decl = pushdecl (subprog_decl);
1760 /* Output the assembler code and/or RTL for the declaration. */
1761 rest_of_decl_compilation (subprog_decl, 0, global_bindings_p (), 0);
1763 return subprog_decl;
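/* Illustrative sketch only (never compiled; the identifiers below are
   hypothetical): declaring an imported, parameterless procedure with
   create_subprog_decl.  The subprogram is public and external, is not
   inlined, and carries no linker attributes.  */

#if 0
static tree
example_declare_external_proc (name_id, asm_id)
     tree name_id;
     tree asm_id;
{
  tree proc_type = build_function_type (void_type_node, NULL_TREE);

  return create_subprog_decl (name_id, asm_id, proc_type, NULL_TREE,
			      0, 1, 1, 0);
}
#endif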
1766 /* Count how deep we are into nested functions. This is because
1767 we shouldn't call the backend function context routines unless we
1768 are in a nested function. */
1770 static int function_nesting_depth;
1772 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1773 body. This routine needs to be invoked before processing the declarations
1774 appearing in the subprogram. */
1776 void
1777 begin_subprog_body (subprog_decl)
1778 tree subprog_decl;
1780 tree param_decl_list;
1781 tree param_decl;
1782 tree next_param;
1784 if (function_nesting_depth++ != 0)
1785 push_function_context ();
1787 announce_function (subprog_decl);
1789 /* Make this field nonzero so further routines know that this is not
1790 tentative. error_mark_node is replaced below (in poplevel) with the
1791 adequate BLOCK. */
1792 DECL_INITIAL (subprog_decl) = error_mark_node;
1794 /* This function exists in static storage. This does not mean `static' in
1795 the C sense! */
1796 TREE_STATIC (subprog_decl) = 1;
1798 /* Enter a new binding level. */
1799 current_function_decl = subprog_decl;
1800 pushlevel (0);
1802 /* Push all the PARM_DECL nodes onto the current scope (i.e. the scope of the
1803 subprogram body) so that they can be recognized as local variables in the
1804 subprogram.
1806 The list of PARM_DECL nodes is stored in the right order in
1807 DECL_ARGUMENTS. Since ..._DECL nodes get stored in the reverse order in
1808 which they are transmitted to `pushdecl' we need to reverse the list of
1809 PARM_DECLs if we want it to be stored in the right order. The reason why
1810 we want to make sure the PARM_DECLs are stored in the correct order is
1811 that this list will be retrieved in a few lines with a call to `getdecls'
1812 to store it back into the DECL_ARGUMENTS field. */
1813 param_decl_list = nreverse (DECL_ARGUMENTS (subprog_decl));
1815 for (param_decl = param_decl_list; param_decl; param_decl = next_param)
1817 next_param = TREE_CHAIN (param_decl);
1818 TREE_CHAIN (param_decl) = NULL;
1819 pushdecl (param_decl);
1822 /* Store back the PARM_DECL nodes. They appear in the right order. */
1823 DECL_ARGUMENTS (subprog_decl) = getdecls ();
1825 init_function_start (subprog_decl, input_filename, lineno);
1826 expand_function_start (subprog_decl, 0);
1830 /* Finish the definition of the current subprogram and compile it all the way
1831 to assembler language output. */
1833 void
1834 end_subprog_body ()
1836 tree decl;
1837 tree cico_list;
1839 poplevel (1, 0, 1);
1840 BLOCK_SUPERCONTEXT (DECL_INITIAL (current_function_decl))
1841 = current_function_decl;
1843 /* Mark the RESULT_DECL as being in this subprogram. */
1844 DECL_CONTEXT (DECL_RESULT (current_function_decl)) = current_function_decl;
1846 expand_function_end (input_filename, lineno, 0);
1848 /* If this is a nested function, push a new GC context. That will keep
1849 local variables on the stack from being collected while we're doing
1850 the compilation of this function. */
1851 if (function_nesting_depth > 1)
1852 ggc_push_context ();
1854 rest_of_compilation (current_function_decl);
1856 if (function_nesting_depth > 1)
1857 ggc_pop_context ();
1859 #if 0
1860 /* If we're sure this function is defined in this file then mark it
1861 as such */
1862 if (TREE_ASM_WRITTEN (current_function_decl))
1863 mark_fn_defined_in_this_file (current_function_decl);
1864 #endif
1866 /* Throw away any VAR_DECLs we made for OUT parameters; they must
1867 not be seen when we call this function and will be in
1868 unallocated memory anyway. */
1869 for (cico_list = TYPE_CI_CO_LIST (TREE_TYPE (current_function_decl));
1870 cico_list != 0; cico_list = TREE_CHAIN (cico_list))
1871 TREE_VALUE (cico_list) = 0;
1873 if (DECL_SAVED_INSNS (current_function_decl) == 0)
1875 /* Throw away DECL_RTL in any PARM_DECLs unless this function
1876 was saved for inline, in which case the DECL_RTLs are in
1877 preserved memory. */
1878 for (decl = DECL_ARGUMENTS (current_function_decl);
1879 decl != 0; decl = TREE_CHAIN (decl))
1881 SET_DECL_RTL (decl, 0);
1882 DECL_INCOMING_RTL (decl) = 0;
1885 /* Similarly, discard DECL_RTL of the return value. */
1886 SET_DECL_RTL (DECL_RESULT (current_function_decl), 0);
1888 /* But DECL_INITIAL must remain nonzero so we know this
1889 was an actual function definition unless toplev.c decided not
1890 to inline it. */
1891 if (DECL_INITIAL (current_function_decl) != 0)
1892 DECL_INITIAL (current_function_decl) = error_mark_node;
1894 DECL_ARGUMENTS (current_function_decl) = 0;
1897 /* If we are not at the bottom of the function nesting stack, pop up to
1898 the containing function. Otherwise show we aren't in any function. */
1899 if (--function_nesting_depth != 0)
1900 pop_function_context ();
1901 else
1902 current_function_decl = 0;
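/* Illustrative sketch only (never compiled; hypothetical identifier): the
   expected pairing of the two routines above when translating a subprogram
   body.  */

#if 0
static void
example_compile_subprog_body (subprog_decl)
     tree subprog_decl;
{
  begin_subprog_body (subprog_decl);
  /* ... translate the declarations and statements of the body here ...  */
  end_subprog_body ();
}
#endif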
1905 /* Return a definition for a builtin function named NAME and whose data type
1906 is TYPE. TYPE should be a function type with argument types.
1907 FUNCTION_CODE tells later passes how to compile calls to this function.
1908 See tree.h for its possible values.
1910 If LIBRARY_NAME is nonzero, use that for DECL_ASSEMBLER_NAME,
1911 the name to be called if we can't opencode the function. */
1913 tree
1914 builtin_function (name, type, function_code, class, library_name)
1915 const char *name;
1916 tree type;
1917 int function_code;
1918 enum built_in_class class;
1919 const char *library_name;
1921 tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);
1923 DECL_EXTERNAL (decl) = 1;
1924 TREE_PUBLIC (decl) = 1;
1925 if (library_name)
1926 DECL_ASSEMBLER_NAME (decl) = get_identifier (library_name);
1928 pushdecl (decl);
1929 DECL_BUILT_IN_CLASS (decl) = class;
1930 DECL_FUNCTION_CODE (decl) = function_code;
1931 return decl;
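/* Illustrative sketch only (never compiled): registering a memcpy-style
   built-in with a library fallback.  The exact prototype shown is an
   assumption made for the example.  */

#if 0
static void
example_register_memcpy_builtin ()
{
  tree ptr_type = build_pointer_type (void_type_node);
  tree arg_types
    = tree_cons (NULL_TREE, ptr_type,
		 tree_cons (NULL_TREE, ptr_type,
			    tree_cons (NULL_TREE, sizetype,
				       tree_cons (NULL_TREE, void_type_node,
						  NULL_TREE))));
  tree ftype = build_function_type (ptr_type, arg_types);

  builtin_function ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
		    BUILT_IN_NORMAL, "memcpy");
}
#endif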
1934 /* Return an integer type with the number of bits of precision given by
1935 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
1936 it is a signed type. */
1938 tree
1939 type_for_size (precision, unsignedp)
1940 unsigned precision;
1941 int unsignedp;
1943 tree t;
1944 char type_name[20];
1946 if (precision <= 2 * MAX_BITS_PER_WORD
1947 && signed_and_unsigned_types[precision][unsignedp] != 0)
1948 return signed_and_unsigned_types[precision][unsignedp];
1950 if (unsignedp)
1951 t = make_unsigned_type (precision);
1952 else
1953 t = make_signed_type (precision);
1955 if (precision <= 2 * MAX_BITS_PER_WORD)
1956 signed_and_unsigned_types[precision][unsignedp] = t;
1958 if (TYPE_NAME (t) == 0)
1960 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
1961 TYPE_NAME (t) = get_identifier (type_name);
1964 return t;
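/* Illustration only (never compiled): because of the cache above, repeated
   requests for the same precision and signedness yield the identical node.  */

#if 0
static int
example_type_for_size_is_cached ()
{
  tree t1 = type_for_size (8, 1);
  tree t2 = type_for_size (8, 1);

  return t1 == t2;	/* Nonzero: both requests hit the cache.  */
}
#endif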
1967 /* Likewise for floating-point types. */
1969 static tree
1970 float_type_for_size (precision, mode)
1971 int precision;
1972 enum machine_mode mode;
1974 tree t;
1975 char type_name[20];
1977 if (float_types[(int) mode] != 0)
1978 return float_types[(int) mode];
1980 float_types[(int) mode] = t = make_node (REAL_TYPE);
1981 TYPE_PRECISION (t) = precision;
1982 layout_type (t);
1984 if (TYPE_MODE (t) != mode)
1985 gigi_abort (414);
1987 if (TYPE_NAME (t) == 0)
1989 sprintf (type_name, "FLOAT_%d", precision);
1990 TYPE_NAME (t) = get_identifier (type_name);
1993 return t;
1996 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
1997 an unsigned type; otherwise a signed type is returned. */
1999 tree
2000 type_for_mode (mode, unsignedp)
2001 enum machine_mode mode;
2002 int unsignedp;
2004 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2005 return float_type_for_size (GET_MODE_BITSIZE (mode), mode);
2006 else
2007 return type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2010 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2012 tree
2013 unsigned_type (type_node)
2014 tree type_node;
2016 tree type = type_for_size (TYPE_PRECISION (type_node), 1);
2018 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2020 type = copy_node (type);
2021 TREE_TYPE (type) = type_node;
2023 else if (TREE_TYPE (type_node) != 0
2024 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2025 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2027 type = copy_node (type);
2028 TREE_TYPE (type) = TREE_TYPE (type_node);
2031 return type;
2034 /* Return the signed version of a TYPE_NODE, a scalar type. */
2036 tree
2037 signed_type (type_node)
2038 tree type_node;
2040 tree type = type_for_size (TYPE_PRECISION (type_node), 0);
2042 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2044 type = copy_node (type);
2045 TREE_TYPE (type) = type_node;
2047 else if (TREE_TYPE (type_node) != 0
2048 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2049 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2051 type = copy_node (type);
2052 TREE_TYPE (type) = TREE_TYPE (type_node);
2055 return type;
2058 /* Return a type the same as TYPE except unsigned or signed according to
2059 UNSIGNEDP. */
2061 tree
2062 signed_or_unsigned_type (unsignedp, type)
2063 int unsignedp;
2064 tree type;
2066 if (! INTEGRAL_TYPE_P (type) || TREE_UNSIGNED (type) == unsignedp)
2067 return type;
2068 else
2069 return type_for_size (TYPE_PRECISION (type), unsignedp);
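/* Illustration only (never compiled): how the three routines above relate.
   Asking for the signedness a type already has hands back the type itself.  */

#if 0
static void
example_signedness_variants (type)
     tree type;
{
  tree u = unsigned_type (type);	/* Unsigned counterpart.  */
  tree s = signed_type (type);		/* Signed counterpart.  */
  tree same = signed_or_unsigned_type (TREE_UNSIGNED (type), type);

  /* For an integral TYPE, SAME == TYPE; U and S share its precision.  */
}
#endif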
2072 /* EXP is an expression for the size of an object. If this size contains
2073 discriminant references, replace them with the maximum (if MAX_P) or
2074 minimum (if ! MAX_P) possible value of the discriminant. */
2076 tree
2077 max_size (exp, max_p)
2078 tree exp;
2079 int max_p;
2081 enum tree_code code = TREE_CODE (exp);
2082 tree type = TREE_TYPE (exp);
2084 switch (TREE_CODE_CLASS (code))
2086 case 'd':
2087 case 'c':
2088 return exp;
2090 case 'x':
2091 if (code == TREE_LIST)
2092 return tree_cons (TREE_PURPOSE (exp),
2093 max_size (TREE_VALUE (exp), max_p),
2094 TREE_CHAIN (exp) != 0
2095 ? max_size (TREE_CHAIN (exp), max_p) : 0);
2096 break;
2098 case 'r':
2099 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2100 modify. Otherwise, we abort since it is something we can't
2101 handle. */
2102 if (! contains_placeholder_p (exp))
2103 gigi_abort (406);
2105 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2106 return
2107 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), 1);
2109 case '<':
2110 return max_p ? size_one_node : size_zero_node;
2112 case '1':
2113 case '2':
2114 case 'e':
2115 switch (TREE_CODE_LENGTH (code))
2117 case 1:
2118 if (code == NON_LVALUE_EXPR)
2119 return max_size (TREE_OPERAND (exp, 0), max_p);
2120 else
2121 return
2122 fold (build1 (code, type,
2123 max_size (TREE_OPERAND (exp, 0),
2124 code == NEGATE_EXPR ? ! max_p : max_p)));
2126 case 2:
2127 if (code == RTL_EXPR)
2128 gigi_abort (407);
2129 else if (code == COMPOUND_EXPR)
2130 return max_size (TREE_OPERAND (exp, 1), max_p);
2131 else if (code == WITH_RECORD_EXPR)
2132 return exp;
2135 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2136 tree rhs = max_size (TREE_OPERAND (exp, 1),
2137 code == MINUS_EXPR ? ! max_p : max_p);
2139 /* Special-case wanting the maximum value of a MIN_EXPR.
2140 In that case, if one side overflows, return the other.
2141 sizetype is signed, but we know sizes are non-negative.
2142 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2143 overflowing or the maximum possible value and the RHS
2144 a variable. */
2145 if (max_p && code == MIN_EXPR && TREE_OVERFLOW (rhs))
2146 return lhs;
2147 else if (max_p && code == MIN_EXPR && TREE_OVERFLOW (lhs))
2148 return rhs;
2149 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2150 && (TREE_OVERFLOW (lhs)
2151 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2152 && ! TREE_CONSTANT (rhs))
2153 return lhs;
2154 else
2155 return fold (build (code, type, lhs, rhs));
2158 case 3:
2159 if (code == SAVE_EXPR)
2160 return exp;
2161 else if (code == COND_EXPR)
2162 return fold (build (MAX_EXPR, type,
2163 max_size (TREE_OPERAND (exp, 1), max_p),
2164 max_size (TREE_OPERAND (exp, 2), max_p)));
2165 else if (code == CALL_EXPR && TREE_OPERAND (exp, 1) != 0)
2166 return build (CALL_EXPR, type, TREE_OPERAND (exp, 0),
2167 max_size (TREE_OPERAND (exp, 1), max_p));
2171 gigi_abort (408);
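/* Illustration only (never compiled; hypothetical identifier): the typical
   call pattern for max_size, obtaining the largest size an object of a
   discriminated type can have once discriminant references are replaced by
   their maximum values.  */

#if 0
static tree
example_max_object_size (gnu_type)
     tree gnu_type;
{
  return max_size (TYPE_SIZE (gnu_type), 1);
}
#endif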
2174 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2175 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2176 Return a constructor for the template. */
2178 tree
2179 build_template (template_type, array_type, expr)
2180 tree template_type;
2181 tree array_type;
2182 tree expr;
2184 tree template_elts = NULL_TREE;
2185 tree bound_list = NULL_TREE;
2186 tree field;
2188 if (TREE_CODE (array_type) == RECORD_TYPE
2189 && (TYPE_IS_PADDING_P (array_type)
2190 || TYPE_LEFT_JUSTIFIED_MODULAR_P (array_type)))
2191 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2193 if (TREE_CODE (array_type) == ARRAY_TYPE
2194 || (TREE_CODE (array_type) == INTEGER_TYPE
2195 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2196 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2198 /* First make the list for a CONSTRUCTOR for the template. Go down the
2199 field list of the template instead of the type chain because this
2200 array might be an Ada array of arrays and we can't tell where the
2201 nested arrays stop being the underlying object. */
2203 for (field = TYPE_FIELDS (template_type); field;
2204 (bound_list != 0
2205 ? (bound_list = TREE_CHAIN (bound_list))
2206 : (array_type = TREE_TYPE (array_type))),
2207 field = TREE_CHAIN (TREE_CHAIN (field)))
2209 tree bounds, min, max;
2211 /* If we have a bound list, get the bounds from there. Likewise
2212 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2213 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2214 This will give us a maximum range. */
2215 if (bound_list != 0)
2216 bounds = TREE_VALUE (bound_list);
2217 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2218 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2219 else if (expr != 0 && TREE_CODE (expr) == PARM_DECL
2220 && DECL_BY_COMPONENT_PTR_P (expr))
2221 bounds = TREE_TYPE (field);
2222 else
2223 gigi_abort (411);
2225 min = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MIN_VALUE (bounds));
2226 max = convert (TREE_TYPE (field), TYPE_MAX_VALUE (bounds));
2228 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2229 surround them with a WITH_RECORD_EXPR giving EXPR as the
2230 OBJECT. */
2231 if (! TREE_CONSTANT (min) && contains_placeholder_p (min))
2232 min = build (WITH_RECORD_EXPR, TREE_TYPE (min), min, expr);
2233 if (! TREE_CONSTANT (max) && contains_placeholder_p (max))
2234 max = build (WITH_RECORD_EXPR, TREE_TYPE (max), max, expr);
2236 template_elts = tree_cons (TREE_CHAIN (field), max,
2237 tree_cons (field, min, template_elts));
2240 return build_constructor (template_type, nreverse (template_elts));
2243 /* Build a VMS descriptor from a Mechanism_Type, which must specify
2244 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2245 in the type contains in its DECL_INITIAL the expression to use when
2246 a constructor is made for the type. GNAT_ENTITY is a gnat node used
2247 to print out an error message if the mechanism cannot be applied to
2248 an object of that type and also for the name. */
2250 tree
2251 build_vms_descriptor (type, mech, gnat_entity)
2252 tree type;
2253 Mechanism_Type mech;
2254 Entity_Id gnat_entity;
2256 tree record_type = make_node (RECORD_TYPE);
2257 tree field_list = 0;
2258 int class;
2259 int dtype = 0;
2260 tree inner_type;
2261 int ndim;
2262 int i;
2263 tree *idx_arr;
2264 tree tem;
2266 /* If TYPE is an unconstrained array, use the underlying array type. */
2267 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2268 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2270 /* If this is an array, compute the number of dimensions in the array,
2271 get the index types, and point to the inner type. */
2272 if (TREE_CODE (type) != ARRAY_TYPE)
2273 ndim = 0;
2274 else
2275 for (ndim = 1, inner_type = type;
2276 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2277 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2278 ndim++, inner_type = TREE_TYPE (inner_type))
2281 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2283 if (mech != By_Descriptor_NCA
2284 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2285 for (i = ndim - 1, inner_type = type;
2286 i >= 0;
2287 i--, inner_type = TREE_TYPE (inner_type))
2288 idx_arr[i] = TYPE_DOMAIN (inner_type);
2289 else
2290 for (i = 0, inner_type = type;
2291 i < ndim;
2292 i++, inner_type = TREE_TYPE (inner_type))
2293 idx_arr[i] = TYPE_DOMAIN (inner_type);
2295 /* Now get the DTYPE value. */
2296 switch (TREE_CODE (type))
2298 case INTEGER_TYPE:
2299 case ENUMERAL_TYPE:
2300 if (TYPE_VAX_FLOATING_POINT_P (type))
2301 switch ((int) TYPE_DIGITS_VALUE (type))
2303 case 6:
2304 dtype = 10;
2305 break;
2306 case 9:
2307 dtype = 11;
2308 break;
2309 case 15:
2310 dtype = 27;
2311 break;
2313 else
2314 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2316 case 8:
2317 dtype = TREE_UNSIGNED (type) ? 2 : 6;
2318 break;
2319 case 16:
2320 dtype = TREE_UNSIGNED (type) ? 3 : 7;
2321 break;
2322 case 32:
2323 dtype = TREE_UNSIGNED (type) ? 4 : 8;
2324 break;
2325 case 64:
2326 dtype = TREE_UNSIGNED (type) ? 5 : 9;
2327 break;
2328 case 128:
2329 dtype = TREE_UNSIGNED (type) ? 25 : 26;
2330 break;
2332 break;
2334 case REAL_TYPE:
2335 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2336 break;
2338 case COMPLEX_TYPE:
2339 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2340 && TYPE_VAX_FLOATING_POINT_P (type))
2341 switch ((int) TYPE_DIGITS_VALUE (type))
2343 case 6:
2344 dtype = 12;
2345 break;
2346 case 9:
2347 dtype = 13;
2348 break;
2349 case 15:
2350 dtype = 29;
2352 else
2353 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2354 break;
2356 case ARRAY_TYPE:
2357 dtype = 14;
2358 break;
2360 default:
2361 break;
2364 /* Get the CLASS value. */
2365 switch (mech)
2367 case By_Descriptor_A:
2368 class = 4;
2369 break;
2370 case By_Descriptor_NCA:
2371 class = 10;
2372 break;
2373 case By_Descriptor_SB:
2374 class = 15;
2375 break;
2376 default:
2377 class = 1;
2380 /* Make the type for a descriptor for VMS. The first four fields
2381 are the same for all types. */
2383 field_list
2384 = chainon (field_list,
2385 make_descriptor_field
2386 ("LENGTH", type_for_size (16, 1), record_type,
2387 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2389 field_list = chainon (field_list,
2390 make_descriptor_field ("DTYPE", type_for_size (8, 1),
2391 record_type, size_int (dtype)));
2392 field_list = chainon (field_list,
2393 make_descriptor_field ("CLASS", type_for_size (8, 1),
2394 record_type, size_int (class)));
2396 field_list
2397 = chainon (field_list,
2398 make_descriptor_field ("POINTER",
2399 build_pointer_type (type),
2400 record_type,
2401 build1 (ADDR_EXPR,
2402 build_pointer_type (type),
2403 build (PLACEHOLDER_EXPR,
2404 type))));
2406 switch (mech)
2408 case By_Descriptor:
2409 case By_Descriptor_S:
2410 break;
2412 case By_Descriptor_SB:
2413 field_list
2414 = chainon (field_list,
2415 make_descriptor_field
2416 ("SB_L1", type_for_size (32, 1), record_type,
2417 TREE_CODE (type) == ARRAY_TYPE
2418 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2419 field_list
2420 = chainon (field_list,
2421 make_descriptor_field
2422 ("SB_L2", type_for_size (32, 1), record_type,
2423 TREE_CODE (type) == ARRAY_TYPE
2424 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2425 break;
2427 case By_Descriptor_A:
2428 case By_Descriptor_NCA:
2429 field_list = chainon (field_list,
2430 make_descriptor_field ("SCALE",
2431 type_for_size (8, 1),
2432 record_type,
2433 size_zero_node));
2435 field_list = chainon (field_list,
2436 make_descriptor_field ("DIGITS",
2437 type_for_size (8, 1),
2438 record_type,
2439 size_zero_node));
2441 field_list
2442 = chainon (field_list,
2443 make_descriptor_field
2444 ("AFLAGS", type_for_size (8, 1), record_type,
2445 size_int (mech == By_Descriptor_NCA
2446 ? 0
2447 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2448 : (TREE_CODE (type) == ARRAY_TYPE
2449 && TYPE_CONVENTION_FORTRAN_P (type)
2450 ? 224 : 192))));
2452 field_list = chainon (field_list,
2453 make_descriptor_field ("DIMCT",
2454 type_for_size (8, 1),
2455 record_type,
2456 size_int (ndim)));
2458 field_list = chainon (field_list,
2459 make_descriptor_field ("ARSIZE",
2460 type_for_size (32, 1),
2461 record_type,
2462 size_in_bytes (type)));
2464 /* Now build a pointer to the 0,0,0... element. */
2465 tem = build (PLACEHOLDER_EXPR, type);
2466 for (i = 0, inner_type = type; i < ndim;
2467 i++, inner_type = TREE_TYPE (inner_type))
2468 tem = build (ARRAY_REF, TREE_TYPE (inner_type), tem,
2469 convert (TYPE_DOMAIN (inner_type), size_zero_node));
2471 field_list
2472 = chainon (field_list,
2473 make_descriptor_field
2474 ("A0", build_pointer_type (inner_type), record_type,
2475 build1 (ADDR_EXPR, build_pointer_type (inner_type), tem)));
2477 /* Next come the addressing coefficients. */
2478 tem = size_int (1);
2479 for (i = 0; i < ndim; i++)
2481 char fname[3];
2482 tree idx_length
2483 = size_binop (MULT_EXPR, tem,
2484 size_binop (PLUS_EXPR,
2485 size_binop (MINUS_EXPR,
2486 TYPE_MAX_VALUE (idx_arr[i]),
2487 TYPE_MIN_VALUE (idx_arr[i])),
2488 size_int (1)));
2490 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
2491 fname[1] = '0' + i, fname[2] = 0;
2492 field_list = chainon (field_list,
2493 make_descriptor_field (fname,
2494 type_for_size (32, 1),
2495 record_type,
2496 idx_length));
2498 if (mech == By_Descriptor_NCA)
2499 tem = idx_length;
2502 /* Finally here are the bounds. */
2503 for (i = 0; i < ndim; i++)
2505 char fname[3];
2507 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2508 field_list
2509 = chainon (field_list,
2510 make_descriptor_field
2511 (fname, type_for_size (32, 1), record_type,
2512 TYPE_MIN_VALUE (idx_arr[i])));
2514 fname[0] = 'U';
2515 field_list
2516 = chainon (field_list,
2517 make_descriptor_field
2518 (fname, type_for_size (32, 1), record_type,
2519 TYPE_MAX_VALUE (idx_arr[i])));
2521 break;
2523 default:
2524 post_error ("unsupported descriptor type for &", gnat_entity);
2527 finish_record_type (record_type, field_list, 0, 1);
2528 pushdecl (build_decl (TYPE_DECL, create_concat_name (gnat_entity, "DESC"),
2529 record_type));
2531 return record_type;
2534 /* Utility routine for above code to make a field. */
2536 static tree
2537 make_descriptor_field (name, type, rec_type, initial)
2538 const char *name;
2539 tree type;
2540 tree rec_type;
2541 tree initial;
2543 tree field
2544 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2546 DECL_INITIAL (field) = initial;
2547 return field;
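/* Illustrative sketch only (never compiled; hypothetical identifiers):
   requesting a class A (contiguous array) VMS descriptor for a GCC type,
   as a caller passing an array by descriptor might.  */

#if 0
static tree
example_array_descriptor (gnu_type, gnat_entity)
     tree gnu_type;
     Entity_Id gnat_entity;
{
  return build_vms_descriptor (gnu_type, By_Descriptor_A, gnat_entity);
}
#endif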
2550 /* Build a type to be used to represent an aliased object whose nominal
2551 type is an unconstrained array. This consists of a RECORD_TYPE containing
2552 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
2553 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
2554 is used to represent an arbitrary unconstrained object. Use NAME
2555 as the name of the record. */
2557 tree
2558 build_unc_object_type (template_type, object_type, name)
2559 tree template_type;
2560 tree object_type;
2561 tree name;
2563 tree type = make_node (RECORD_TYPE);
2564 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
2565 template_type, type, 0, 0, 0, 1);
2566 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
2567 type, 0, 0, 0, 1);
2569 TYPE_NAME (type) = name;
2570 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
2571 finish_record_type (type,
2572 chainon (chainon (NULL_TREE, template_field),
2573 array_field),
2574 0, 0);
2576 return type;
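/* Illustrative sketch only (never compiled; the name is hypothetical):
   building the record that holds both the bounds template and the array
   data for an aliased unconstrained object.  */

#if 0
static tree
example_unc_object_type (template_type, array_type)
     tree template_type;
     tree array_type;
{
  return build_unc_object_type (template_type, array_type,
				get_identifier ("EXAMPLE_UNC"));
}
#endif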
2579 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
2580 the normal case this is just two adjustments, but we have more to do
2581 if NEW is an UNCONSTRAINED_ARRAY_TYPE. */
2583 void
2584 update_pointer_to (old_type, new_type)
2585 tree old_type;
2586 tree new_type;
2588 tree ptr = TYPE_POINTER_TO (old_type);
2589 tree ref = TYPE_REFERENCE_TO (old_type);
2590 tree type;
2592 /* If this is the main variant, process all the other variants first. */
2593 if (TYPE_MAIN_VARIANT (old_type) == old_type)
2594 for (type = TYPE_NEXT_VARIANT (old_type); type != 0;
2595 type = TYPE_NEXT_VARIANT (type))
2596 update_pointer_to (type, new_type);
2598 /* If no pointer or reference, we are done. Otherwise, get the new type with
2599 the same qualifiers as the old type and see if it is the same as the old
2600 type. */
2601 if (ptr == 0 && ref == 0)
2602 return;
2604 new_type = build_qualified_type (new_type, TYPE_QUALS (old_type));
2605 if (old_type == new_type)
2606 return;
2608 /* First handle the simple case. */
2609 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
2611 if (ptr != 0)
2612 TREE_TYPE (ptr) = new_type;
2613 TYPE_POINTER_TO (new_type) = ptr;
2615 if (ref != 0)
2616 TREE_TYPE (ref) = new_type;
2617 TYPE_REFERENCE_TO (new_type) = ref;
2619 if (ptr != 0 && TYPE_NAME (ptr) != 0
2620 && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL
2621 && TREE_CODE (new_type) != ENUMERAL_TYPE)
2622 rest_of_decl_compilation (TYPE_NAME (ptr), NULL,
2623 global_bindings_p (), 0);
2624 if (ref != 0 && TYPE_NAME (ref) != 0
2625 && TREE_CODE (TYPE_NAME (ref)) == TYPE_DECL
2626 && TREE_CODE (new_type) != ENUMERAL_TYPE)
2627 rest_of_decl_compilation (TYPE_NAME (ref), NULL,
2628 global_bindings_p (), 0);
2631 /* Now deal with the unconstrained array case. In this case the "pointer"
2632 is actually a RECORD_TYPE where the types of both fields are
2633 pointers to void. In that case, copy the field list from the
2634 old type to the new one and update the fields' context. */
2635 else if (TREE_CODE (ptr) != RECORD_TYPE || ! TYPE_IS_FAT_POINTER_P (ptr))
2636 gigi_abort (412);
2638 else
2640 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
2641 tree ptr_temp_type;
2642 tree new_ref;
2643 tree var;
2645 TYPE_FIELDS (ptr) = TYPE_FIELDS (TYPE_POINTER_TO (new_type));
2646 DECL_CONTEXT (TYPE_FIELDS (ptr)) = ptr;
2647 DECL_CONTEXT (TREE_CHAIN (TYPE_FIELDS (ptr))) = ptr;
2649 /* Rework the PLACEHOLDER_EXPR inside the reference to the
2650 template bounds.
2652 ??? This is now the only use of gnat_substitute_in_type, which
2653 is now a very "heavy" routine to do this, so it should be replaced
2654 at some point. */
2655 ptr_temp_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (ptr)));
2656 new_ref = build (COMPONENT_REF, ptr_temp_type,
2657 build (PLACEHOLDER_EXPR, ptr),
2658 TREE_CHAIN (TYPE_FIELDS (ptr)));
2660 update_pointer_to
2661 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
2662 gnat_substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
2663 TREE_CHAIN (TYPE_FIELDS (ptr)), new_ref));
2665 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
2666 TYPE_UNCONSTRAINED_ARRAY (var) = new_type;
2668 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
2669 = TREE_TYPE (new_type) = ptr;
2671 /* Now handle updating the allocation record, what the thin pointer
2672 points to. Update all pointers from the old record into the new
2673 one, update the types of the fields, and recompute the size. */
2675 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
2677 TREE_TYPE (TYPE_FIELDS (new_obj_rec)) = TREE_TYPE (ptr_temp_type);
2678 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2679 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr)));
2680 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2681 = TYPE_SIZE (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
2682 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2683 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
2685 TYPE_SIZE (new_obj_rec)
2686 = size_binop (PLUS_EXPR,
2687 DECL_SIZE (TYPE_FIELDS (new_obj_rec)),
2688 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
2689 TYPE_SIZE_UNIT (new_obj_rec)
2690 = size_binop (PLUS_EXPR,
2691 DECL_SIZE_UNIT (TYPE_FIELDS (new_obj_rec)),
2692 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
2693 rest_of_type_compilation (ptr, global_bindings_p ());
2697 /* Convert a pointer to a constrained array into a pointer to a fat
2698 pointer. This involves making or finding a template. */
2700 static tree
2701 convert_to_fat_pointer (type, expr)
2702 tree type;
2703 tree expr;
2705 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
2706 tree template, template_addr;
2707 tree etype = TREE_TYPE (expr);
2709 /* If EXPR is a constant of zero, we make a fat pointer that has a null
2710 pointer to the template and array. */
2711 if (integer_zerop (expr))
2712 return
2713 build_constructor
2714 (type,
2715 tree_cons (TYPE_FIELDS (type),
2716 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
2717 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2718 convert (build_pointer_type (template_type),
2719 expr),
2720 NULL_TREE)));
2722 /* If EXPR is a thin pointer, make the template and data from the record. */
2724 else if (TYPE_THIN_POINTER_P (etype))
2726 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
2728 expr = save_expr (expr);
2729 if (TREE_CODE (expr) == ADDR_EXPR)
2730 expr = TREE_OPERAND (expr, 0);
2731 else
2732 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
2734 template = build_component_ref (expr, NULL_TREE, fields);
2735 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
2736 build_component_ref (expr, NULL_TREE,
2737 TREE_CHAIN (fields)));
2739 else
2740 /* Otherwise, build the constructor for the template. */
2741 template = build_template (template_type, TREE_TYPE (etype), expr);
2743 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
2745 /* The result is a CONSTRUCTOR for the fat pointer. */
2746 return
2747 build_constructor (type,
2748 tree_cons (TYPE_FIELDS (type), expr,
2749 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2750 template_addr, NULL_TREE)));
2753 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
2754 is something that is a fat pointer, so convert EXPR to a fat pointer
2755 first if it is not already one. */
2757 static tree
2758 convert_to_thin_pointer (type, expr)
2759 tree type;
2760 tree expr;
2762 if (! TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
2763 expr
2764 = convert_to_fat_pointer
2765 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
2767 /* We get the pointer to the data and use a NOP_EXPR to make it the
2768 proper GCC type. */
2769 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)));
2770 expr = build1 (NOP_EXPR, type, expr);
2772 return expr;
2775 /* Create an expression whose value is that of EXPR,
2776 converted to type TYPE. The TREE_TYPE of the value
2777 is always TYPE. This function implements all reasonable
2778 conversions; callers should filter out those that are
2779 not permitted by the language being compiled. */
2781 tree
2782 convert (type, expr)
2783 tree type, expr;
2785 enum tree_code code = TREE_CODE (type);
2786 tree etype = TREE_TYPE (expr);
2787 enum tree_code ecode = TREE_CODE (etype);
2788 tree tem;
2790 /* If EXPR is already the right type, we are done. */
2791 if (type == etype)
2792 return expr;
2794 /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
2795 new one. */
2796 if (TREE_CODE (expr) == WITH_RECORD_EXPR)
2797 return build (WITH_RECORD_EXPR, type,
2798 convert (type, TREE_OPERAND (expr, 0)),
2799 TREE_OPERAND (expr, 1));
2801 /* If the input type has padding, remove it by doing a component reference
2802 to the field. If the output type has padding, make a constructor
2803 to build the record. If both input and output have padding and are
2804 of variable size, do this as an unchecked conversion. */
2805 if (ecode == RECORD_TYPE && code == RECORD_TYPE
2806 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
2807 && (! TREE_CONSTANT (TYPE_SIZE (type))
2808 || ! TREE_CONSTANT (TYPE_SIZE (etype))))
2809 ;
2810 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
2812 /* If we have just converted to this padded type, just get
2813 the inner expression. */
2814 if (TREE_CODE (expr) == CONSTRUCTOR
2815 && CONSTRUCTOR_ELTS (expr) != 0
2816 && TREE_PURPOSE (CONSTRUCTOR_ELTS (expr)) == TYPE_FIELDS (etype))
2817 return TREE_VALUE (CONSTRUCTOR_ELTS (expr));
2818 else
2819 return convert (type, build_component_ref (expr, NULL_TREE,
2820 TYPE_FIELDS (etype)));
2822 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
2824 /* If we previously converted from another type and our type is
2825 of variable size, remove the conversion to avoid the need for
2826 variable-size temporaries. */
2827 if (TREE_CODE (expr) == UNCHECKED_CONVERT_EXPR
2828 && ! TREE_CONSTANT (TYPE_SIZE (type)))
2829 expr = TREE_OPERAND (expr, 0);
2831 /* If we are just removing the padding from expr, convert the original
2832 object if we have variable size. That will avoid the need
2833 for some variable-size temporaries. */
2834 if (TREE_CODE (expr) == COMPONENT_REF
2835 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
2836 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
2837 && ! TREE_CONSTANT (TYPE_SIZE (type)))
2838 return convert (type, TREE_OPERAND (expr, 0));
2840 /* If the result type is a padded type with a self-referentially-sized
2841 field and the expression type is a record, do this as an
2842 unchecked conversion. */
2843 else if (TREE_CODE (DECL_SIZE (TYPE_FIELDS (type))) != INTEGER_CST
2844 && contains_placeholder_p (DECL_SIZE (TYPE_FIELDS (type)))
2845 && TREE_CODE (etype) == RECORD_TYPE)
2846 return unchecked_convert (type, expr);
2848 else
2849 return
2850 build_constructor (type,
2851 tree_cons (TYPE_FIELDS (type),
2852 convert (TREE_TYPE
2853 (TYPE_FIELDS (type)),
2854 expr),
2855 NULL_TREE));
2858 /* If the input is a biased type, adjust first. */
2859 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
2860 return convert (type, fold (build (PLUS_EXPR, TREE_TYPE (etype),
2861 fold (build1 (GNAT_NOP_EXPR,
2862 TREE_TYPE (etype), expr)),
2863 TYPE_MIN_VALUE (etype))));
2865 /* If the input is a left-justified modular type, we need to extract
2866 the actual object before converting it to any other type with the
2867 exception of an unconstrained array. */
2868 if (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)
2869 && code != UNCONSTRAINED_ARRAY_TYPE)
2870 return convert (type, build_component_ref (expr, NULL_TREE,
2871 TYPE_FIELDS (etype)));
2873 /* If converting a type that does not contain a template into one
2874 that does, convert to the data type and then build the template. */
2875 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type)
2876 && ! (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype)))
2878 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
2880 return
2881 build_constructor
2882 (type,
2883 tree_cons (TYPE_FIELDS (type),
2884 build_template (TREE_TYPE (TYPE_FIELDS (type)),
2885 obj_type, NULL_TREE),
2886 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2887 convert (obj_type, expr), NULL_TREE)));
2890 /* There are some special cases of expressions that we process
2891 specially. */
2892 switch (TREE_CODE (expr))
2894 case ERROR_MARK:
2895 return expr;
2897 case TRANSFORM_EXPR:
2898 case NULL_EXPR:
2899 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
2900 conversion in gnat_expand_expr. NULL_EXPR does not represent
2901 an actual value, so no conversion is needed. */
2902 TREE_TYPE (expr) = type;
2903 return expr;
2905 case STRING_CST:
2906 case CONSTRUCTOR:
2907 /* If we are converting a STRING_CST to another constrained array type,
2908 just make a new one in the proper type. Likewise for a
2909 CONSTRUCTOR. But if the mode of the type is different, we must
2910 ensure a new RTL is made for the constant. */
2911 if (code == ecode && AGGREGATE_TYPE_P (etype)
2912 && ! (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
2913 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
2915 expr = copy_node (expr);
2916 TREE_TYPE (expr) = type;
2918 if (TYPE_MODE (type) != TYPE_MODE (etype))
2919 TREE_CST_RTL (expr) = 0;
2921 return expr;
2923 break;
2925 case COMPONENT_REF:
2926 /* If we are converting between two aggregate types of the same
2927 kind, size, mode, and alignment, just make a new COMPONENT_REF.
2928 This avoids unneeded conversions, which make reference computations
2929 more complex. */
2930 if (code == ecode && TYPE_MODE (type) == TYPE_MODE (etype)
2931 && AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
2932 && TYPE_ALIGN (type) == TYPE_ALIGN (etype)
2933 && operand_equal_p (TYPE_SIZE (type), TYPE_SIZE (etype), 0))
2934 return build (COMPONENT_REF, type, TREE_OPERAND (expr, 0),
2935 TREE_OPERAND (expr, 1));
2937 break;
2939 case UNCONSTRAINED_ARRAY_REF:
2940 /* Convert this to the type of the inner array by getting the address of
2941 the array from the template. */
2942 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
2943 build_component_ref (TREE_OPERAND (expr, 0),
2944 get_identifier ("P_ARRAY"),
2945 NULL_TREE));
2946 etype = TREE_TYPE (expr);
2947 ecode = TREE_CODE (etype);
2948 break;
2950 case UNCHECKED_CONVERT_EXPR:
2951 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
2952 && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
2953 return convert (type, TREE_OPERAND (expr, 0));
2954 break;
2956 case INDIRECT_REF:
2957 /* If both types are record types, just convert the pointer and
2958 make a new INDIRECT_REF.
2960 ??? Disable this for now since it causes problems with the
2961 code in build_binary_op for MODIFY_EXPR which wants to
2962 strip off conversions. But that code really is a mess and
2963 we need to do this a much better way some time. */
2964 if (0
2965 && (TREE_CODE (type) == RECORD_TYPE
2966 || TREE_CODE (type) == UNION_TYPE)
2967 && (TREE_CODE (etype) == RECORD_TYPE
2968 || TREE_CODE (etype) == UNION_TYPE)
2969 && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
2970 return build_unary_op (INDIRECT_REF, NULL_TREE,
2971 convert (build_pointer_type (type),
2972 TREE_OPERAND (expr, 0)));
2973 break;
2975 default:
2976 break;
2979 /* Check for converting to a pointer to an unconstrained array. */
2980 if (TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
2981 return convert_to_fat_pointer (type, expr);
2983 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
2984 || (code == INTEGER_CST && ecode == INTEGER_CST
2985 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
2986 return fold (build1 (NOP_EXPR, type, expr));
2988 switch (code)
2990 case VOID_TYPE:
2991 return build1 (CONVERT_EXPR, type, expr);
2993 case INTEGER_TYPE:
2994 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
2995 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE))
2996 return unchecked_convert (type, expr);
2997 else if (TYPE_BIASED_REPRESENTATION_P (type))
2998 return fold (build1 (CONVERT_EXPR, type,
2999 fold (build (MINUS_EXPR, TREE_TYPE (type),
3000 convert (TREE_TYPE (type), expr),
3001 TYPE_MIN_VALUE (type)))));
3003 /* ... fall through ... */
3005 case ENUMERAL_TYPE:
3006 return fold (convert_to_integer (type, expr));
3008 case POINTER_TYPE:
3009 case REFERENCE_TYPE:
3010 /* If converting between two pointers to records denoting
3011 both a template and type, adjust if needed to account
3012 for any differing offsets, since one might be negative. */
3013 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
3015 tree bit_diff
3016 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
3017 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
3018 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
3019 sbitsize_int (BITS_PER_UNIT));
3021 expr = build1 (NOP_EXPR, type, expr);
3022 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
3023 if (integer_zerop (byte_diff))
3024 return expr;
3026 return build_binary_op (PLUS_EXPR, type, expr,
3027 fold (convert_to_pointer (type, byte_diff)));
3030 /* If converting to a thin pointer, handle specially. */
3031 if (TYPE_THIN_POINTER_P (type)
3032 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != 0)
3033 return convert_to_thin_pointer (type, expr);
3035 /* If converting fat pointer to normal pointer, get the pointer to the
3036 array and then convert it. */
3037 else if (TYPE_FAT_POINTER_P (etype))
3038 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
3039 NULL_TREE);
3041 return fold (convert_to_pointer (type, expr));
3043 case REAL_TYPE:
3044 return fold (convert_to_real (type, expr));
3046 case RECORD_TYPE:
3047 if (TYPE_LEFT_JUSTIFIED_MODULAR_P (type) && ! AGGREGATE_TYPE_P (etype))
3048 return
3049 build_constructor
3050 (type, tree_cons (TYPE_FIELDS (type),
3051 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3052 NULL_TREE));
3054 /* ... fall through ... */
3056 case ARRAY_TYPE:
3057 /* In these cases, assume the front-end has validated the conversion.
3058 If the conversion is valid, it will be a bit-wise conversion, so
3059 it can be viewed as an unchecked conversion. */
3060 return unchecked_convert (type, expr);
3062 case UNION_TYPE:
3063 /* Just validate that the type is indeed that of a field
3064 of the type. Then make the simple conversion. */
3065 for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
3066 if (TREE_TYPE (tem) == etype)
3067 return build1 (CONVERT_EXPR, type, expr);
3069 gigi_abort (413);
3071 case UNCONSTRAINED_ARRAY_TYPE:
3072 /* If EXPR is a constrained array, take its address, convert it to a
3073 fat pointer, and then dereference it. Likewise if EXPR is a
3074 record containing both a template and a constrained array.
3075 Note that a record representing a left justified modular type
3076 always represents a packed constrained array. */
3077 if (ecode == ARRAY_TYPE
3078 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
3079 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
3080 || (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)))
3081 return
3082 build_unary_op
3083 (INDIRECT_REF, NULL_TREE,
3084 convert_to_fat_pointer (TREE_TYPE (type),
3085 build_unary_op (ADDR_EXPR,
3086 NULL_TREE, expr)));
3088 /* Do something very similar for converting one unconstrained
3089 array to another. */
3090 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
3091 return
3092 build_unary_op (INDIRECT_REF, NULL_TREE,
3093 convert (TREE_TYPE (type),
3094 build_unary_op (ADDR_EXPR,
3095 NULL_TREE, expr)));
3096 else
3097 gigi_abort (409);
3099 case COMPLEX_TYPE:
3100 return fold (convert_to_complex (type, expr));
3102 default:
3103 gigi_abort (410);
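/* Illustration only (never compiled; hypothetical identifiers): the common
   call pattern for convert.  The front end has already checked the
   conversion; convert merely produces the matching GCC tree, e.g. when
   assigning an expression to a field of a different but compatible type.  */

#if 0
static tree
example_convert_to_field_type (field, expr)
     tree field;
     tree expr;
{
  return convert (TREE_TYPE (field), expr);
}
#endif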
3107 /* Remove all conversions that are done in EXP. This includes converting
3108 from a padded type or converting to a left-justified modular type. */
3110 tree
3111 remove_conversions (exp)
3112 tree exp;
3114 switch (TREE_CODE (exp))
3116 case CONSTRUCTOR:
3117 if (TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3118 && TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
3119 return remove_conversions (TREE_VALUE (CONSTRUCTOR_ELTS (exp)));
3120 break;
3122 case COMPONENT_REF:
3123 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3124 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3125 return remove_conversions (TREE_OPERAND (exp, 0));
3126 break;
3128 case UNCHECKED_CONVERT_EXPR:
3129 case NOP_EXPR: case CONVERT_EXPR:
3130 return remove_conversions (TREE_OPERAND (exp, 0));
3132 default:
3133 break;
3136 return exp;
3139 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
3140 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
3141 likewise return an expression pointing to the underlying array. */
3143 tree
3144 maybe_unconstrained_array (exp)
3145 tree exp;
3147 enum tree_code code = TREE_CODE (exp);
3148 tree new;
3150 switch (TREE_CODE (TREE_TYPE (exp)))
3152 case UNCONSTRAINED_ARRAY_TYPE:
3153 if (code == UNCONSTRAINED_ARRAY_REF)
3155 new
3156 = build_unary_op (INDIRECT_REF, NULL_TREE,
3157 build_component_ref (TREE_OPERAND (exp, 0),
3158 get_identifier ("P_ARRAY"),
3159 NULL_TREE));
3160 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
3161 return new;
3164 else if (code == NULL_EXPR)
3165 return build1 (NULL_EXPR,
3166 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
3167 (TREE_TYPE (TREE_TYPE (exp))))),
3168 TREE_OPERAND (exp, 0));
3170 else if (code == WITH_RECORD_EXPR
3171 && (TREE_OPERAND (exp, 0)
3172 != (new = maybe_unconstrained_array
3173 (TREE_OPERAND (exp, 0)))))
3174 return build (WITH_RECORD_EXPR, TREE_TYPE (new), new,
3175 TREE_OPERAND (exp, 1));
3177 case RECORD_TYPE:
3178 if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
3180 new
3181 = build_component_ref (exp, NULL_TREE,
3182 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))));
3183 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
3184 && TYPE_IS_PADDING_P (TREE_TYPE (new)))
3185 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (new))), new);
3187 return new;
3189 break;
3191 default:
3192 break;
3195 return exp;
3198 /* Return an expression that does an unchecked conversion of EXPR to TYPE. */
3200 tree
3201 unchecked_convert (type, expr)
3202 tree type;
3203 tree expr;
3205 tree etype = TREE_TYPE (expr);
3207 /* If the expression is already the right type, we are done. */
3208 if (etype == type)
3209 return expr;
3211 /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
3212 new one. */
3213 if (TREE_CODE (expr) == WITH_RECORD_EXPR)
3214 return build (WITH_RECORD_EXPR, type,
3215 unchecked_convert (type, TREE_OPERAND (expr, 0)),
3216 TREE_OPERAND (expr, 1));
3218 /* If both types are integral, just do a normal conversion.
3219 Likewise for a conversion to an unconstrained array. */
3220 if ((((INTEGRAL_TYPE_P (type)
3221 && ! (TREE_CODE (type) == INTEGER_TYPE
3222 && TYPE_VAX_FLOATING_POINT_P (type)))
3223 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
3224 || (TREE_CODE (type) == RECORD_TYPE
3225 && TYPE_LEFT_JUSTIFIED_MODULAR_P (type)))
3226 && ((INTEGRAL_TYPE_P (etype)
3227 && ! (TREE_CODE (etype) == INTEGER_TYPE
3228 && TYPE_VAX_FLOATING_POINT_P (etype)))
3229 || (POINTER_TYPE_P (etype) && ! TYPE_THIN_POINTER_P (etype))
3230 || (TREE_CODE (etype) == RECORD_TYPE
3231 && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype))))
3232 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3234 tree rtype = type;
3236 if (TREE_CODE (etype) == INTEGER_TYPE
3237 && TYPE_BIASED_REPRESENTATION_P (etype))
3239 tree ntype = copy_type (etype);
3241 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
3242 TYPE_MAIN_VARIANT (ntype) = ntype;
3243 expr = build1 (GNAT_NOP_EXPR, ntype, expr);
3246 if (TREE_CODE (type) == INTEGER_TYPE
3247 && TYPE_BIASED_REPRESENTATION_P (type))
3249 rtype = copy_type (type);
3250 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
3251 TYPE_MAIN_VARIANT (rtype) = rtype;
3254 expr = convert (rtype, expr);
3255 if (type != rtype)
3256 expr = build1 (GNAT_NOP_EXPR, type, expr);
3259 /* If we are converting TO an integral type whose precision is not the
3260 same as its size, first unchecked convert to a record that contains
3261 an object of the output type. Then extract the field. */
3262 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
3263 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3264 GET_MODE_BITSIZE (TYPE_MODE (type))))
3266 tree rec_type = make_node (RECORD_TYPE);
3267 tree field = create_field_decl (get_identifier ("OBJ"), type,
3268 rec_type, 1, 0, 0, 0);
3270 TYPE_FIELDS (rec_type) = field;
3271 layout_type (rec_type);
3273 expr = unchecked_convert (rec_type, expr);
3274 expr = build_component_ref (expr, NULL_TREE, field);
3277 /* Similarly for integral input type whose precision is not equal to its
3278 size. */
3279 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype) != 0
3280 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
3281 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3283 tree rec_type = make_node (RECORD_TYPE);
3284 tree field
3285 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
3286 1, 0, 0, 0);
3288 TYPE_FIELDS (rec_type) = field;
3289 layout_type (rec_type);
3291 expr = build_constructor (rec_type, build_tree_list (field, expr));
3292 expr = unchecked_convert (type, expr);
3295 /* We have a special case when we are converting between two
3296 unconstrained array types. In that case, take the address,
3297 convert the fat pointer types, and dereference. */
3298 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
3299 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3300 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3301 build1 (UNCHECKED_CONVERT_EXPR, TREE_TYPE (type),
3302 build_unary_op (ADDR_EXPR, NULL_TREE,
3303 expr)));
3305 /* If both types are aggregates with the same mode and alignment (except
3306 if the result is a UNION_TYPE), we can do this as a normal conversion. */
3307 else if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
3308 && TREE_CODE (type) != UNION_TYPE
3309 && TYPE_ALIGN (type) == TYPE_ALIGN (etype)
3310 && TYPE_MODE (type) == TYPE_MODE (etype))
3311 expr = build1 (CONVERT_EXPR, type, expr);
3313 else
3315 expr = maybe_unconstrained_array (expr);
3316 etype = TREE_TYPE (expr);
3317 expr = build1 (UNCHECKED_CONVERT_EXPR, type, expr);
3321 /* If the result is an integral type whose size is not equal to
3322 the size of the underlying machine type, sign- or zero-extend
3323 the result. We need not do this in the case where the input is
3324 an integral type of the same precision and signedness or if the output
3325 is a biased type or if both the input and output are unsigned. */
3326 if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
3327 && ! (TREE_CODE (type) == INTEGER_TYPE
3328 && TYPE_BIASED_REPRESENTATION_P (type))
3329 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3330 GET_MODE_BITSIZE (TYPE_MODE (type)))
3331 && ! (INTEGRAL_TYPE_P (etype)
3332 && TREE_UNSIGNED (type) == TREE_UNSIGNED (etype)
3333 && operand_equal_p (TYPE_RM_SIZE (type),
3334 (TYPE_RM_SIZE (etype) != 0
3335 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
3336 0))
3337 && ! (TREE_UNSIGNED (type) && TREE_UNSIGNED (etype)))
3339 tree base_type = type_for_mode (TYPE_MODE (type), TREE_UNSIGNED (type));
3340 tree shift_expr
3341 = convert (base_type,
3342 size_binop (MINUS_EXPR,
3343 bitsize_int
3344 (GET_MODE_BITSIZE (TYPE_MODE (type))),
3345 TYPE_RM_SIZE (type)));
3346 expr
3347 = convert (type,
3348 build_binary_op (RSHIFT_EXPR, base_type,
3349 build_binary_op (LSHIFT_EXPR, base_type,
3350 convert (base_type, expr),
3351 shift_expr),
3352 shift_expr));
3355 /* An unchecked conversion should never raise Constraint_Error. The code
3356 below assumes that GCC's conversion routines overflow the same
3357 way that the underlying hardware does. This is probably true. In
3358 the rare case when it isn't, we can rely on the fact that such
3359 conversions are erroneous anyway. */
3360 if (TREE_CODE (expr) == INTEGER_CST)
3361 TREE_OVERFLOW (expr) = TREE_CONSTANT_OVERFLOW (expr) = 0;
3363 /* If the sizes of the types differ and this is an UNCHECKED_CONVERT_EXPR,
3364 show no longer constant. */
3365 if (TREE_CODE (expr) == UNCHECKED_CONVERT_EXPR
3366 && ! operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype), 1))
3367 TREE_CONSTANT (expr) = 0;
3369 return expr;
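/* Illustration only (never compiled; hypothetical identifier): the
   GCC-level analogue of an instance of Unchecked_Conversion, here
   reinterpreting the bits of a 32-bit value as an unsigned integer.  */

#if 0
static tree
example_bits_as_unsigned_32 (expr)
     tree expr;
{
  return unchecked_convert (type_for_size (32, 1), expr);
}
#endif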