1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * *
10 * Copyright (C) 1992-2002, Free Software Foundation, Inc. *
11 * *
12 * GNAT is free software; you can redistribute it and/or modify it under *
13 * terms of the GNU General Public License as published by the Free Soft- *
14 * ware Foundation; either version 2, or (at your option) any later ver- *
15 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
16 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
17 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
18 * for more details. You should have received a copy of the GNU General *
19 * Public License distributed with GNAT; see file COPYING. If not, write *
20 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
21 * MA 02111-1307, USA. *
22 * *
23 * GNAT was originally developed by the GNAT team at New York University. *
24 * It is now maintained by Ada Core Technologies Inc (http://www.gnat.com). *
25 * *
26 ****************************************************************************/
28 #include "config.h"
29 #include "system.h"
30 #include "tree.h"
31 #include "flags.h"
32 #include "defaults.h"
33 #include "toplev.h"
34 #include "output.h"
35 #include "ggc.h"
36 #include "debug.h"
37 #include "convert.h"
39 #include "ada.h"
40 #include "types.h"
41 #include "atree.h"
42 #include "elists.h"
43 #include "namet.h"
44 #include "nlists.h"
45 #include "stringt.h"
46 #include "uintp.h"
47 #include "fe.h"
48 #include "sinfo.h"
49 #include "einfo.h"
50 #include "ada-tree.h"
51 #include "gigi.h"
53 #ifndef MAX_FIXED_MODE_SIZE
54 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
55 #endif
57 #ifndef MAX_BITS_PER_WORD
58 #define MAX_BITS_PER_WORD BITS_PER_WORD
59 #endif
61 /* If nonzero, pretend we are allocating at global level. */
62 int force_global;
64 /* Tree nodes for the various types and decls we create. */
65 tree gnat_std_decls[(int) ADT_LAST];
67 /* Functions to call for each of the possible raise reasons. */
68 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
70 /* Associates a GNAT tree node to a GCC tree node. It is used in
71 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
72 of `save_gnu_tree' for more info. */
73 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
75 /* This listhead is used to record any global objects that need elaboration.
76 TREE_PURPOSE is the variable to be elaborated and TREE_VALUE is the
77 initial value to assign. */
79 static GTY(()) tree pending_elaborations;
81 /* This stack allows us to momentarily switch to generating elaboration
82 lists for an inner context. */
84 struct e_stack GTY(()) {
85 struct e_stack *next;
86 tree elab_list;
88 static GTY(()) struct e_stack *elist_stack;
90 /* This variable keeps a table of types for each precision so that we only
91 allocate each of them once. Signed and unsigned types are kept separate.
93 Note that these types are only used when fold-const requests something
94 special. Perhaps we should NOT share these types; we'll see how it
95 goes later. */
96 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
98 /* Likewise for float types, but record these by mode. */
99 static GTY(()) tree float_types[NUM_MACHINE_MODES];
101 /* For each binding contour we allocate a binding_level structure which records
102 the entities defined or declared in that contour. Contours include:
104 the global one
105 one for each subprogram definition
106 one for each compound statement (declare block)
108 Binding contours are used to create GCC tree BLOCK nodes. */
110 struct binding_level GTY(())
112 /* A chain of ..._DECL nodes for all variables, constants, functions,
113 parameters and type declarations. These ..._DECL nodes are chained
114 through the TREE_CHAIN field. Note that these ..._DECL nodes are stored
115 in the reverse of the order supplied to be compatible with the
116 back-end. */
117 tree names;
118 /* For each level (except the global one), a chain of BLOCK nodes for all
119 the levels that were entered and exited one level down from this one. */
120 tree blocks;
121 /* The BLOCK node for this level, if one has been preallocated.
122 If 0, the BLOCK is allocated (if needed) when the level is popped. */
123 tree this_block;
124 /* The binding level containing this one (the enclosing binding level). */
125 struct binding_level *level_chain;
128 /* The binding level currently in effect. */
129 static GTY(()) struct binding_level *current_binding_level;
131 /* A chain of binding_level structures awaiting reuse. */
132 static GTY((deletable (""))) struct binding_level *free_binding_level;
134 /* The outermost binding level. This binding level is created when the
135 compiler is started and it will exist through the entire compilation. */
136 static struct binding_level *global_binding_level;
138 /* Binding level structures are initialized by copying this one. */
139 static struct binding_level clear_binding_level = {NULL, NULL, NULL, NULL};
141 struct language_function GTY(())
143 int unused;
146 static tree merge_sizes PARAMS ((tree, tree, tree, int, int));
147 static tree compute_related_constant PARAMS ((tree, tree));
148 static tree split_plus PARAMS ((tree, tree *));
149 static int value_zerop PARAMS ((tree));
150 static tree float_type_for_size PARAMS ((int, enum machine_mode));
151 static tree convert_to_fat_pointer PARAMS ((tree, tree));
152 static tree convert_to_thin_pointer PARAMS ((tree, tree));
153 static tree make_descriptor_field PARAMS ((const char *,tree, tree,
154 tree));
156 /* Initialize the association of GNAT nodes to GCC trees. */
158 void
159 init_gnat_to_gnu ()
161 Node_Id gnat_node;
163 associate_gnat_to_gnu = (tree *) ggc_alloc (max_gnat_nodes * sizeof (tree));
165 for (gnat_node = 0; gnat_node < max_gnat_nodes; gnat_node++)
166 associate_gnat_to_gnu[gnat_node] = NULL_TREE;
168 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
171 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
172 which is to be associated with GNAT_ENTITY. Such a GCC tree node is always
173 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
175 If GNU_DECL is zero, a previous association is to be reset. */
177 void
178 save_gnu_tree (gnat_entity, gnu_decl, no_check)
179 Entity_Id gnat_entity;
180 tree gnu_decl;
181 int no_check;
183 if (gnu_decl
184 && (associate_gnat_to_gnu[gnat_entity - First_Node_Id]
185 || (! no_check && ! DECL_P (gnu_decl))))
186 gigi_abort (401);
188 associate_gnat_to_gnu[gnat_entity - First_Node_Id] = gnu_decl;
191 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
192 Return the ..._DECL node that was associated with it. If there is no tree
193 node associated with GNAT_ENTITY, abort.
195 In some cases, such as delayed elaboration or expressions that need to
196 be elaborated only once, GNAT_ENTITY is really not an entity. */
198 tree
199 get_gnu_tree (gnat_entity)
200 Entity_Id gnat_entity;
202 if (! associate_gnat_to_gnu[gnat_entity - First_Node_Id])
203 gigi_abort (402);
205 return associate_gnat_to_gnu[gnat_entity - First_Node_Id];
208 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
210 int
211 present_gnu_tree (gnat_entity)
212 Entity_Id gnat_entity;
214 return (associate_gnat_to_gnu[gnat_entity - First_Node_Id] != NULL_TREE);
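/* Illustrative usage sketch for the association table above: when gigi
   translates a defining identifier GNAT_ENTITY into a GCC ..._DECL node
   GNU_DECL, these routines are typically paired roughly as

       save_gnu_tree (gnat_entity, gnu_decl, 0);
       ...
       if (present_gnu_tree (gnat_entity))
         gnu_decl = get_gnu_tree (gnat_entity);

   and save_gnu_tree (gnat_entity, NULL_TREE, 0) resets the entry.  */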
218 /* Return non-zero if we are currently in the global binding level. */
220 int
221 global_bindings_p ()
223 return (force_global != 0 || current_binding_level == global_binding_level
224 ? -1 : 0);
227 /* Return the list of declarations in the current level. Note that this list
228 is in reverse order (it has to be so for back-end compatibility). */
230 tree
231 getdecls ()
233 return current_binding_level->names;
236 /* Nonzero if the current level needs to have a BLOCK made. */
238 int
239 kept_level_p ()
241 return (current_binding_level->names != 0);
244 /* Enter a new binding level. The input parameter is ignored, but has to be
245 specified for back-end compatibility. */
247 void
248 pushlevel (ignore)
249 int ignore ATTRIBUTE_UNUSED;
251 struct binding_level *newlevel = NULL;
253 /* Reuse a struct for this binding level, if there is one. */
254 if (free_binding_level)
256 newlevel = free_binding_level;
257 free_binding_level = free_binding_level->level_chain;
259 else
260 newlevel
261 = (struct binding_level *) ggc_alloc (sizeof (struct binding_level));
263 *newlevel = clear_binding_level;
265 /* Add this level to the front of the chain (stack) of levels that are
266 active. */
267 newlevel->level_chain = current_binding_level;
268 current_binding_level = newlevel;
271 /* Exit a binding level.
272 Pop the level off, and restore the state of the identifier-decl mappings
273 that were in effect when this level was entered.
275 If KEEP is nonzero, this level had explicit declarations, so create
276 a "block" (a BLOCK node) for the level to record its declarations
277 and subblocks for symbol table output.
279 If FUNCTIONBODY is nonzero, this level is the body of a function,
280 so create a block as if KEEP were set and also clear out all
281 label names.
283 If REVERSE is nonzero, reverse the order of decls before putting
284 them into the BLOCK. */
286 tree
287 poplevel (keep, reverse, functionbody)
288 int keep;
289 int reverse;
290 int functionbody;
292 /* Points to a GCC BLOCK tree node. This is the BLOCK node constructed for the
293 binding level that we are about to exit and which is returned by this
294 routine. */
295 tree block = NULL_TREE;
296 tree decl_chain;
297 tree decl_node;
298 tree subblock_chain = current_binding_level->blocks;
299 tree subblock_node;
300 int block_previously_created;
302 /* Reverse the list of XXXX_DECL nodes if desired. Note that the ..._DECL
303 nodes chained through the `names' field of current_binding_level are in
304 reverse order except for PARM_DECL nodes, which are explicitly stored in
305 the right order. */
306 current_binding_level->names
307 = decl_chain = (reverse) ? nreverse (current_binding_level->names)
308 : current_binding_level->names;
310 /* Output any nested inline functions within this block which must be
311 compiled because their address is needed. */
312 for (decl_node = decl_chain; decl_node; decl_node = TREE_CHAIN (decl_node))
313 if (TREE_CODE (decl_node) == FUNCTION_DECL
314 && ! TREE_ASM_WRITTEN (decl_node) && TREE_ADDRESSABLE (decl_node)
315 && DECL_INITIAL (decl_node) != 0)
317 push_function_context ();
318 output_inline_function (decl_node);
319 pop_function_context ();
322 block = 0;
323 block_previously_created = (current_binding_level->this_block != 0);
324 if (block_previously_created)
325 block = current_binding_level->this_block;
326 else if (keep || functionbody)
327 block = make_node (BLOCK);
328 if (block != 0)
330 BLOCK_VARS (block) = keep ? decl_chain : 0;
331 BLOCK_SUBBLOCKS (block) = subblock_chain;
334 /* Record the BLOCK node just built as the subblock of its enclosing scope. */
335 for (subblock_node = subblock_chain; subblock_node;
336 subblock_node = TREE_CHAIN (subblock_node))
337 BLOCK_SUPERCONTEXT (subblock_node) = block;
339 /* Clear out the meanings of the local variables of this level. */
341 for (subblock_node = decl_chain; subblock_node;
342 subblock_node = TREE_CHAIN (subblock_node))
343 if (DECL_NAME (subblock_node) != 0)
344 /* If the identifier was used or addressed via a local extern decl,
345 don't forget that fact. */
346 if (DECL_EXTERNAL (subblock_node))
348 if (TREE_USED (subblock_node))
349 TREE_USED (DECL_NAME (subblock_node)) = 1;
350 if (TREE_ADDRESSABLE (subblock_node))
351 TREE_ADDRESSABLE (DECL_ASSEMBLER_NAME (subblock_node)) = 1;
355 /* Pop the current level, and free the structure for reuse. */
356 struct binding_level *level = current_binding_level;
357 current_binding_level = current_binding_level->level_chain;
358 level->level_chain = free_binding_level;
359 free_binding_level = level;
362 if (functionbody)
364 /* This is the top level block of a function. The ..._DECL chain stored
365 in BLOCK_VARS are the function's parameters (PARM_DECL nodes). Don't
366 leave them in the BLOCK because they are found in the FUNCTION_DECL
367 instead. */
368 DECL_INITIAL (current_function_decl) = block;
369 BLOCK_VARS (block) = 0;
371 else if (block)
373 if (!block_previously_created)
374 current_binding_level->blocks
375 = chainon (current_binding_level->blocks, block);
378 /* If we did not make a block for the level just exited, any blocks made for
379 inner levels (since they cannot be recorded as subblocks in that level)
380 must be carried forward so they will later become subblocks of something
381 else. */
382 else if (subblock_chain)
383 current_binding_level->blocks
384 = chainon (current_binding_level->blocks, subblock_chain);
385 if (block)
386 TREE_USED (block) = 1;
388 return block;
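/* Illustrative sketch of how the two routines above are meant to pair up: a
   caller opens a contour, pushes declarations into it and then closes it,
   asking for a BLOCK only when something was declared, e.g.

       pushlevel (0);
       pushdecl (gnu_decl);
       ...
       gnu_block = poplevel (kept_level_p (), 1, 0);

   (the exact argument values here are only an example).  */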
391 /* Insert BLOCK at the end of the list of subblocks of the
392 current binding level. This is used when a BIND_EXPR is expanded,
393 to handle the BLOCK node inside the BIND_EXPR. */
395 void
396 insert_block (block)
397 tree block;
399 TREE_USED (block) = 1;
400 current_binding_level->blocks
401 = chainon (current_binding_level->blocks, block);
404 /* Set the BLOCK node for the innermost scope
405 (the one we are currently in). */
407 void
408 set_block (block)
409 tree block;
411 current_binding_level->this_block = block;
412 current_binding_level->names = chainon (current_binding_level->names,
413 BLOCK_VARS (block));
414 current_binding_level->blocks = chainon (current_binding_level->blocks,
415 BLOCK_SUBBLOCKS (block));
418 /* Records a ..._DECL node DECL as belonging to the current lexical scope.
419 Returns the ..._DECL node. */
421 tree
422 pushdecl (decl)
423 tree decl;
425 struct binding_level *b;
427 /* If at top level, there is no context. But PARM_DECLs always go in the
428 level of their function. */
429 if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
431 b = global_binding_level;
432 DECL_CONTEXT (decl) = 0;
434 else
436 b = current_binding_level;
437 DECL_CONTEXT (decl) = current_function_decl;
440 /* Put the declaration on the list. The list of declarations is in reverse
441 order. The list will be reversed later if necessary. This needs to be
442 this way for compatibility with the back-end.
444 Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into the list. They
445 will cause trouble with the debugger and aren't needed anyway. */
446 if (TREE_CODE (decl) != TYPE_DECL
447 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
449 TREE_CHAIN (decl) = b->names;
450 b->names = decl;
453 /* For the declaration of a type, set its name if it either is not already
454 set, was set to an IDENTIFIER_NODE, indicating an internal name,
455 or if the previous type name was not derived from a source name.
456 We'd rather have the type named with a real name and all the pointer
457 types to the same object have the same POINTER_TYPE node. The corresponding
458 code in c-decl.c makes a copy of the type node here, but that may cause
459 us trouble with incomplete types, so let's not try it (at least
460 for now). */
462 if (TREE_CODE (decl) == TYPE_DECL
463 && DECL_NAME (decl) != 0
464 && (TYPE_NAME (TREE_TYPE (decl)) == 0
465 || TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == IDENTIFIER_NODE
466 || (TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == TYPE_DECL
467 && DECL_ARTIFICIAL (TYPE_NAME (TREE_TYPE (decl)))
468 && ! DECL_ARTIFICIAL (decl))))
469 TYPE_NAME (TREE_TYPE (decl)) = decl;
471 return decl;
474 /* Do little here. Set up the standard declarations later after the
475 front end has been run. */
477 void
478 gnat_init_decl_processing ()
480 lineno = 0;
482 /* Make the binding_level structure for global names. */
483 current_function_decl = 0;
484 current_binding_level = 0;
485 free_binding_level = 0;
486 pushlevel (0);
487 global_binding_level = current_binding_level;
489 build_common_tree_nodes (0);
491 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
492 corresponding to the size of ptr_mode. Make this here since we need
493 this before we can expand the GNAT types. */
494 set_sizetype (gnat_type_for_size (GET_MODE_BITSIZE (ptr_mode), 0));
495 build_common_tree_nodes_2 (0);
497 pushdecl (build_decl (TYPE_DECL, get_identifier (SIZE_TYPE), sizetype));
499 /* We need to make the integer type before doing anything else.
500 We stitch this in to the appropriate GNAT type later. */
501 pushdecl (build_decl (TYPE_DECL, get_identifier ("integer"),
502 integer_type_node));
503 pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned char"),
504 char_type_node));
506 ptr_void_type_node = build_pointer_type (void_type_node);
510 /* Create the predefined scalar types such as `integer_type_node' needed
511 in the gcc back-end and initialize the global binding level. */
513 void
514 init_gigi_decls (long_long_float_type, exception_type)
515 tree long_long_float_type, exception_type;
517 tree endlink, decl;
518 unsigned int i;
520 /* Set the types that GCC and Gigi use from the front end. We would like
521 to do this for char_type_node, but it needs to correspond to the C
522 char type. */
523 if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
525 /* In this case, the builtin floating point types are VAX float,
526 so make up a type for use. */
527 longest_float_type_node = make_node (REAL_TYPE);
528 TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
529 layout_type (longest_float_type_node);
530 pushdecl (build_decl (TYPE_DECL, get_identifier ("longest float type"),
531 longest_float_type_node));
533 else
534 longest_float_type_node = TREE_TYPE (long_long_float_type);
536 except_type_node = TREE_TYPE (exception_type);
538 unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
539 pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned int"),
540 unsigned_type_node));
542 void_type_decl_node
543 = pushdecl (build_decl (TYPE_DECL, get_identifier ("void"),
544 void_type_node));
546 void_ftype = build_function_type (void_type_node, NULL_TREE);
547 ptr_void_ftype = build_pointer_type (void_ftype);
549 /* Now declare runtime functions. */
550 endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);
552 /* malloc is a function declaration tree for a function to allocate
553 memory. */
554 malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
555 NULL_TREE,
556 build_function_type (ptr_void_type_node,
557 tree_cons (NULL_TREE,
558 sizetype,
559 endlink)),
560 NULL_TREE, 0, 1, 1, 0);
562 /* free is a function declaration tree for a function to free memory. */
564 free_decl
565 = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
566 build_function_type (void_type_node,
567 tree_cons (NULL_TREE,
568 ptr_void_type_node,
569 endlink)),
570 NULL_TREE, 0, 1, 1, 0);
572 /* Make the types and functions used for exception processing. */
573 jmpbuf_type
574 = build_array_type (gnat_type_for_mode (Pmode, 0),
575 build_index_type (build_int_2 (5, 0)));
576 pushdecl (build_decl (TYPE_DECL, get_identifier ("JMPBUF_T"), jmpbuf_type));
577 jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);
579 /* Functions to get and set the jumpbuf pointer for the current thread. */
580 get_jmpbuf_decl
581 = create_subprog_decl
582 (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
583 NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
584 NULL_TREE, 0, 1, 1, 0);
586 set_jmpbuf_decl
587 = create_subprog_decl
588 (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
589 NULL_TREE,
590 build_function_type (void_type_node,
591 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
592 NULL_TREE, 0, 1, 1, 0);
594 /* Function to get the current exception. */
595 get_excptr_decl
596 = create_subprog_decl
597 (get_identifier ("system__soft_links__get_gnat_exception"),
598 NULL_TREE,
599 build_function_type (build_pointer_type (except_type_node), NULL_TREE),
600 NULL_TREE, 0, 1, 1, 0);
602 /* Functions that raise exceptions. */
603 raise_nodefer_decl
604 = create_subprog_decl
605 (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
606 build_function_type (void_type_node,
607 tree_cons (NULL_TREE,
608 build_pointer_type (except_type_node),
609 endlink)),
610 NULL_TREE, 0, 1, 1, 0);
612 /* If in no exception handlers mode, all raise statements are redirected to
613 __gnat_last_chance_handler. No need to redefine raise_nodefer_decl, since
614 this procedure will never be called in this mode. */
615 if (No_Exception_Handlers_Set ())
617 decl
618 = create_subprog_decl
619 (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
620 build_function_type (void_type_node,
621 tree_cons (NULL_TREE,
622 build_pointer_type (char_type_node),
623 tree_cons (NULL_TREE,
624 integer_type_node,
625 endlink))),
626 NULL_TREE, 0, 1, 1, 0);
628 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
629 gnat_raise_decls[i] = decl;
631 else
632 /* Otherwise, make one decl for each exception reason. */
633 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
635 char name[17];
637 sprintf (name, "__gnat_rcheck_%.2d", i);
638 gnat_raise_decls[i]
639 = create_subprog_decl
640 (get_identifier (name), NULL_TREE,
641 build_function_type (void_type_node,
642 tree_cons (NULL_TREE,
643 build_pointer_type
644 (char_type_node),
645 tree_cons (NULL_TREE,
646 integer_type_node,
647 endlink))),
648 NULL_TREE, 0, 1, 1, 0);
651 /* Indicate that these never return. */
652 TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
653 TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
654 TREE_TYPE (raise_nodefer_decl)
655 = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
656 TYPE_QUAL_VOLATILE);
658 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
660 TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
661 TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
662 TREE_TYPE (gnat_raise_decls[i])
663 = build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
664 TYPE_QUAL_VOLATILE);
667 /* setjmp returns an integer and has one operand, which is a pointer to
668 a jmpbuf. */
669 setjmp_decl
670 = create_subprog_decl
671 (get_identifier ("setjmp"), NULL_TREE,
672 build_function_type (integer_type_node,
673 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
674 NULL_TREE, 0, 1, 1, 0);
676 DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
677 DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;
679 main_identifier_node = get_identifier ("main");
682 /* This function is called indirectly from toplev.c to handle incomplete
683 declarations, i.e. VAR_DECL nodes whose DECL_SIZE is zero. To be precise,
684 compile_file in toplev.c makes an indirect call through the function pointer
685 incomplete_decl_finalize_hook which is initialized to this routine in
686 init_decl_processing. */
688 void
689 gnat_finish_incomplete_decl (dont_care)
690 tree dont_care ATTRIBUTE_UNUSED;
692 gigi_abort (405);
695 /* Given a record type (RECORD_TYPE) and a chain of FIELD_DECL
696 nodes (FIELDLIST), finish constructing the record or union type.
697 If HAS_REP is nonzero, this record has a rep clause; don't call
698 layout_type but merely set the size and alignment ourselves.
699 If DEFER_DEBUG is nonzero, do not call the debugging routines
700 on this type; it will be done later. */
702 void
703 finish_record_type (record_type, fieldlist, has_rep, defer_debug)
704 tree record_type;
705 tree fieldlist;
706 int has_rep;
707 int defer_debug;
709 enum tree_code code = TREE_CODE (record_type);
710 tree ada_size = bitsize_zero_node;
711 tree size = bitsize_zero_node;
712 tree size_unit = size_zero_node;
713 int var_size = 0;
714 tree field;
716 TYPE_FIELDS (record_type) = fieldlist;
718 if (TYPE_NAME (record_type) != 0
719 && TREE_CODE (TYPE_NAME (record_type)) == TYPE_DECL)
720 TYPE_STUB_DECL (record_type) = TYPE_NAME (record_type);
721 else
722 TYPE_STUB_DECL (record_type)
723 = pushdecl (build_decl (TYPE_DECL, TYPE_NAME (record_type),
724 record_type));
726 /* We don't need both the typedef name and the record name output in
727 the debugging information, since they are the same. */
728 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
730 /* Globally initialize the record first. If this is a rep'ed record,
731 that just means some initializations; otherwise, layout the record. */
733 if (has_rep)
735 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
736 TYPE_MODE (record_type) = BLKmode;
737 if (TYPE_SIZE (record_type) == 0)
739 TYPE_SIZE (record_type) = bitsize_zero_node;
740 TYPE_SIZE_UNIT (record_type) = size_zero_node;
743 else
745 /* Ensure there isn't a size already set. There can be in an error
746 case where there is a rep clause but all fields have errors and
747 no longer have a position. */
748 TYPE_SIZE (record_type) = 0;
749 layout_type (record_type);
752 /* At this point, the position and size of each field is known. It was
753 either set before entry by a rep clause, or by laying out the type
754 above. We now make a pass through the fields (in reverse order for
755 QUAL_UNION_TYPEs) to compute the Ada size; the GCC size and alignment
756 (for rep'ed records that are not padding types); and the mode (for
757 rep'ed records). */
759 if (code == QUAL_UNION_TYPE)
760 fieldlist = nreverse (fieldlist);
762 for (field = fieldlist; field; field = TREE_CHAIN (field))
764 tree type = TREE_TYPE (field);
765 tree this_size = DECL_SIZE (field);
766 tree this_size_unit = DECL_SIZE_UNIT (field);
767 tree this_ada_size = DECL_SIZE (field);
769 /* We need to make an XVE/XVU record if any field has variable size,
770 whether or not the record does. For example, if we have a union,
771 it may be that all fields, rounded up to the alignment, have the
772 same size, in which case we'll use that size. But the debug
773 output routines (except Dwarf2) won't be able to output the fields,
774 so we need to make the special record. */
775 if (TREE_CODE (this_size) != INTEGER_CST)
776 var_size = 1;
778 if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
779 || TREE_CODE (type) == QUAL_UNION_TYPE)
780 && ! TYPE_IS_FAT_POINTER_P (type)
781 && ! TYPE_CONTAINS_TEMPLATE_P (type)
782 && TYPE_ADA_SIZE (type) != 0)
783 this_ada_size = TYPE_ADA_SIZE (type);
785 if (has_rep && ! DECL_BIT_FIELD (field))
786 TYPE_ALIGN (record_type)
787 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
789 switch (code)
791 case UNION_TYPE:
792 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
793 size = size_binop (MAX_EXPR, size, this_size);
794 size_unit = size_binop (MAX_EXPR, size_unit, this_size_unit);
795 break;
797 case QUAL_UNION_TYPE:
798 ada_size
799 = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
800 this_ada_size, ada_size));
801 size = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
802 this_size, size));
803 size_unit = fold (build (COND_EXPR, sizetype, DECL_QUALIFIER (field),
804 this_size_unit, size_unit));
805 break;
807 case RECORD_TYPE:
808 /* Since we know here that all fields are sorted in order of
809 increasing bit position, the size of the record is one
810 higher than the ending bit of the last field processed
811 unless we have a rep clause, since in that case we might
812 have a field outside a QUAL_UNION_TYPE that has a higher ending
813 position. So use a MAX in that case. Also, if this field is a
814 QUAL_UNION_TYPE, we need to take into account the previous size in
815 the case of empty variants. */
816 ada_size
817 = merge_sizes (ada_size, bit_position (field), this_ada_size,
818 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
819 size = merge_sizes (size, bit_position (field), this_size,
820 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
821 size_unit
822 = merge_sizes (size_unit, byte_position (field), this_size_unit,
823 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
824 break;
826 default:
827 abort ();
831 if (code == QUAL_UNION_TYPE)
832 nreverse (fieldlist);
834 /* If this is a padding record, we never want to make the size smaller than
835 what was specified in it, if any. */
836 if (TREE_CODE (record_type) == RECORD_TYPE
837 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type) != 0)
839 size = TYPE_SIZE (record_type);
840 size_unit = TYPE_SIZE_UNIT (record_type);
843 /* Now set any of the values we've just computed that apply. */
844 if (! TYPE_IS_FAT_POINTER_P (record_type)
845 && ! TYPE_CONTAINS_TEMPLATE_P (record_type))
846 SET_TYPE_ADA_SIZE (record_type, ada_size);
848 #ifdef ROUND_TYPE_SIZE
849 size = ROUND_TYPE_SIZE (record_type, size, TYPE_ALIGN (record_type));
850 size_unit = ROUND_TYPE_SIZE_UNIT (record_type, size_unit,
851 TYPE_ALIGN (record_type) / BITS_PER_UNIT);
852 #else
853 size = round_up (size, TYPE_ALIGN (record_type));
854 size_unit = round_up (size_unit, TYPE_ALIGN (record_type) / BITS_PER_UNIT);
855 #endif
857 if (has_rep
858 && ! (TREE_CODE (record_type) == RECORD_TYPE
859 && TYPE_IS_PADDING_P (record_type)
860 && TREE_CODE (size) != INTEGER_CST
861 && contains_placeholder_p (size)))
863 TYPE_SIZE (record_type) = size;
864 TYPE_SIZE_UNIT (record_type) = size_unit;
867 if (has_rep)
868 compute_record_mode (record_type);
870 if (! defer_debug)
872 /* If this record is of variable size, rename it so that the
873 debugger knows it is and make a new, parallel, record
874 that tells the debugger how the record is laid out. See
875 exp_dbug.ads. */
876 if (var_size)
878 tree new_record_type
879 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
880 ? UNION_TYPE : TREE_CODE (record_type));
881 tree orig_id = DECL_NAME (TYPE_STUB_DECL (record_type));
882 tree new_id
883 = concat_id_with_name (orig_id,
884 TREE_CODE (record_type) == QUAL_UNION_TYPE
885 ? "XVU" : "XVE");
886 tree last_pos = bitsize_zero_node;
887 tree old_field;
889 TYPE_NAME (new_record_type) = new_id;
890 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
891 TYPE_STUB_DECL (new_record_type)
892 = pushdecl (build_decl (TYPE_DECL, new_id, new_record_type));
893 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
894 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
895 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
896 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
898 /* Now scan all the fields, replacing each field with a new
899 field corresponding to the new encoding. */
900 for (old_field = TYPE_FIELDS (record_type); old_field != 0;
901 old_field = TREE_CHAIN (old_field))
903 tree field_type = TREE_TYPE (old_field);
904 tree field_name = DECL_NAME (old_field);
905 tree new_field;
906 tree curpos = bit_position (old_field);
907 int var = 0;
908 unsigned int align = 0;
909 tree pos;
911 /* See how the position was modified from the last position.
913 There are two basic cases we support: a value was added
914 to the last position or the last position was rounded to
915 a boundary and then something was added. Check for the
916 first case first. If not, see if there is any evidence
917 of rounding. If so, round the last position and try
918 again.
920 If this is a union, the position can be taken as zero. */
922 if (TREE_CODE (new_record_type) == UNION_TYPE)
923 pos = bitsize_zero_node, align = 0;
924 else
925 pos = compute_related_constant (curpos, last_pos);
927 if (pos == 0 && TREE_CODE (curpos) == MULT_EXPR
928 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST)
930 align = TREE_INT_CST_LOW (TREE_OPERAND (curpos, 1));
931 pos = compute_related_constant (curpos,
932 round_up (last_pos, align));
934 else if (pos == 0 && TREE_CODE (curpos) == PLUS_EXPR
935 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
936 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
937 && host_integerp (TREE_OPERAND
938 (TREE_OPERAND (curpos, 0), 1),
939 1))
941 align
942 = tree_low_cst
943 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
944 pos = compute_related_constant (curpos,
945 round_up (last_pos, align));
948 /* If we can't compute a position, set it to zero.
950 ??? We really should abort here, but it's too much work
951 to get this correct for all cases. */
953 if (pos == 0)
954 pos = bitsize_zero_node;
956 /* See if this type is variable-size and make a new type
957 and indicate the indirection if so. */
958 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
960 field_type = build_pointer_type (field_type);
961 var = 1;
964 /* Make a new field name, if necessary. */
965 if (var || align != 0)
967 char suffix[6];
969 if (align != 0)
970 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
971 align / BITS_PER_UNIT);
972 else
973 strcpy (suffix, "XVL");
975 field_name = concat_id_with_name (field_name, suffix);
978 new_field = create_field_decl (field_name, field_type,
979 new_record_type, 0,
980 DECL_SIZE (old_field), pos, 0);
981 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
982 TYPE_FIELDS (new_record_type) = new_field;
984 /* If old_field is a QUAL_UNION_TYPE, take its size as being
985 zero. The only time it's not the last field of the record
986 is when there are other components at fixed positions after
987 it (meaning there was a rep clause for every field) and we
988 want to be able to encode them. */
989 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
990 (TREE_CODE (TREE_TYPE (old_field))
991 == QUAL_UNION_TYPE)
992 ? bitsize_zero_node
993 : DECL_SIZE (old_field));
996 TYPE_FIELDS (new_record_type)
997 = nreverse (TYPE_FIELDS (new_record_type));
999 rest_of_type_compilation (new_record_type, global_bindings_p ());
1002 rest_of_type_compilation (record_type, global_bindings_p ());
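/* To recap the encoding generated above for variable-sized records (see
   exp_dbug.ads): the parallel type is named after the original one with an
   "XVE" suffix ("XVU" for a QUAL_UNION_TYPE), and a field whose size is
   variable or whose position involved rounding gets a suffix of the form
   "XV%c%u", where the letter is 'L' for a variable-sized field and 'A'
   otherwise and the number is the rounding alignment in bytes; a bare "XVL"
   suffix marks a variable-sized field whose position needed no rounding.  */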
1006 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1007 with FIRST_BIT and SIZE that describe a field. SPECIAL is nonzero
1008 if this represents a QUAL_UNION_TYPE in which case we must look for
1009 COND_EXPRs and replace a value of zero with the old size. If HAS_REP
1010 is nonzero, we must take the MAX of the end position of this field
1011 with LAST_SIZE. In all other cases, we use FIRST_BIT plus SIZE.
1013 We return an expression for the size. */
1015 static tree
1016 merge_sizes (last_size, first_bit, size, special, has_rep)
1017 tree last_size;
1018 tree first_bit, size;
1019 int special;
1020 int has_rep;
1022 tree type = TREE_TYPE (last_size);
1024 if (! special || TREE_CODE (size) != COND_EXPR)
1026 tree new = size_binop (PLUS_EXPR, first_bit, size);
1028 if (has_rep)
1029 new = size_binop (MAX_EXPR, last_size, new);
1031 return new;
1034 return fold (build (COND_EXPR, type, TREE_OPERAND (size, 0),
1035 integer_zerop (TREE_OPERAND (size, 1))
1036 ? last_size : merge_sizes (last_size, first_bit,
1037 TREE_OPERAND (size, 1),
1038 1, has_rep),
1039 integer_zerop (TREE_OPERAND (size, 2))
1040 ? last_size : merge_sizes (last_size, first_bit,
1041 TREE_OPERAND (size, 2),
1042 1, has_rep)));
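/* For instance, for a variant of a QUAL_UNION_TYPE whose DECL_SIZE is the
   COND_EXPR (COND ? 32 : 0), the call

       merge_sizes (last_size, pos, size, 1, 0)

   folds to (COND ? pos + 32 : last_size): the zero arm keeps the previous
   size, so an empty variant never shrinks the accumulated size.  */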
1045 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1046 related by the addition of a constant. Return that constant if so. */
1048 static tree
1049 compute_related_constant (op0, op1)
1050 tree op0, op1;
1052 tree op0_var, op1_var;
1053 tree op0_con = split_plus (op0, &op0_var);
1054 tree op1_con = split_plus (op1, &op1_var);
1055 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1057 if (operand_equal_p (op0_var, op1_var, 0))
1058 return result;
1059 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1060 return result;
1061 else
1062 return 0;
1065 /* Utility function of above to split a tree OP which may be a sum, into a
1066 constant part, which is returned, and a variable part, which is stored
1067 in *PVAR. *PVAR may be size_zero_node. All operations must be of
1068 sizetype. */
1070 static tree
1071 split_plus (in, pvar)
1072 tree in;
1073 tree *pvar;
1075 tree result = bitsize_zero_node;
1077 while (TREE_CODE (in) == NON_LVALUE_EXPR)
1078 in = TREE_OPERAND (in, 0);
1080 *pvar = in;
1081 if (TREE_CODE (in) == INTEGER_CST)
1083 *pvar = bitsize_zero_node;
1084 return in;
1086 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1088 tree lhs_var, rhs_var;
1089 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1090 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1092 result = size_binop (PLUS_EXPR, result, lhs_con);
1093 result = size_binop (TREE_CODE (in), result, rhs_con);
1095 if (lhs_var == TREE_OPERAND (in, 0)
1096 && rhs_var == TREE_OPERAND (in, 1))
1097 return bitsize_zero_node;
1099 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1100 return result;
1102 else
1103 return bitsize_zero_node;
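/* For instance, if SIZE is some variable-sized expression, split_plus on the
   tree SIZE + 16 sets *PVAR to the variable part SIZE and returns 16, while
   on the constant 48 it sets *PVAR to bitsize_zero_node and returns 48.
   Consequently, compute_related_constant (SIZE + 16, SIZE) above returns 16,
   whereas compute_related_constant (SIZE + 16, SIZE * 2) returns 0 because
   the variable parts differ.  */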
1106 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1107 subprogram. If it is void_type_node, then we are dealing with a procedure,
1108 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1109 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1110 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1111 RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
1112 object. RETURNS_BY_REF is nonzero if the function returns by reference.
1113 RETURNS_WITH_DSP is nonzero if the function is to return with a
1114 depressed stack pointer. */
1116 tree
1117 create_subprog_type (return_type, param_decl_list, cico_list,
1118 returns_unconstrained, returns_by_ref, returns_with_dsp)
1119 tree return_type;
1120 tree param_decl_list;
1121 tree cico_list;
1122 int returns_unconstrained, returns_by_ref, returns_with_dsp;
1124 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1125 the subprogram formal parameters. This list is generated by traversing the
1126 input list of PARM_DECL nodes. */
1127 tree param_type_list = NULL;
1128 tree param_decl;
1129 tree type;
1131 for (param_decl = param_decl_list; param_decl;
1132 param_decl = TREE_CHAIN (param_decl))
1133 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1134 param_type_list);
1136 /* The list of the function parameter types has to be terminated by the void
1137 type to signal to the back-end that we are not dealing with a variable
1138 parameter subprogram, but that the subprogram has a fixed number of
1139 parameters. */
1140 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1142 /* The list of argument types has been created in reverse
1143 so nreverse it. */
1144 param_type_list = nreverse (param_type_list);
1146 type = build_function_type (return_type, param_type_list);
1148 /* TYPE may have been shared since GCC hashes types. If it already has a
1149 CICO_LIST or the new type is to have one, make a copy of TYPE. Likewise
1150 for RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1151 if (TYPE_CI_CO_LIST (type) != 0 || cico_list != 0
1152 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1153 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref)
1154 type = copy_type (type);
1156 SET_TYPE_CI_CO_LIST (type, cico_list);
1157 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1158 TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
1159 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1160 return type;
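/* For instance, the subprogram type of a parameterless Ada procedure with no
   copy-in/copy-out list would be obtained as

       create_subprog_type (void_type_node, NULL_TREE, NULL_TREE, 0, 0, 0)

   i.e. build_function_type of void_type_node and a parameter list containing
   only void_type_node, with none of the Ada-specific flags set.  */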
1163 /* Return a copy of TYPE but safe to modify in any way. */
1165 tree
1166 copy_type (type)
1167 tree type;
1169 tree new = copy_node (type);
1171 /* copy_node clears this field instead of copying it, because it is
1172 aliased with TREE_CHAIN. */
1173 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
1175 TYPE_POINTER_TO (new) = 0;
1176 TYPE_REFERENCE_TO (new) = 0;
1177 TYPE_MAIN_VARIANT (new) = new;
1178 TYPE_NEXT_VARIANT (new) = 0;
1180 return new;
1183 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1184 TYPE_INDEX_TYPE is INDEX. */
1186 tree
1187 create_index_type (min, max, index)
1188 tree min, max;
1189 tree index;
1191 /* First build a type for the desired range. */
1192 tree type = build_index_2_type (min, max);
1194 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1195 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1196 is set, but not to INDEX, make a copy of this type with the requested
1197 index type. Note that we have no way of sharing these types, but that's
1198 only a small hole. */
1199 if (TYPE_INDEX_TYPE (type) == index)
1200 return type;
1201 else if (TYPE_INDEX_TYPE (type) != 0)
1202 type = copy_type (type);
1204 SET_TYPE_INDEX_TYPE (type, index);
1205 return type;
1208 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type (a character
1209 string) and TYPE is a ..._TYPE node giving its data type.
1210 ARTIFICIAL_P is nonzero if this is a declaration that was generated
1211 by the compiler. DEBUG_INFO_P is nonzero if we need to write debugging
1212 information about this type. */
1214 tree
1215 create_type_decl (type_name, type, attr_list, artificial_p, debug_info_p)
1216 tree type_name;
1217 tree type;
1218 struct attrib *attr_list;
1219 int artificial_p;
1220 int debug_info_p;
1222 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1223 enum tree_code code = TREE_CODE (type);
1225 DECL_ARTIFICIAL (type_decl) = artificial_p;
1226 pushdecl (type_decl);
1227 process_attributes (type_decl, attr_list);
1229 /* Pass type declaration information to the debugger unless this is an
1230 UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
1231 and ENUMERAL_TYPE or RECORD_TYPE which is handled separately,
1232 a dummy type, which will be completed later, or a type for which
1233 debugging information was not requested. */
1234 if (code == UNCONSTRAINED_ARRAY_TYPE || TYPE_IS_DUMMY_P (type)
1235 || ! debug_info_p)
1236 DECL_IGNORED_P (type_decl) = 1;
1237 else if (code != ENUMERAL_TYPE && code != RECORD_TYPE
1238 && ! ((code == POINTER_TYPE || code == REFERENCE_TYPE)
1239 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1240 rest_of_decl_compilation (type_decl, NULL, global_bindings_p (), 0);
1242 return type_decl;
1245 /* Returns a GCC VAR_DECL node. VAR_NAME gives the name of the variable.
1246 ASM_NAME is its assembler name (if provided). TYPE is its data type
1247 (a GCC ..._TYPE node). VAR_INIT is the GCC tree for an optional initial
1248 expression; NULL_TREE if none.
1250 CONST_FLAG is nonzero if this variable is constant.
1252 PUBLIC_FLAG is nonzero if this definition is to be made visible outside of
1253 the current compilation unit. This flag should be set when processing the
1254 variable definitions in a package specification. EXTERN_FLAG is nonzero
1255 when processing an external variable declaration (as opposed to a
1256 definition: no storage is to be allocated for the variable here).
1258 STATIC_FLAG is only relevant when not at top level. In that case
1259 it indicates whether to always allocate storage to the variable. */
1261 tree
1262 create_var_decl (var_name, asm_name, type, var_init, const_flag, public_flag,
1263 extern_flag, static_flag, attr_list)
1264 tree var_name;
1265 tree asm_name;
1266 tree type;
1267 tree var_init;
1268 int const_flag;
1269 int public_flag;
1270 int extern_flag;
1271 int static_flag;
1272 struct attrib *attr_list;
1274 int init_const
1275 = (var_init == 0
1276 ? 0
1277 : (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
1278 && (global_bindings_p () || static_flag
1279 ? 0 != initializer_constant_valid_p (var_init,
1280 TREE_TYPE (var_init))
1281 : TREE_CONSTANT (var_init))));
1282 tree var_decl
1283 = build_decl ((const_flag && init_const
1284 /* Only make a CONST_DECL for sufficiently-small objects.
1285 We consider complex double "sufficiently-small" */
1286 && TYPE_SIZE (type) != 0
1287 && host_integerp (TYPE_SIZE_UNIT (type), 1)
1288 && 0 >= compare_tree_int (TYPE_SIZE_UNIT (type),
1289 GET_MODE_SIZE (DCmode)))
1290 ? CONST_DECL : VAR_DECL, var_name, type);
1291 tree assign_init = 0;
1293 /* If this is external, throw away any initializations unless this is a
1294 CONST_DECL (meaning we have a constant); they will be done elsewhere. If
1295 we are defining a global here, leave a constant initialization and save
1296 any variable elaborations for the elaboration routine. Otherwise, if
1297 the type of the initializing expression is not the same as TYPE, generate
1298 the initialization with an assignment statement, since it knows how
1299 to do the required adjustments. If we are just annotating types,
1300 throw away the initialization if it isn't a constant. */
1302 if ((extern_flag && TREE_CODE (var_decl) != CONST_DECL)
1303 || (type_annotate_only && var_init != 0 && ! TREE_CONSTANT (var_init)))
1304 var_init = 0;
1306 if (global_bindings_p () && var_init != 0 && ! init_const)
1308 add_pending_elaborations (var_decl, var_init);
1309 var_init = 0;
1312 else if (var_init != 0
1313 && ((TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
1314 != TYPE_MAIN_VARIANT (type))
1315 || (static_flag && ! init_const)))
1316 assign_init = var_init, var_init = 0;
1318 DECL_COMMON (var_decl) = !flag_no_common;
1319 DECL_INITIAL (var_decl) = var_init;
1320 TREE_READONLY (var_decl) = const_flag;
1321 DECL_EXTERNAL (var_decl) = extern_flag;
1322 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1323 TREE_CONSTANT (var_decl) = TREE_CODE (var_decl) == CONST_DECL;
1324 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1325 = TYPE_VOLATILE (type);
1327 /* At the global binding level we need to allocate static storage for the
1328 variable if and only if it is not external. If we are not at the top level
1329 we allocate automatic storage unless requested not to. */
1330 TREE_STATIC (var_decl) = global_bindings_p () ? !extern_flag : static_flag;
1332 if (asm_name != 0)
1333 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1335 process_attributes (var_decl, attr_list);
1337 /* Add this decl to the current binding level and generate any
1338 needed code and RTL. */
1339 var_decl = pushdecl (var_decl);
1340 expand_decl (var_decl);
1342 if (DECL_CONTEXT (var_decl) != 0)
1343 expand_decl_init (var_decl);
1345 /* If this is volatile, force it into memory. */
1346 if (TREE_SIDE_EFFECTS (var_decl))
1347 gnat_mark_addressable (var_decl);
1349 if (TREE_CODE (var_decl) != CONST_DECL)
1350 rest_of_decl_compilation (var_decl, 0, global_bindings_p (), 0);
1352 if (assign_init != 0)
1354 /* If VAR_DECL has a padded type, convert it to the unpadded
1355 type so the assignment is done properly. */
1356 tree lhs = var_decl;
1358 if (TREE_CODE (TREE_TYPE (lhs)) == RECORD_TYPE
1359 && TYPE_IS_PADDING_P (TREE_TYPE (lhs)))
1360 lhs = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (lhs))), lhs);
1362 expand_expr_stmt (build_binary_op (MODIFY_EXPR, NULL_TREE, lhs,
1363 assign_init));
1366 return var_decl;
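/* Illustrative call: a local, non-constant, non-external variable of type
   integer_type_node with no initializer, no assembler name and no attributes
   would be created roughly as

       create_var_decl (get_identifier ("x"), NULL_TREE, integer_type_node,
                        NULL_TREE, 0, 0, 0, 0, NULL);

   where "x" is just a made-up name for the example.  */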
1369 /* Returns a FIELD_DECL node. FIELD_NAME the field name, FIELD_TYPE is its
1370 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1371 this field is in a record type with a "pragma pack". If SIZE is nonzero
1372 it is the specified size for this field. If POS is nonzero, it is the bit
1373 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1374 the address of this field for aliasing purposes. */
1376 tree
1377 create_field_decl (field_name, field_type, record_type, packed, size, pos,
1378 addressable)
1379 tree field_name;
1380 tree field_type;
1381 tree record_type;
1382 int packed;
1383 tree size, pos;
1384 int addressable;
1386 tree field_decl = build_decl (FIELD_DECL, field_name, field_type);
1388 DECL_CONTEXT (field_decl) = record_type;
1389 TREE_READONLY (field_decl) = TREE_READONLY (field_type);
1391 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1392 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1393 If it is a padding type where the inner field is of variable size, it
1394 must be at its natural alignment. Just handle the packed case here; we
1395 will disallow non-aligned rep clauses elsewhere. */
1396 if (packed && TYPE_MODE (field_type) == BLKmode)
1397 DECL_ALIGN (field_decl)
1398 = ((TREE_CODE (field_type) == RECORD_TYPE
1399 && TYPE_IS_PADDING_P (field_type)
1400 && ! TREE_CONSTANT (DECL_SIZE (TYPE_FIELDS (field_type))))
1401 ? TYPE_ALIGN (field_type) : BITS_PER_UNIT);
1403 /* If a size is specified, use it. Otherwise, see if we have a size
1404 to use that may differ from the natural size of the object. */
1405 if (size != 0)
1406 size = convert (bitsizetype, size);
1407 else if (packed)
1409 if (packed == 1 && ! operand_equal_p (rm_size (field_type),
1410 TYPE_SIZE (field_type), 0))
1411 size = rm_size (field_type);
1413 /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to a
1414 byte boundary. */
1415 if (size != 0 && TREE_CODE (size) == INTEGER_CST
1416 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
1417 size = round_up (size, BITS_PER_UNIT);
1420 /* Make a bitfield if a size is specified, for two reasons: first, if the
1421 size differs from the natural size; second, if the alignment is
1422 insufficient. There are a number of ways the latter can be true. But
1423 never make a bitfield if the type of the field has a nonconstant size. */
1425 if (size != 0 && TREE_CODE (size) == INTEGER_CST
1426 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1427 && (! operand_equal_p (TYPE_SIZE (field_type), size, 0)
1428 || (pos != 0
1429 && ! value_zerop (size_binop (TRUNC_MOD_EXPR, pos,
1430 bitsize_int (TYPE_ALIGN
1431 (field_type)))))
1432 || packed
1433 || (TYPE_ALIGN (record_type) != 0
1434 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1436 DECL_BIT_FIELD (field_decl) = 1;
1437 DECL_SIZE (field_decl) = size;
1438 if (! packed && pos == 0)
1439 DECL_ALIGN (field_decl)
1440 = (TYPE_ALIGN (record_type) != 0
1441 ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
1442 : TYPE_ALIGN (field_type));
1445 DECL_PACKED (field_decl) = pos != 0 ? DECL_BIT_FIELD (field_decl) : packed;
1446 DECL_ALIGN (field_decl)
1447 = MAX (DECL_ALIGN (field_decl),
1448 DECL_BIT_FIELD (field_decl) ? 1
1449 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT
1450 : TYPE_ALIGN (field_type));
1452 if (pos != 0)
1454 /* We need to pass in the alignment the DECL is known to have.
1455 This is the lowest-order bit set in POS, but no more than
1456 the alignment of the record, if one is specified. Note
1457 that an alignment of 0 is taken as infinite. */
1458 unsigned int known_align;
1460 if (host_integerp (pos, 1))
1461 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1462 else
1463 known_align = BITS_PER_UNIT;
1465 if (TYPE_ALIGN (record_type)
1466 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1467 known_align = TYPE_ALIGN (record_type);
1469 layout_decl (field_decl, known_align);
1470 SET_DECL_OFFSET_ALIGN (field_decl,
1471 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1472 : BITS_PER_UNIT);
1473 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1474 &DECL_FIELD_BIT_OFFSET (field_decl),
1475 DECL_OFFSET_ALIGN (field_decl), pos);
1477 DECL_HAS_REP_P (field_decl) = 1;
1480 /* If the field type is passed by reference, we will have pointers to the
1481 field, so it is addressable. */
1482 if (must_pass_by_ref (field_type) || default_pass_by_ref (field_type))
1483 addressable = 1;
1485 /* Mark the decl as nonaddressable if it either is indicated so semantically
1486 or if it is a bit field. */
1487 DECL_NONADDRESSABLE_P (field_decl)
1488 = ! addressable || DECL_BIT_FIELD (field_decl);
1490 return field_decl;
1493 /* Subroutine of previous function: return nonzero if EXP, ignoring any side
1494 effects, has the value of zero. */
1496 static int
1497 value_zerop (exp)
1498 tree exp;
1500 if (TREE_CODE (exp) == COMPOUND_EXPR)
1501 return value_zerop (TREE_OPERAND (exp, 1));
1503 return integer_zerop (exp);
1506 /* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1507 PARAM_TYPE is its type. READONLY is nonzero if the parameter is
1508 readonly (either an IN parameter or an address of a pass-by-ref
1509 parameter). */
1511 tree
1512 create_param_decl (param_name, param_type, readonly)
1513 tree param_name;
1514 tree param_type;
1515 int readonly;
1517 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
1519 DECL_ARG_TYPE (param_decl) = param_type;
1520 DECL_ARG_TYPE_AS_WRITTEN (param_decl) = param_type;
1521 TREE_READONLY (param_decl) = readonly;
1522 return param_decl;
1525 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1527 void
1528 process_attributes (decl, attr_list)
1529 tree decl;
1530 struct attrib *attr_list;
1532 for (; attr_list; attr_list = attr_list->next)
1533 switch (attr_list->type)
1535 case ATTR_MACHINE_ATTRIBUTE:
1536 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->arg,
1537 NULL_TREE),
1538 ATTR_FLAG_TYPE_IN_PLACE);
1539 break;
1541 case ATTR_LINK_ALIAS:
1542 TREE_STATIC (decl) = 1;
1543 assemble_alias (decl, attr_list->name);
1544 break;
1546 case ATTR_WEAK_EXTERNAL:
1547 if (SUPPORTS_WEAK)
1548 declare_weak (decl);
1549 else
1550 post_error ("?weak declarations not supported on this target",
1551 attr_list->error_point);
1552 break;
1554 case ATTR_LINK_SECTION:
1555 #ifdef ASM_OUTPUT_SECTION_NAME
1556 DECL_SECTION_NAME (decl)
1557 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1558 IDENTIFIER_POINTER (attr_list->name));
1559 DECL_COMMON (decl) = 0;
1560 #else
1561 post_error ("?section attributes are not supported for this target",
1562 attr_list->error_point);
1563 #endif
1564 break;
1568 /* Add some pending elaborations on the list. */
1570 void
1571 add_pending_elaborations (var_decl, var_init)
1572 tree var_decl;
1573 tree var_init;
1575 if (var_init != 0)
1576 Check_Elaboration_Code_Allowed (error_gnat_node);
1578 pending_elaborations
1579 = chainon (pending_elaborations, build_tree_list (var_decl, var_init));
1582 /* Obtain any pending elaborations and clear the old list. */
1584 tree
1585 get_pending_elaborations ()
1587 /* Each thing added to the list went on the end; we want it at the
1588 beginning. */
1589 tree result = TREE_CHAIN (pending_elaborations);
1591 TREE_CHAIN (pending_elaborations) = 0;
1592 return result;
1595 /* Return nonzero if there are pending elaborations. */
1597 int
1598 pending_elaborations_p ()
1600 return TREE_CHAIN (pending_elaborations) != 0;
1603 /* Save a copy of the current pending elaboration list and make a new
1604 one. */
1606 void
1607 push_pending_elaborations ()
1609 struct e_stack *p = (struct e_stack *) ggc_alloc (sizeof (struct e_stack));
1611 p->next = elist_stack;
1612 p->elab_list = pending_elaborations;
1613 elist_stack = p;
1614 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
1617 /* Pop the stack of pending elaborations. */
1619 void
1620 pop_pending_elaborations ()
1622 struct e_stack *p = elist_stack;
1624 pending_elaborations = p->elab_list;
1625 elist_stack = p->next;
1628 /* Return the current position in pending_elaborations so we can insert
1629 elaborations after that point. */
1631 tree
1632 get_elaboration_location ()
1634 return tree_last (pending_elaborations);
1637 /* Insert the current elaborations after ELAB, which is in some elaboration
1638 list. */
1640 void
1641 insert_elaboration_list (elab)
1642 tree elab;
1644 tree next = TREE_CHAIN (elab);
1646 if (TREE_CHAIN (pending_elaborations))
1648 TREE_CHAIN (elab) = TREE_CHAIN (pending_elaborations);
1649 TREE_CHAIN (tree_last (pending_elaborations)) = next;
1650 TREE_CHAIN (pending_elaborations) = 0;
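/* Illustrative protocol for the elaboration-list routines above: a caller
   generating code for an inner context brackets its work with

       push_pending_elaborations ();
       ...
       pop_pending_elaborations ();

   and may remember a point with get_elaboration_location () so that later
   elaborations can be spliced there with insert_elaboration_list ().  */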
1654 /* Returns a LABEL_DECL node for LABEL_NAME. */
1656 tree
1657 create_label_decl (label_name)
1658 tree label_name;
1660 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1662 DECL_CONTEXT (label_decl) = current_function_decl;
1663 DECL_MODE (label_decl) = VOIDmode;
1664 DECL_SOURCE_LINE (label_decl) = lineno;
1665 DECL_SOURCE_FILE (label_decl) = input_filename;
1667 return label_decl;
1670 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1671 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1672 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1673 PARM_DECL nodes chained through the TREE_CHAIN field).
1675 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1676 appropriate fields in the FUNCTION_DECL. */
1678 tree
1679 create_subprog_decl (subprog_name, asm_name, subprog_type, param_decl_list,
1680 inline_flag, public_flag, extern_flag, attr_list)
1681 tree subprog_name;
1682 tree asm_name;
1683 tree subprog_type;
1684 tree param_decl_list;
1685 int inline_flag;
1686 int public_flag;
1687 int extern_flag;
1688 struct attrib *attr_list;
1690 tree return_type = TREE_TYPE (subprog_type);
1691 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1693 /* If this is a function nested inside an inlined external function, it
1694 means we aren't going to compile the outer function unless it is
1695 actually inlined, so do the same for us. */
1696 if (current_function_decl != 0 && DECL_INLINE (current_function_decl)
1697 && DECL_EXTERNAL (current_function_decl))
1698 extern_flag = 1;
1700 DECL_EXTERNAL (subprog_decl) = extern_flag;
1701 TREE_PUBLIC (subprog_decl) = public_flag;
1702 DECL_INLINE (subprog_decl) = inline_flag;
1703 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1704 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1705 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1706 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1707 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1709 if (asm_name != 0)
1710 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1712 process_attributes (subprog_decl, attr_list);
1714 /* Add this decl to the current binding level. */
1715 subprog_decl = pushdecl (subprog_decl);
1717 /* Output the assembler code and/or RTL for the declaration. */
1718 rest_of_decl_compilation (subprog_decl, 0, global_bindings_p (), 0);
1720 return subprog_decl;
1723 /* Count how deep we are into nested functions. This is because
1724 we shouldn't call the backend function context routines unless we
1725 are in a nested function. */
1727 static int function_nesting_depth;
1729 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1730 body. This routine needs to be invoked before processing the declarations
1731 appearing in the subprogram. */
1733 void
1734 begin_subprog_body (subprog_decl)
1735 tree subprog_decl;
1737 tree param_decl_list;
1738 tree param_decl;
1739 tree next_param;
1741 if (function_nesting_depth++ != 0)
1742 push_function_context ();
1744 announce_function (subprog_decl);
1746 /* Make this field nonzero so further routines know that this is not
1747 tentative. error_mark_node is replaced below (in poplevel) with the
1748 appropriate BLOCK. */
1749 DECL_INITIAL (subprog_decl) = error_mark_node;
1751 /* This function exists in static storage. This does not mean `static' in
1752 the C sense! */
1753 TREE_STATIC (subprog_decl) = 1;
1755 /* Enter a new binding level. */
1756 current_function_decl = subprog_decl;
1757 pushlevel (0);
1759 /* Push all the PARM_DECL nodes onto the current scope (i.e. the scope of the
1760 subprogram body) so that they can be recognized as local variables in the
1761 subprogram.
1763 The list of PARM_DECL nodes is stored in the right order in
1764 DECL_ARGUMENTS. Since ..._DECL nodes get stored in the reverse order in
1765 which they are transmitted to `pushdecl' we need to reverse the list of
1766 PARM_DECLs if we want it to be stored in the right order. The reason why
1767 we want to make sure the PARM_DECLs are stored in the correct order is
1768 that this list will be retrieved in a few lines with a call to `getdecls'
1769 to store it back into the DECL_ARGUMENTS field. */
1770 param_decl_list = nreverse (DECL_ARGUMENTS (subprog_decl));
1772 for (param_decl = param_decl_list; param_decl; param_decl = next_param)
1774 next_param = TREE_CHAIN (param_decl);
1775 TREE_CHAIN (param_decl) = NULL;
1776 pushdecl (param_decl);
1779 /* Store back the PARM_DECL nodes. They appear in the right order. */
1780 DECL_ARGUMENTS (subprog_decl) = getdecls ();
1782 init_function_start (subprog_decl, input_filename, lineno);
1783 expand_function_start (subprog_decl, 0);
1785 /* If this function is `main', emit a call to `__main'
1786 to run global initializers, etc. */
1787 if (DECL_ASSEMBLER_NAME (subprog_decl) != 0
1788 && MAIN_NAME_P (DECL_ASSEMBLER_NAME (subprog_decl))
1789 && DECL_CONTEXT (subprog_decl) == NULL_TREE)
1790 expand_main_function ();
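/* Illustrative sketch, not taken from the GNAT sources: pushdecl inserts
   each decl at the head of the current scope, which reverses the order,
   so the parameter chain is reversed once beforehand and comes back in
   declaration order from the final getdecls call. A stand-alone model
   with invented names: */
#if 0
struct example_decl { struct example_decl *next; int id; };

/* Head insertion, like pushdecl: the node pushed last ends up first,
   so pushing an already-reversed chain restores the original order.  */
static struct example_decl *
example_pushdecl (struct example_decl *scope, struct example_decl *d)
{
  d->next = scope;
  return d;
}
#endif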
1793 /* Finish the definition of the current subprogram and compile it all the way
1794 to assembler language output. */
1796 void
1797 end_subprog_body ()
1799 tree decl;
1800 tree cico_list;
1802 poplevel (1, 0, 1);
1803 BLOCK_SUPERCONTEXT (DECL_INITIAL (current_function_decl))
1804 = current_function_decl;
1806 /* Mark the RESULT_DECL as being in this subprogram. */
1807 DECL_CONTEXT (DECL_RESULT (current_function_decl)) = current_function_decl;
1809 expand_function_end (input_filename, lineno, 0);
1811 /* If this is a nested function, push a new GC context. That will keep
1812 local variables on the stack from being collected while we're doing
1813 the compilation of this function. */
1814 if (function_nesting_depth > 1)
1815 ggc_push_context ();
1817 rest_of_compilation (current_function_decl);
1819 if (function_nesting_depth > 1)
1820 ggc_pop_context ();
1822 #if 0
1823 /* If we're sure this function is defined in this file, then mark it
1824 as such. */
1825 if (TREE_ASM_WRITTEN (current_function_decl))
1826 mark_fn_defined_in_this_file (current_function_decl);
1827 #endif
1829 /* Throw away any VAR_DECLs we made for OUT parameters; they must
1830 not be seen when we call this function and will be in
1831 unallocated memory anyway. */
1832 for (cico_list = TYPE_CI_CO_LIST (TREE_TYPE (current_function_decl));
1833 cico_list != 0; cico_list = TREE_CHAIN (cico_list))
1834 TREE_VALUE (cico_list) = 0;
1836 if (DECL_SAVED_INSNS (current_function_decl) == 0)
1838 /* Throw away DECL_RTL in any PARM_DECLs unless this function
1839 was saved for inline, in which case the DECL_RTLs are in
1840 preserved memory. */
1841 for (decl = DECL_ARGUMENTS (current_function_decl);
1842 decl != 0; decl = TREE_CHAIN (decl))
1844 SET_DECL_RTL (decl, 0);
1845 DECL_INCOMING_RTL (decl) = 0;
1848 /* Similarly, discard DECL_RTL of the return value. */
1849 SET_DECL_RTL (DECL_RESULT (current_function_decl), 0);
1851 /* But DECL_INITIAL must remain nonzero so we know this
1852 was an actual function definition unless toplev.c decided not
1853 to inline it. */
1854 if (DECL_INITIAL (current_function_decl) != 0)
1855 DECL_INITIAL (current_function_decl) = error_mark_node;
1857 DECL_ARGUMENTS (current_function_decl) = 0;
1860 /* If we are not at the bottom of the function nesting stack, pop up to
1861 the containing function. Otherwise show we aren't in any function. */
1862 if (--function_nesting_depth != 0)
1863 pop_function_context ();
1864 else
1865 current_function_decl = 0;
1868 /* Return a definition for a builtin function named NAME and whose data type
1869 is TYPE. TYPE should be a function type with argument types.
1870 FUNCTION_CODE tells later passes how to compile calls to this function.
1871 See tree.h for its possible values.
1873 If LIBRARY_NAME is nonzero, use that for DECL_ASSEMBLER_NAME,
1874 the name to be called if we can't opencode the function. If
1875 ATTRS is nonzero, use that for the function attribute list. */
1877 tree
1878 builtin_function (name, type, function_code, class, library_name, attrs)
1879 const char *name;
1880 tree type;
1881 int function_code;
1882 enum built_in_class class;
1883 const char *library_name;
1884 tree attrs;
1886 tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);
1888 DECL_EXTERNAL (decl) = 1;
1889 TREE_PUBLIC (decl) = 1;
1890 if (library_name)
1891 SET_DECL_ASSEMBLER_NAME (decl, get_identifier (library_name));
1893 pushdecl (decl);
1894 DECL_BUILT_IN_CLASS (decl) = class;
1895 DECL_FUNCTION_CODE (decl) = function_code;
1896 if (attrs)
1897 decl_attributes (&decl, attrs, ATTR_FLAG_BUILT_IN);
1898 return decl;
1901 /* Return an integer type with the number of bits of precision given by
1902 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
1903 it is a signed type. */
1905 tree
1906 gnat_type_for_size (precision, unsignedp)
1907 unsigned precision;
1908 int unsignedp;
1910 tree t;
1911 char type_name[20];
1913 if (precision <= 2 * MAX_BITS_PER_WORD
1914 && signed_and_unsigned_types[precision][unsignedp] != 0)
1915 return signed_and_unsigned_types[precision][unsignedp];
1917 if (unsignedp)
1918 t = make_unsigned_type (precision);
1919 else
1920 t = make_signed_type (precision);
1922 if (precision <= 2 * MAX_BITS_PER_WORD)
1923 signed_and_unsigned_types[precision][unsignedp] = t;
1925 if (TYPE_NAME (t) == 0)
1927 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
1928 TYPE_NAME (t) = get_identifier (type_name);
1931 return t;
1934 /* Likewise for floating-point types. */
1936 static tree
1937 float_type_for_size (precision, mode)
1938 int precision;
1939 enum machine_mode mode;
1941 tree t;
1942 char type_name[20];
1944 if (float_types[(int) mode] != 0)
1945 return float_types[(int) mode];
1947 float_types[(int) mode] = t = make_node (REAL_TYPE);
1948 TYPE_PRECISION (t) = precision;
1949 layout_type (t);
1951 if (TYPE_MODE (t) != mode)
1952 gigi_abort (414);
1954 if (TYPE_NAME (t) == 0)
1956 sprintf (type_name, "FLOAT_%d", precision);
1957 TYPE_NAME (t) = get_identifier (type_name);
1960 return t;
1963 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
1964 an unsigned type; otherwise a signed type is returned. */
1966 tree
1967 gnat_type_for_mode (mode, unsignedp)
1968 enum machine_mode mode;
1969 int unsignedp;
1971 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
1972 return float_type_for_size (GET_MODE_BITSIZE (mode), mode);
1973 else
1974 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
1977 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
1979 tree
1980 gnat_unsigned_type (type_node)
1981 tree type_node;
1983 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
1985 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
1987 type = copy_node (type);
1988 TREE_TYPE (type) = type_node;
1990 else if (TREE_TYPE (type_node) != 0
1991 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
1992 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
1994 type = copy_node (type);
1995 TREE_TYPE (type) = TREE_TYPE (type_node);
1998 return type;
2001 /* Return the signed version of a TYPE_NODE, a scalar type. */
2003 tree
2004 gnat_signed_type (type_node)
2005 tree type_node;
2007 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2009 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2011 type = copy_node (type);
2012 TREE_TYPE (type) = type_node;
2014 else if (TREE_TYPE (type_node) != 0
2015 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2016 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2018 type = copy_node (type);
2019 TREE_TYPE (type) = TREE_TYPE (type_node);
2022 return type;
2025 /* Return a type the same as TYPE except unsigned or signed according to
2026 UNSIGNEDP. */
2028 tree
2029 gnat_signed_or_unsigned_type (unsignedp, type)
2030 int unsignedp;
2031 tree type;
2033 if (! INTEGRAL_TYPE_P (type) || TREE_UNSIGNED (type) == unsignedp)
2034 return type;
2035 else
2036 return gnat_type_for_size (TYPE_PRECISION (type), unsignedp);
2039 /* EXP is an expression for the size of an object. If this size contains
2040 discriminant references, replace them with the maximum (if MAX_P) or
2041 minimum (if ! MAX_P) possible value of the discriminant. */
2043 tree
2044 max_size (exp, max_p)
2045 tree exp;
2046 int max_p;
2048 enum tree_code code = TREE_CODE (exp);
2049 tree type = TREE_TYPE (exp);
2051 switch (TREE_CODE_CLASS (code))
2053 case 'd':
2054 case 'c':
2055 return exp;
2057 case 'x':
2058 if (code == TREE_LIST)
2059 return tree_cons (TREE_PURPOSE (exp),
2060 max_size (TREE_VALUE (exp), max_p),
2061 TREE_CHAIN (exp) != 0
2062 ? max_size (TREE_CHAIN (exp), max_p) : 0);
2063 break;
2065 case 'r':
2066 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2067 modify. Otherwise, we abort since it is something we can't
2068 handle. */
2069 if (! contains_placeholder_p (exp))
2070 gigi_abort (406);
2072 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2073 return
2074 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), 1);
2076 case '<':
2077 return max_p ? size_one_node : size_zero_node;
2079 case '1':
2080 case '2':
2081 case 'e':
2082 switch (TREE_CODE_LENGTH (code))
2084 case 1:
2085 if (code == NON_LVALUE_EXPR)
2086 return max_size (TREE_OPERAND (exp, 0), max_p);
2087 else
2088 return
2089 fold (build1 (code, type,
2090 max_size (TREE_OPERAND (exp, 0),
2091 code == NEGATE_EXPR ? ! max_p : max_p)));
2093 case 2:
2094 if (code == RTL_EXPR)
2095 gigi_abort (407);
2096 else if (code == COMPOUND_EXPR)
2097 return max_size (TREE_OPERAND (exp, 1), max_p);
2098 else if (code == WITH_RECORD_EXPR)
2099 return exp;
2102 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2103 tree rhs = max_size (TREE_OPERAND (exp, 1),
2104 code == MINUS_EXPR ? ! max_p : max_p);
2106 /* Special-case wanting the maximum value of a MIN_EXPR.
2107 In that case, if one side overflows, return the other.
2108 sizetype is signed, but we know sizes are non-negative.
2109 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2110 overflowing or the maximum possible value and the RHS
2111 a variable. */
2112 if (max_p && code == MIN_EXPR && TREE_OVERFLOW (rhs))
2113 return lhs;
2114 else if (max_p && code == MIN_EXPR && TREE_OVERFLOW (lhs))
2115 return rhs;
2116 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2117 && (TREE_OVERFLOW (lhs)
2118 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2119 && ! TREE_CONSTANT (rhs))
2120 return lhs;
2121 else
2122 return fold (build (code, type, lhs, rhs));
2125 case 3:
2126 if (code == SAVE_EXPR)
2127 return exp;
2128 else if (code == COND_EXPR)
2129 return fold (build (MAX_EXPR, type,
2130 max_size (TREE_OPERAND (exp, 1), max_p),
2131 max_size (TREE_OPERAND (exp, 2), max_p)));
2132 else if (code == CALL_EXPR && TREE_OPERAND (exp, 1) != 0)
2133 return build (CALL_EXPR, type, TREE_OPERAND (exp, 0),
2134 max_size (TREE_OPERAND (exp, 1), max_p));
2138 gigi_abort (408);
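/* Illustrative sketch, not taken from the GNAT sources: for an object
   whose size expression is 8 + 4 * D bits, with a discriminant D ranging
   over 1 .. 10, the routine above replaces D by whichever bound
   maximizes (or minimizes) the whole expression. Stand-alone model with
   invented names: */
#if 0
static long
example_max_size (long d_min, long d_max, int max_p)
{
  long d = max_p ? d_max : d_min;   /* substitute the discriminant bound */
  return 8 + 4 * d;                 /* then evaluate the size formula:
                                       48 bits when max_p, 12 otherwise  */
}
#endif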
2141 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2142 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2143 Return a constructor for the template. */
2145 tree
2146 build_template (template_type, array_type, expr)
2147 tree template_type;
2148 tree array_type;
2149 tree expr;
2151 tree template_elts = NULL_TREE;
2152 tree bound_list = NULL_TREE;
2153 tree field;
2155 if (TREE_CODE (array_type) == RECORD_TYPE
2156 && (TYPE_IS_PADDING_P (array_type)
2157 || TYPE_LEFT_JUSTIFIED_MODULAR_P (array_type)))
2158 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2160 if (TREE_CODE (array_type) == ARRAY_TYPE
2161 || (TREE_CODE (array_type) == INTEGER_TYPE
2162 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2163 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2165 /* First make the list for a CONSTRUCTOR for the template. Go down the
2166 field list of the template instead of the type chain because this
2167 array might be an Ada array of arrays and we can't tell where the
2168 nested arrays stop being the underlying object. */
2170 for (field = TYPE_FIELDS (template_type); field;
2171 (bound_list != 0
2172 ? (bound_list = TREE_CHAIN (bound_list))
2173 : (array_type = TREE_TYPE (array_type))),
2174 field = TREE_CHAIN (TREE_CHAIN (field)))
2176 tree bounds, min, max;
2178 /* If we have a bound list, get the bounds from there. Likewise
2179 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2180 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2181 This will give us a maximum range. */
2182 if (bound_list != 0)
2183 bounds = TREE_VALUE (bound_list);
2184 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2185 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2186 else if (expr != 0 && TREE_CODE (expr) == PARM_DECL
2187 && DECL_BY_COMPONENT_PTR_P (expr))
2188 bounds = TREE_TYPE (field);
2189 else
2190 gigi_abort (411);
2192 min = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MIN_VALUE (bounds));
2193 max = convert (TREE_TYPE (field), TYPE_MAX_VALUE (bounds));
2195 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2196 surround them with a WITH_RECORD_EXPR giving EXPR as the
2197 OBJECT. */
2198 if (! TREE_CONSTANT (min) && contains_placeholder_p (min))
2199 min = build (WITH_RECORD_EXPR, TREE_TYPE (min), min, expr);
2200 if (! TREE_CONSTANT (max) && contains_placeholder_p (max))
2201 max = build (WITH_RECORD_EXPR, TREE_TYPE (max), max, expr);
2203 template_elts = tree_cons (TREE_CHAIN (field), max,
2204 tree_cons (field, min, template_elts));
2207 return build_constructor (template_type, nreverse (template_elts));
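/* Illustrative sketch, not taken from the GNAT sources: for a
   one-dimensional array with index range 1 .. 10 the constructor built
   above fills one low/high pair; for a two-dimensional array it fills
   one pair per dimension, in declaration order. The types and values
   below are invented for illustration only. */
#if 0
struct example_template_1d { int lb1, ub1; };
struct example_template_2d { int lb1, ub1, lb2, ub2; };

static const struct example_template_1d example_t1 = { 1, 10 };
static const struct example_template_2d example_t2 = { 1, 3, 1, 4 };
#endif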
2210 /* Build a VMS descriptor from a Mechanism_Type, which must specify
2211 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2212 in the type contains in its DECL_INITIAL the expression to use when
2213 a constructor is made for the type. GNAT_ENTITY is a gnat node used
2214 to print out an error message if the mechanism cannot be applied to
2215 an object of that type and also for the name. */
2217 tree
2218 build_vms_descriptor (type, mech, gnat_entity)
2219 tree type;
2220 Mechanism_Type mech;
2221 Entity_Id gnat_entity;
2223 tree record_type = make_node (RECORD_TYPE);
2224 tree field_list = 0;
2225 int class;
2226 int dtype = 0;
2227 tree inner_type;
2228 int ndim;
2229 int i;
2230 tree *idx_arr;
2231 tree tem;
2233 /* If TYPE is an unconstrained array, use the underlying array type. */
2234 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2235 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2237 /* If this is an array, compute the number of dimensions in the array,
2238 get the index types, and point to the inner type. */
2239 if (TREE_CODE (type) != ARRAY_TYPE)
2240 ndim = 0;
2241 else
2242 for (ndim = 1, inner_type = type;
2243 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2244 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2245 ndim++, inner_type = TREE_TYPE (inner_type))
2248 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2250 if (mech != By_Descriptor_NCA
2251 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2252 for (i = ndim - 1, inner_type = type;
2253 i >= 0;
2254 i--, inner_type = TREE_TYPE (inner_type))
2255 idx_arr[i] = TYPE_DOMAIN (inner_type);
2256 else
2257 for (i = 0, inner_type = type;
2258 i < ndim;
2259 i++, inner_type = TREE_TYPE (inner_type))
2260 idx_arr[i] = TYPE_DOMAIN (inner_type);
2262 /* Now get the DTYPE value. */
2263 switch (TREE_CODE (type))
2265 case INTEGER_TYPE:
2266 case ENUMERAL_TYPE:
2267 if (TYPE_VAX_FLOATING_POINT_P (type))
2268 switch ((int) TYPE_DIGITS_VALUE (type))
2270 case 6:
2271 dtype = 10;
2272 break;
2273 case 9:
2274 dtype = 11;
2275 break;
2276 case 15:
2277 dtype = 27;
2278 break;
2280 else
2281 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2283 case 8:
2284 dtype = TREE_UNSIGNED (type) ? 2 : 6;
2285 break;
2286 case 16:
2287 dtype = TREE_UNSIGNED (type) ? 3 : 7;
2288 break;
2289 case 32:
2290 dtype = TREE_UNSIGNED (type) ? 4 : 8;
2291 break;
2292 case 64:
2293 dtype = TREE_UNSIGNED (type) ? 5 : 9;
2294 break;
2295 case 128:
2296 dtype = TREE_UNSIGNED (type) ? 25 : 26;
2297 break;
2299 break;
2301 case REAL_TYPE:
2302 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2303 break;
2305 case COMPLEX_TYPE:
2306 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2307 && TYPE_VAX_FLOATING_POINT_P (type))
2308 switch ((int) TYPE_DIGITS_VALUE (type))
2310 case 6:
2311 dtype = 12;
2312 break;
2313 case 9:
2314 dtype = 13;
2315 break;
2316 case 15:
2317 dtype = 29;
2319 else
2320 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2321 break;
2323 case ARRAY_TYPE:
2324 dtype = 14;
2325 break;
2327 default:
2328 break;
2331 /* Get the CLASS value. */
2332 switch (mech)
2334 case By_Descriptor_A:
2335 class = 4;
2336 break;
2337 case By_Descriptor_NCA:
2338 class = 10;
2339 break;
2340 case By_Descriptor_SB:
2341 class = 15;
2342 break;
2343 default:
2344 class = 1;
2347 /* Make the type for a descriptor for VMS. The first four fields
2348 are the same for all types. */
2350 field_list
2351 = chainon (field_list,
2352 make_descriptor_field
2353 ("LENGTH", gnat_type_for_size (16, 1), record_type,
2354 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2356 field_list = chainon (field_list,
2357 make_descriptor_field ("DTYPE",
2358 gnat_type_for_size (8, 1),
2359 record_type, size_int (dtype)));
2360 field_list = chainon (field_list,
2361 make_descriptor_field ("CLASS",
2362 gnat_type_for_size (8, 1),
2363 record_type, size_int (class)));
2365 field_list
2366 = chainon (field_list,
2367 make_descriptor_field ("POINTER",
2368 build_pointer_type (type),
2369 record_type,
2370 build1 (ADDR_EXPR,
2371 build_pointer_type (type),
2372 build (PLACEHOLDER_EXPR,
2373 type))));
2375 switch (mech)
2377 case By_Descriptor:
2378 case By_Descriptor_S:
2379 break;
2381 case By_Descriptor_SB:
2382 field_list
2383 = chainon (field_list,
2384 make_descriptor_field
2385 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2386 TREE_CODE (type) == ARRAY_TYPE
2387 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2388 field_list
2389 = chainon (field_list,
2390 make_descriptor_field
2391 ("SB_L2", gnat_type_for_size (32, 1), record_type,
2392 TREE_CODE (type) == ARRAY_TYPE
2393 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2394 break;
2396 case By_Descriptor_A:
2397 case By_Descriptor_NCA:
2398 field_list = chainon (field_list,
2399 make_descriptor_field ("SCALE",
2400 gnat_type_for_size (8, 1),
2401 record_type,
2402 size_zero_node));
2404 field_list = chainon (field_list,
2405 make_descriptor_field ("DIGITS",
2406 gnat_type_for_size (8, 1),
2407 record_type,
2408 size_zero_node));
2410 field_list
2411 = chainon (field_list,
2412 make_descriptor_field
2413 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2414 size_int (mech == By_Descriptor_NCA
2416 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2417 : (TREE_CODE (type) == ARRAY_TYPE
2418 && TYPE_CONVENTION_FORTRAN_P (type)
2419 ? 224 : 192))));
2421 field_list = chainon (field_list,
2422 make_descriptor_field ("DIMCT",
2423 gnat_type_for_size (8, 1),
2424 record_type,
2425 size_int (ndim)));
2427 field_list = chainon (field_list,
2428 make_descriptor_field ("ARSIZE",
2429 gnat_type_for_size (32, 1),
2430 record_type,
2431 size_in_bytes (type)));
2433 /* Now build a pointer to the 0,0,0... element. */
2434 tem = build (PLACEHOLDER_EXPR, type);
2435 for (i = 0, inner_type = type; i < ndim;
2436 i++, inner_type = TREE_TYPE (inner_type))
2437 tem = build (ARRAY_REF, TREE_TYPE (inner_type), tem,
2438 convert (TYPE_DOMAIN (inner_type), size_zero_node));
2440 field_list
2441 = chainon (field_list,
2442 make_descriptor_field
2443 ("A0", build_pointer_type (inner_type), record_type,
2444 build1 (ADDR_EXPR, build_pointer_type (inner_type), tem)));
2446 /* Next come the addressing coefficients. */
2447 tem = size_int (1);
2448 for (i = 0; i < ndim; i++)
2450 char fname[3];
2451 tree idx_length
2452 = size_binop (MULT_EXPR, tem,
2453 size_binop (PLUS_EXPR,
2454 size_binop (MINUS_EXPR,
2455 TYPE_MAX_VALUE (idx_arr[i]),
2456 TYPE_MIN_VALUE (idx_arr[i])),
2457 size_int (1)));
2459 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
2460 fname[1] = '0' + i, fname[2] = 0;
2461 field_list
2462 = chainon (field_list,
2463 make_descriptor_field (fname,
2464 gnat_type_for_size (32, 1),
2465 record_type, idx_length));
2467 if (mech == By_Descriptor_NCA)
2468 tem = idx_length;
2471 /* Finally here are the bounds. */
2472 for (i = 0; i < ndim; i++)
2474 char fname[3];
2476 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2477 field_list
2478 = chainon (field_list,
2479 make_descriptor_field
2480 (fname, gnat_type_for_size (32, 1), record_type,
2481 TYPE_MIN_VALUE (idx_arr[i])));
2483 fname[0] = 'U';
2484 field_list
2485 = chainon (field_list,
2486 make_descriptor_field
2487 (fname, gnat_type_for_size (32, 1), record_type,
2488 TYPE_MAX_VALUE (idx_arr[i])));
2490 break;
2492 default:
2493 post_error ("unsupported descriptor type for &", gnat_entity);
2496 finish_record_type (record_type, field_list, 0, 1);
2497 pushdecl (build_decl (TYPE_DECL, create_concat_name (gnat_entity, "DESC"),
2498 record_type));
2500 return record_type;
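/* Illustrative sketch, not taken from the GNAT sources: the first four
   fields built above form the fixed descriptor prefix. In plain C terms,
   and assuming the 32-bit pointers of the VAX/VMS ABI, it is roughly the
   layout below (names invented for illustration). */
#if 0
#include <stdint.h>

struct example_vms_desc_prefix
{
  uint16_t length;   /* LENGTH: size in bytes of the object (or, for
                        By_Descriptor_A, of one element)              */
  uint8_t  dtype;    /* DTYPE: data-type code, e.g. 14 for an array   */
  uint8_t  dclass;   /* CLASS: 1, 4, 10 or 15 depending on mechanism  */
  uint32_t pointer;  /* POINTER: address of the data                  */
};
#endif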
2503 /* Utility routine for above code to make a field. */
2505 static tree
2506 make_descriptor_field (name, type, rec_type, initial)
2507 const char *name;
2508 tree type;
2509 tree rec_type;
2510 tree initial;
2512 tree field
2513 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2515 DECL_INITIAL (field) = initial;
2516 return field;
2519 /* Build a type to be used to represent an aliased object whose nominal
2520 type is an unconstrained array. This consists of a RECORD_TYPE containing
2521 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
2522 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
2523 is used to represent an arbitrary unconstrained object. Use NAME
2524 as the name of the record. */
2526 tree
2527 build_unc_object_type (template_type, object_type, name)
2528 tree template_type;
2529 tree object_type;
2530 tree name;
2532 tree type = make_node (RECORD_TYPE);
2533 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
2534 template_type, type, 0, 0, 0, 1);
2535 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
2536 type, 0, 0, 0, 1);
2538 TYPE_NAME (type) = name;
2539 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
2540 finish_record_type (type,
2541 chainon (chainon (NULL_TREE, template_field),
2542 array_field),
2543 0, 0);
2545 return type;
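/* Illustrative sketch, not taken from the GNAT sources: the record built
   above places the bounds template directly in front of the array data,
   so an aliased unconstrained array object behaves roughly like the
   plain C layout below (names invented for illustration). */
#if 0
struct example_unc_object
{
  struct { int lb, ub; } bounds;   /* the BOUNDS field (template) */
  char array[1];                   /* the ARRAY field (data)      */
};
#endif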
2548 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
2549 the normal case this is just two adjustments, but we have more to do
2550 if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
2552 void
2553 update_pointer_to (old_type, new_type)
2554 tree old_type;
2555 tree new_type;
2557 tree ptr = TYPE_POINTER_TO (old_type);
2558 tree ref = TYPE_REFERENCE_TO (old_type);
2559 tree type;
2561 /* If this is the main variant, process all the other variants first. */
2562 if (TYPE_MAIN_VARIANT (old_type) == old_type)
2563 for (type = TYPE_NEXT_VARIANT (old_type); type != 0;
2564 type = TYPE_NEXT_VARIANT (type))
2565 update_pointer_to (type, new_type);
2567 /* If no pointer or reference, we are done. Otherwise, get the new type with
2568 the same qualifiers as the old type and see if it is the same as the old
2569 type. */
2570 if (ptr == 0 && ref == 0)
2571 return;
2573 new_type = build_qualified_type (new_type, TYPE_QUALS (old_type));
2574 if (old_type == new_type)
2575 return;
2577 /* First handle the simple case. */
2578 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
2580 if (ptr != 0)
2581 TREE_TYPE (ptr) = new_type;
2582 TYPE_POINTER_TO (new_type) = ptr;
2584 if (ref != 0)
2585 TREE_TYPE (ref) = new_type;
2586 TYPE_REFERENCE_TO (new_type) = ref;
2588 if (ptr != 0 && TYPE_NAME (ptr) != 0
2589 && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL
2590 && TREE_CODE (new_type) != ENUMERAL_TYPE)
2591 rest_of_decl_compilation (TYPE_NAME (ptr), NULL,
2592 global_bindings_p (), 0);
2593 if (ref != 0 && TYPE_NAME (ref) != 0
2594 && TREE_CODE (TYPE_NAME (ref)) == TYPE_DECL
2595 && TREE_CODE (new_type) != ENUMERAL_TYPE)
2596 rest_of_decl_compilation (TYPE_NAME (ref), NULL,
2597 global_bindings_p (), 0);
2600 /* Now deal with the unconstrained array case. In this case the "pointer"
2601 is actually a RECORD_TYPE where the types of both fields are
2602 pointers to void. In that case, copy the field list from the
2603 old type to the new one and update the fields' context. */
2604 else if (TREE_CODE (ptr) != RECORD_TYPE || ! TYPE_IS_FAT_POINTER_P (ptr))
2605 gigi_abort (412);
2607 else
2609 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
2610 tree ptr_temp_type;
2611 tree new_ref;
2612 tree var;
2614 TYPE_FIELDS (ptr) = TYPE_FIELDS (TYPE_POINTER_TO (new_type));
2615 DECL_CONTEXT (TYPE_FIELDS (ptr)) = ptr;
2616 DECL_CONTEXT (TREE_CHAIN (TYPE_FIELDS (ptr))) = ptr;
2618 /* Rework the PLACEHOLDER_EXPR inside the reference to the
2619 template bounds.
2621 ??? This is now the only use of gnat_substitute_in_type, which is a
2622 very "heavy" routine for this purpose, so it should be replaced
2623 at some point. */
2624 ptr_temp_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (ptr)));
2625 new_ref = build (COMPONENT_REF, ptr_temp_type,
2626 build (PLACEHOLDER_EXPR, ptr),
2627 TREE_CHAIN (TYPE_FIELDS (ptr)));
2629 update_pointer_to
2630 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
2631 gnat_substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
2632 TREE_CHAIN (TYPE_FIELDS (ptr)), new_ref));
2634 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
2635 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
2637 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
2638 = TREE_TYPE (new_type) = ptr;
2640 /* Now handle updating the allocation record, what the thin pointer
2641 points to. Update all pointers from the old record into the new
2642 one, update the types of the fields, and recompute the size. */
2644 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
2646 TREE_TYPE (TYPE_FIELDS (new_obj_rec)) = TREE_TYPE (ptr_temp_type);
2647 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2648 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr)));
2649 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2650 = TYPE_SIZE (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
2651 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2652 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
2654 TYPE_SIZE (new_obj_rec)
2655 = size_binop (PLUS_EXPR,
2656 DECL_SIZE (TYPE_FIELDS (new_obj_rec)),
2657 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
2658 TYPE_SIZE_UNIT (new_obj_rec)
2659 = size_binop (PLUS_EXPR,
2660 DECL_SIZE_UNIT (TYPE_FIELDS (new_obj_rec)),
2661 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
2662 rest_of_type_compilation (ptr, global_bindings_p ());
2666 /* Convert a pointer to a constrained array into a fat pointer. This
2667 involves making or finding a template. */
2669 static tree
2670 convert_to_fat_pointer (type, expr)
2671 tree type;
2672 tree expr;
2674 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
2675 tree template, template_addr;
2676 tree etype = TREE_TYPE (expr);
2678 /* If EXPR is a constant of zero, we make a fat pointer that has a null
2679 pointer to the template and array. */
2680 if (integer_zerop (expr))
2681 return
2682 build_constructor
2683 (type,
2684 tree_cons (TYPE_FIELDS (type),
2685 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
2686 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2687 convert (build_pointer_type (template_type),
2688 expr),
2689 NULL_TREE)));
2691 /* If EXPR is a thin pointer, make the template and data from the record. */
2693 else if (TYPE_THIN_POINTER_P (etype))
2695 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
2697 expr = save_expr (expr);
2698 if (TREE_CODE (expr) == ADDR_EXPR)
2699 expr = TREE_OPERAND (expr, 0);
2700 else
2701 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
2703 template = build_component_ref (expr, NULL_TREE, fields);
2704 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
2705 build_component_ref (expr, NULL_TREE,
2706 TREE_CHAIN (fields)));
2708 else
2709 /* Otherwise, build the constructor for the template. */
2710 template = build_template (template_type, TREE_TYPE (etype), expr);
2712 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
2714 /* The result is a CONSTRUCTOR for the fat pointer. */
2715 return
2716 build_constructor (type,
2717 tree_cons (TYPE_FIELDS (type), expr,
2718 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2719 template_addr, NULL_TREE)));
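/* Illustrative sketch, not taken from the GNAT sources: the CONSTRUCTOR
   returned above fills a two-field fat-pointer record which, for a
   one-dimensional integer array, behaves like the plain C layout below:
   the data pointer first, then the pointer to the bounds template
   (struct names invented for illustration). */
#if 0
struct example_bounds  { int lb, ub; };
struct example_fat_ptr { int *p_array; struct example_bounds *p_bounds; };
#endif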
2722 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
2723 is something that is a fat pointer, so convert to it first if EXPR
2724 is not already a fat pointer. */
2726 static tree
2727 convert_to_thin_pointer (type, expr)
2728 tree type;
2729 tree expr;
2731 if (! TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
2732 expr
2733 = convert_to_fat_pointer
2734 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
2736 /* We get the pointer to the data and use a NOP_EXPR to make it the
2737 proper GCC type. */
2738 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)));
2739 expr = build1 (NOP_EXPR, type, expr);
2741 return expr;
2744 /* Create an expression whose value is that of EXPR,
2745 converted to type TYPE. The TREE_TYPE of the value
2746 is always TYPE. This function implements all reasonable
2747 conversions; callers should filter out those that are
2748 not permitted by the language being compiled. */
2750 tree
2751 convert (type, expr)
2752 tree type, expr;
2754 enum tree_code code = TREE_CODE (type);
2755 tree etype = TREE_TYPE (expr);
2756 enum tree_code ecode = TREE_CODE (etype);
2757 tree tem;
2759 /* If EXPR is already the right type, we are done. */
2760 if (type == etype)
2761 return expr;
2763 /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
2764 new one. */
2765 if (TREE_CODE (expr) == WITH_RECORD_EXPR)
2766 return build (WITH_RECORD_EXPR, type,
2767 convert (type, TREE_OPERAND (expr, 0)),
2768 TREE_OPERAND (expr, 1));
2770 /* If the input type has padding, remove it by doing a component reference
2771 to the field. If the output type has padding, make a constructor
2772 to build the record. If both input and output have padding and are
2773 of variable size, do this as an unchecked conversion. */
2774 if (ecode == RECORD_TYPE && code == RECORD_TYPE
2775 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
2776 && (! TREE_CONSTANT (TYPE_SIZE (type))
2777 || ! TREE_CONSTANT (TYPE_SIZE (etype))))
2779 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
2781 /* If we have just converted to this padded type, just get
2782 the inner expression. */
2783 if (TREE_CODE (expr) == CONSTRUCTOR
2784 && CONSTRUCTOR_ELTS (expr) != 0
2785 && TREE_PURPOSE (CONSTRUCTOR_ELTS (expr)) == TYPE_FIELDS (etype))
2786 return TREE_VALUE (CONSTRUCTOR_ELTS (expr));
2787 else
2788 return convert (type, build_component_ref (expr, NULL_TREE,
2789 TYPE_FIELDS (etype)));
2791 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
2793 /* If we previously converted from another type and our type is
2794 of variable size, remove the conversion to avoid the need for
2795 variable-size temporaries. */
2796 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
2797 && ! TREE_CONSTANT (TYPE_SIZE (type)))
2798 expr = TREE_OPERAND (expr, 0);
2800 /* If we are just removing the padding from expr, convert the original
2801 object if we have variable size. That will avoid the need
2802 for some variable-size temporaries. */
2803 if (TREE_CODE (expr) == COMPONENT_REF
2804 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
2805 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
2806 && ! TREE_CONSTANT (TYPE_SIZE (type)))
2807 return convert (type, TREE_OPERAND (expr, 0));
2809 /* If the result type is a padded type with a self-referentially-sized
2810 field and the expression type is a record, do this as an
2811 unchecked conversion. */
2812 else if (TREE_CODE (DECL_SIZE (TYPE_FIELDS (type))) != INTEGER_CST
2813 && contains_placeholder_p (DECL_SIZE (TYPE_FIELDS (type)))
2814 && TREE_CODE (etype) == RECORD_TYPE)
2815 return unchecked_convert (type, expr);
2817 else
2818 return
2819 build_constructor (type,
2820 tree_cons (TYPE_FIELDS (type),
2821 convert (TREE_TYPE
2822 (TYPE_FIELDS (type)),
2823 expr),
2824 NULL_TREE));
2827 /* If the input is a biased type, adjust first. */
2828 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
2829 return convert (type, fold (build (PLUS_EXPR, TREE_TYPE (etype),
2830 fold (build1 (GNAT_NOP_EXPR,
2831 TREE_TYPE (etype), expr)),
2832 TYPE_MIN_VALUE (etype))));
2834 /* If the input is a left-justified modular type, we need to extract
2835 the actual object before converting it to any other type with the
2836 exception of an unconstrained array. */
2837 if (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)
2838 && code != UNCONSTRAINED_ARRAY_TYPE)
2839 return convert (type, build_component_ref (expr, NULL_TREE,
2840 TYPE_FIELDS (etype)));
2842 /* If converting a type that does not contain a template into one
2843 that does, convert to the data type and then build the template. */
2844 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type)
2845 && ! (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype)))
2847 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
2849 return
2850 build_constructor
2851 (type,
2852 tree_cons (TYPE_FIELDS (type),
2853 build_template (TREE_TYPE (TYPE_FIELDS (type)),
2854 obj_type, NULL_TREE),
2855 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2856 convert (obj_type, expr), NULL_TREE)));
2859 /* There are some special cases of expressions that we process
2860 specially. */
2861 switch (TREE_CODE (expr))
2863 case ERROR_MARK:
2864 return expr;
2866 case TRANSFORM_EXPR:
2867 case NULL_EXPR:
2868 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
2869 conversion in gnat_expand_expr. NULL_EXPR does not represent
2870 an actual value, so no conversion is needed. */
2871 TREE_TYPE (expr) = type;
2872 return expr;
2874 case STRING_CST:
2875 case CONSTRUCTOR:
2876 /* If we are converting a STRING_CST to another constrained array type,
2877 just make a new one in the proper type. Likewise for a
2878 CONSTRUCTOR. But if the mode of the type is different, we must
2879 ensure a new RTL is made for the constant. */
2880 if (code == ecode && AGGREGATE_TYPE_P (etype)
2881 && ! (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
2882 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
2884 expr = copy_node (expr);
2885 TREE_TYPE (expr) = type;
2887 if (TYPE_MODE (type) != TYPE_MODE (etype))
2888 TREE_CST_RTL (expr) = 0;
2890 return expr;
2892 break;
2894 case COMPONENT_REF:
2895 /* If we are converting between two aggregate types of the same
2896 kind, size, mode, and alignment, just make a new COMPONENT_REF.
2897 This avoids unneeded conversions which make reference computations
2898 more complex. */
2899 if (code == ecode && TYPE_MODE (type) == TYPE_MODE (etype)
2900 && AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
2901 && TYPE_ALIGN (type) == TYPE_ALIGN (etype)
2902 && operand_equal_p (TYPE_SIZE (type), TYPE_SIZE (etype), 0))
2903 return build (COMPONENT_REF, type, TREE_OPERAND (expr, 0),
2904 TREE_OPERAND (expr, 1));
2906 break;
2908 case UNCONSTRAINED_ARRAY_REF:
2909 /* Convert this to the type of the inner array by getting the address of
2910 the array from the template. */
2911 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
2912 build_component_ref (TREE_OPERAND (expr, 0),
2913 get_identifier ("P_ARRAY"),
2914 NULL_TREE));
2915 etype = TREE_TYPE (expr);
2916 ecode = TREE_CODE (etype);
2917 break;
2919 case VIEW_CONVERT_EXPR:
2920 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
2921 && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
2922 return convert (type, TREE_OPERAND (expr, 0));
2923 break;
2925 case INDIRECT_REF:
2926 /* If both types are record types, just convert the pointer and
2927 make a new INDIRECT_REF.
2929 ??? Disable this for now since it causes problems with the
2930 code in build_binary_op for MODIFY_EXPR which wants to
2931 strip off conversions. But that code really is a mess and
2932 we need to do this in a much better way at some point. */
2933 if (0
2934 && (TREE_CODE (type) == RECORD_TYPE
2935 || TREE_CODE (type) == UNION_TYPE)
2936 && (TREE_CODE (etype) == RECORD_TYPE
2937 || TREE_CODE (etype) == UNION_TYPE)
2938 && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
2939 return build_unary_op (INDIRECT_REF, NULL_TREE,
2940 convert (build_pointer_type (type),
2941 TREE_OPERAND (expr, 0)));
2942 break;
2944 default:
2945 break;
2948 /* Check for converting to a pointer to an unconstrained array. */
2949 if (TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
2950 return convert_to_fat_pointer (type, expr);
2952 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
2953 || (code == INTEGER_CST && ecode == INTEGER_CST
2954 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
2955 return fold (build1 (NOP_EXPR, type, expr));
2957 switch (code)
2959 case VOID_TYPE:
2960 return build1 (CONVERT_EXPR, type, expr);
2962 case INTEGER_TYPE:
2963 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
2964 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE))
2965 return unchecked_convert (type, expr);
2966 else if (TYPE_BIASED_REPRESENTATION_P (type))
2967 return fold (build1 (CONVERT_EXPR, type,
2968 fold (build (MINUS_EXPR, TREE_TYPE (type),
2969 convert (TREE_TYPE (type), expr),
2970 TYPE_MIN_VALUE (type)))));
2972 /* ... fall through ... */
2974 case ENUMERAL_TYPE:
2975 return fold (convert_to_integer (type, expr));
2977 case POINTER_TYPE:
2978 case REFERENCE_TYPE:
2979 /* If converting between two pointers to records denoting
2980 both a template and type, adjust if needed to account
2981 for any differing offsets, since one might be negative. */
2982 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
2984 tree bit_diff
2985 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
2986 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
2987 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
2988 sbitsize_int (BITS_PER_UNIT));
2990 expr = build1 (NOP_EXPR, type, expr);
2991 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
2992 if (integer_zerop (byte_diff))
2993 return expr;
2995 return build_binary_op (PLUS_EXPR, type, expr,
2996 fold (convert_to_pointer (type, byte_diff)));
2999 /* If converting to a thin pointer, handle specially. */
3000 if (TYPE_THIN_POINTER_P (type)
3001 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != 0)
3002 return convert_to_thin_pointer (type, expr);
3004 /* If converting fat pointer to normal pointer, get the pointer to the
3005 array and then convert it. */
3006 else if (TYPE_FAT_POINTER_P (etype))
3007 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
3008 NULL_TREE);
3010 return fold (convert_to_pointer (type, expr));
3012 case REAL_TYPE:
3013 return fold (convert_to_real (type, expr));
3015 case RECORD_TYPE:
3016 if (TYPE_LEFT_JUSTIFIED_MODULAR_P (type) && ! AGGREGATE_TYPE_P (etype))
3017 return
3018 build_constructor
3019 (type, tree_cons (TYPE_FIELDS (type),
3020 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3021 NULL_TREE));
3023 /* ... fall through ... */
3025 case ARRAY_TYPE:
3026 /* In these cases, assume the front-end has validated the conversion.
3027 If the conversion is valid, it will be a bit-wise conversion, so
3028 it can be viewed as an unchecked conversion. */
3029 return unchecked_convert (type, expr);
3031 case UNION_TYPE:
3032 /* Just validate that the type is indeed that of a field
3033 of the type. Then make the simple conversion. */
3034 for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
3035 if (TREE_TYPE (tem) == etype)
3036 return build1 (CONVERT_EXPR, type, expr);
3038 gigi_abort (413);
3040 case UNCONSTRAINED_ARRAY_TYPE:
3041 /* If EXPR is a constrained array, take its address, convert it to a
3042 fat pointer, and then dereference it. Likewise if EXPR is a
3043 record containing both a template and a constrained array.
3044 Note that a record representing a left-justified modular type
3045 always represents a packed constrained array. */
3046 if (ecode == ARRAY_TYPE
3047 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
3048 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
3049 || (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)))
3050 return
3051 build_unary_op
3052 (INDIRECT_REF, NULL_TREE,
3053 convert_to_fat_pointer (TREE_TYPE (type),
3054 build_unary_op (ADDR_EXPR,
3055 NULL_TREE, expr)));
3057 /* Do something very similar for converting one unconstrained
3058 array to another. */
3059 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
3060 return
3061 build_unary_op (INDIRECT_REF, NULL_TREE,
3062 convert (TREE_TYPE (type),
3063 build_unary_op (ADDR_EXPR,
3064 NULL_TREE, expr)));
3065 else
3066 gigi_abort (409);
3068 case COMPLEX_TYPE:
3069 return fold (convert_to_complex (type, expr));
3071 default:
3072 gigi_abort (410);
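/* Illustrative sketch, not taken from the GNAT sources: a biased type
   with range 100 .. 103 stores only VALUE - 100, which fits in two bits.
   Converting out of such a type adds the bias back, and converting into
   it subtracts the bias, as in the stand-alone model below (the names
   and the bias value are invented for illustration). */
#if 0
static unsigned
example_to_biased (int value)
{
  return (unsigned) (value - 100);   /* 100 .. 103  ->  0 .. 3 */
}

static int
example_from_biased (unsigned bits)
{
  return (int) bits + 100;           /* 0 .. 3  ->  100 .. 103 */
}
#endif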
3076 /* Remove all conversions that are done in EXP. This includes converting
3077 from a padded type or to a left-justified modular type. If TRUE_ADDRESS
3078 is nonzero, always return the address of the containing object even if
3079 the address is not bit-aligned. */
3081 tree
3082 remove_conversions (exp, true_address)
3083 tree exp;
3084 int true_address;
3086 switch (TREE_CODE (exp))
3088 case CONSTRUCTOR:
3089 if (true_address
3090 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3091 && TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
3092 return remove_conversions (TREE_VALUE (CONSTRUCTOR_ELTS (exp)), 1);
3093 break;
3095 case COMPONENT_REF:
3096 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3097 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3098 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3099 break;
3101 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
3102 case NOP_EXPR: case CONVERT_EXPR: case GNAT_NOP_EXPR:
3103 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3105 default:
3106 break;
3109 return exp;
3112 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
3113 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
3114 likewise return an expression pointing to the underlying array. */
3116 tree
3117 maybe_unconstrained_array (exp)
3118 tree exp;
3120 enum tree_code code = TREE_CODE (exp);
3121 tree new;
3123 switch (TREE_CODE (TREE_TYPE (exp)))
3125 case UNCONSTRAINED_ARRAY_TYPE:
3126 if (code == UNCONSTRAINED_ARRAY_REF)
3129 = build_unary_op (INDIRECT_REF, NULL_TREE,
3130 build_component_ref (TREE_OPERAND (exp, 0),
3131 get_identifier ("P_ARRAY"),
3132 NULL_TREE));
3133 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
3134 return new;
3137 else if (code == NULL_EXPR)
3138 return build1 (NULL_EXPR,
3139 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
3140 (TREE_TYPE (TREE_TYPE (exp))))),
3141 TREE_OPERAND (exp, 0));
3143 else if (code == WITH_RECORD_EXPR
3144 && (TREE_OPERAND (exp, 0)
3145 != (new = maybe_unconstrained_array
3146 (TREE_OPERAND (exp, 0)))))
3147 return build (WITH_RECORD_EXPR, TREE_TYPE (new), new,
3148 TREE_OPERAND (exp, 1));
3150 case RECORD_TYPE:
3151 if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
3154 = build_component_ref (exp, NULL_TREE,
3155 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))));
3156 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
3157 && TYPE_IS_PADDING_P (TREE_TYPE (new)))
3158 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (new))), new);
3160 return new;
3162 break;
3164 default:
3165 break;
3168 return exp;
3171 /* Return an expression that does an unchecked conversion of EXPR to TYPE. */
3173 tree
3174 unchecked_convert (type, expr)
3175 tree type;
3176 tree expr;
3178 tree etype = TREE_TYPE (expr);
3180 /* If the expression is already the right type, we are done. */
3181 if (etype == type)
3182 return expr;
3184 /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
3185 new one. */
3186 if (TREE_CODE (expr) == WITH_RECORD_EXPR)
3187 return build (WITH_RECORD_EXPR, type,
3188 unchecked_convert (type, TREE_OPERAND (expr, 0)),
3189 TREE_OPERAND (expr, 1));
3191 /* If both types are integral, just do a normal conversion.
3192 Likewise for a conversion to an unconstrained array. */
3193 if ((((INTEGRAL_TYPE_P (type)
3194 && ! (TREE_CODE (type) == INTEGER_TYPE
3195 && TYPE_VAX_FLOATING_POINT_P (type)))
3196 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
3197 || (TREE_CODE (type) == RECORD_TYPE
3198 && TYPE_LEFT_JUSTIFIED_MODULAR_P (type)))
3199 && ((INTEGRAL_TYPE_P (etype)
3200 && ! (TREE_CODE (etype) == INTEGER_TYPE
3201 && TYPE_VAX_FLOATING_POINT_P (etype)))
3202 || (POINTER_TYPE_P (etype) && ! TYPE_THIN_POINTER_P (etype))
3203 || (TREE_CODE (etype) == RECORD_TYPE
3204 && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype))))
3205 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3207 tree rtype = type;
3209 if (TREE_CODE (etype) == INTEGER_TYPE
3210 && TYPE_BIASED_REPRESENTATION_P (etype))
3212 tree ntype = copy_type (etype);
3214 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
3215 TYPE_MAIN_VARIANT (ntype) = ntype;
3216 expr = build1 (GNAT_NOP_EXPR, ntype, expr);
3219 if (TREE_CODE (type) == INTEGER_TYPE
3220 && TYPE_BIASED_REPRESENTATION_P (type))
3222 rtype = copy_type (type);
3223 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
3224 TYPE_MAIN_VARIANT (rtype) = rtype;
3227 expr = convert (rtype, expr);
3228 if (type != rtype)
3229 expr = build1 (GNAT_NOP_EXPR, type, expr);
3232 /* If we are converting TO an integral type whose precision is not the
3233 same as its size, first unchecked convert to a record that contains
3234 an object of the output type. Then extract the field. */
3235 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
3236 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3237 GET_MODE_BITSIZE (TYPE_MODE (type))))
3239 tree rec_type = make_node (RECORD_TYPE);
3240 tree field = create_field_decl (get_identifier ("OBJ"), type,
3241 rec_type, 1, 0, 0, 0);
3243 TYPE_FIELDS (rec_type) = field;
3244 layout_type (rec_type);
3246 expr = unchecked_convert (rec_type, expr);
3247 expr = build_component_ref (expr, NULL_TREE, field);
3250 /* Similarly for an integral input type whose precision is not equal to its
3251 size. */
3252 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype) != 0
3253 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
3254 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3256 tree rec_type = make_node (RECORD_TYPE);
3257 tree field
3258 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
3259 1, 0, 0, 0);
3261 TYPE_FIELDS (rec_type) = field;
3262 layout_type (rec_type);
3264 expr = build_constructor (rec_type, build_tree_list (field, expr));
3265 expr = unchecked_convert (type, expr);
3268 /* We have a special case when we are converting between two
3269 unconstrained array types. In that case, take the address,
3270 convert the fat pointer types, and dereference. */
3271 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
3272 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3273 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3274 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
3275 build_unary_op (ADDR_EXPR, NULL_TREE,
3276 expr)));
3277 else
3279 expr = maybe_unconstrained_array (expr);
3280 etype = TREE_TYPE (expr);
3281 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
3284 /* If the result is an integral type whose size is not equal to
3285 the size of the underlying machine type, sign- or zero-extend
3286 the result. We need not do this in the case where the input is
3287 an integral type of the same precision and signedness or if the output
3288 is a biased type or if both the input and output are unsigned. */
3289 if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
3290 && ! (TREE_CODE (type) == INTEGER_TYPE
3291 && TYPE_BIASED_REPRESENTATION_P (type))
3292 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3293 GET_MODE_BITSIZE (TYPE_MODE (type)))
3294 && ! (INTEGRAL_TYPE_P (etype)
3295 && TREE_UNSIGNED (type) == TREE_UNSIGNED (etype)
3296 && operand_equal_p (TYPE_RM_SIZE (type),
3297 (TYPE_RM_SIZE (etype) != 0
3298 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
3300 && ! (TREE_UNSIGNED (type) && TREE_UNSIGNED (etype)))
3302 tree base_type = gnat_type_for_mode (TYPE_MODE (type),
3303 TREE_UNSIGNED (type));
3304 tree shift_expr
3305 = convert (base_type,
3306 size_binop (MINUS_EXPR,
3307 bitsize_int
3308 (GET_MODE_BITSIZE (TYPE_MODE (type))),
3309 TYPE_RM_SIZE (type)));
3310 expr
3311 = convert (type,
3312 build_binary_op (RSHIFT_EXPR, base_type,
3313 build_binary_op (LSHIFT_EXPR, base_type,
3314 convert (base_type, expr),
3315 shift_expr),
3316 shift_expr));
3319 /* An unchecked conversion should never raise Constraint_Error. The code
3320 below assumes that GCC's conversion routines overflow the same way that
3321 the underlying hardware does. This is probably true. In the rare case
3322 when it is false, we can rely on the fact that such conversions are
3323 erroneous anyway. */
3324 if (TREE_CODE (expr) == INTEGER_CST)
3325 TREE_OVERFLOW (expr) = TREE_CONSTANT_OVERFLOW (expr) = 0;
3327 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
3328 mark the expression as no longer constant. */
3329 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3330 && ! operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype), 1))
3331 TREE_CONSTANT (expr) = 0;
3333 return expr;
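/* Illustrative sketch, not taken from the GNAT sources: the shift pair
   built above sign- or zero-extends a value whose RM size is smaller
   than its machine mode. For a signed value with RM size 5 held in an
   8-bit mode, shifting left by 8 - 5 = 3 and then arithmetically right
   by 3 replicates the sign bit, as modelled below on a typical
   two's-complement target (names invented for illustration). */
#if 0
static signed char
example_sign_extend_5_in_8 (signed char raw)
{
  signed char shifted = (signed char) (raw << 3);  /* drop the unused top bits  */
  return (signed char) (shifted >> 3);             /* arithmetic shift extends  */
}
#endif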
3336 #include "gt-ada-utils.h"
3337 #include "gtype-ada.h"