1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * *
10 * Copyright (C) 1992-2002, Free Software Foundation, Inc. *
11 * *
12 * GNAT is free software; you can redistribute it and/or modify it under *
13 * terms of the GNU General Public License as published by the Free Soft- *
14 * ware Foundation; either version 2, or (at your option) any later ver- *
15 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
16 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
17 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
18 * for more details. You should have received a copy of the GNU General *
19 * Public License distributed with GNAT; see file COPYING. If not, write *
20 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
21 * MA 02111-1307, USA. *
22 * *
23 * GNAT was originally developed by the GNAT team at New York University. *
24 * Extensive contributions were provided by Ada Core Technologies Inc. *
25 * *
26 ****************************************************************************/
28 #include "config.h"
29 #include "system.h"
30 #include "coretypes.h"
31 #include "tm.h"
32 #include "tree.h"
33 #include "flags.h"
34 #include "defaults.h"
35 #include "toplev.h"
36 #include "output.h"
37 #include "ggc.h"
38 #include "debug.h"
39 #include "convert.h"
41 #include "ada.h"
42 #include "types.h"
43 #include "atree.h"
44 #include "elists.h"
45 #include "namet.h"
46 #include "nlists.h"
47 #include "stringt.h"
48 #include "uintp.h"
49 #include "fe.h"
50 #include "sinfo.h"
51 #include "einfo.h"
52 #include "ada-tree.h"
53 #include "gigi.h"
55 #ifndef MAX_FIXED_MODE_SIZE
56 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
57 #endif
59 #ifndef MAX_BITS_PER_WORD
60 #define MAX_BITS_PER_WORD BITS_PER_WORD
61 #endif
63 /* If nonzero, pretend we are allocating at global level. */
64 int force_global;
66 /* Tree nodes for the various types and decls we create. */
67 tree gnat_std_decls[(int) ADT_LAST];
69 /* Functions to call for each of the possible raise reasons. */
70 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
72 /* Associates a GNAT tree node to a GCC tree node. It is used in
73 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
74 of `save_gnu_tree' for more info. */
75 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
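/* Illustrative sketch only (not part of the original source): the table
   above is used as a simple keyed store by the three routines defined
   further down in this file, roughly as in

     save_gnu_tree (gnat_entity, gnu_decl, 0);
     if (present_gnu_tree (gnat_entity))
       gnu_decl = get_gnu_tree (gnat_entity);

   where GNAT_ENTITY is an Entity_Id from the front end and GNU_DECL is
   the ..._DECL node built for it.  */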
77 /* This listhead is used to record any global objects that need elaboration.
78 TREE_PURPOSE is the variable to be elaborated and TREE_VALUE is the
79 initial value to assign. */
81 static GTY(()) tree pending_elaborations;
83 /* This stack allows us to momentarily switch to generating elaboration
84 lists for an inner context. */
86 struct e_stack GTY(()) {
87 struct e_stack *next;
 88 tree elab_list;
 89 };
90 static GTY(()) struct e_stack *elist_stack;
92 /* This variable keeps a table for types for each precision so that we only
93 allocate each of them once. Signed and unsigned types are kept separate.
95 Note that these types are only used when fold-const requests something
96 special. Perhaps we should NOT share these types; we'll see how it
97 goes later. */
98 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
100 /* Likewise for float types, but record these by mode. */
101 static GTY(()) tree float_types[NUM_MACHINE_MODES];
103 /* For each binding contour we allocate a binding_level structure which records
104 the entities defined or declared in that contour. Contours include:
106 the global one
107 one for each subprogram definition
108 one for each compound statement (declare block)
110 Binding contours are used to create GCC tree BLOCK nodes. */
 112 struct binding_level GTY(())
 113 {
114 /* A chain of ..._DECL nodes for all variables, constants, functions,
115 parameters and type declarations. These ..._DECL nodes are chained
116 through the TREE_CHAIN field. Note that these ..._DECL nodes are stored
117 in the reverse of the order supplied to be compatible with the
118 back-end. */
119 tree names;
120 /* For each level (except the global one), a chain of BLOCK nodes for all
121 the levels that were entered and exited one level down from this one. */
122 tree blocks;
123 /* The BLOCK node for this level, if one has been preallocated.
124 If 0, the BLOCK is allocated (if needed) when the level is popped. */
125 tree this_block;
126 /* The binding level containing this one (the enclosing binding level). */
 127 struct binding_level *level_chain;
 128 };
130 /* The binding level currently in effect. */
131 static GTY(()) struct binding_level *current_binding_level;
133 /* A chain of binding_level structures awaiting reuse. */
134 static GTY((deletable (""))) struct binding_level *free_binding_level;
136 /* The outermost binding level. This binding level is created when the
137 compiler is started and it will exist through the entire compilation. */
138 static struct binding_level *global_binding_level;
140 /* Binding level structures are initialized by copying this one. */
141 static struct binding_level clear_binding_level = {NULL, NULL, NULL, NULL};
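/* Illustrative sketch only (an assumption, not in the original source):
   callers bracket each declarative region with the pushlevel/poplevel
   pair defined below, along the lines of

     pushlevel (0);
     pushdecl (some_decl);
     ...
     block = poplevel (kept_level_p (), 1, 0);

   so that the ..._DECL nodes collected in `names' end up in a BLOCK node.  */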
 143 struct language_function GTY(())
 144 {
 145 int unused;
 146 };
148 static tree merge_sizes PARAMS ((tree, tree, tree, int, int));
149 static tree compute_related_constant PARAMS ((tree, tree));
150 static tree split_plus PARAMS ((tree, tree *));
151 static int value_zerop PARAMS ((tree));
152 static tree float_type_for_size PARAMS ((int, enum machine_mode));
153 static tree convert_to_fat_pointer PARAMS ((tree, tree));
154 static tree convert_to_thin_pointer PARAMS ((tree, tree));
155 static tree make_descriptor_field PARAMS ((const char *,tree, tree,
156 tree));
158 /* Initialize the association of GNAT nodes to GCC trees. */
160 void
161 init_gnat_to_gnu ()
163 Node_Id gnat_node;
165 associate_gnat_to_gnu = (tree *) ggc_alloc (max_gnat_nodes * sizeof (tree));
167 for (gnat_node = 0; gnat_node < max_gnat_nodes; gnat_node++)
168 associate_gnat_to_gnu[gnat_node] = NULL_TREE;
170 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
173 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
 174 which is to be associated with GNAT_ENTITY. Such a GCC tree node is always
175 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
177 If GNU_DECL is zero, a previous association is to be reset. */
179 void
180 save_gnu_tree (gnat_entity, gnu_decl, no_check)
181 Entity_Id gnat_entity;
182 tree gnu_decl;
183 int no_check;
185 if (gnu_decl
186 && (associate_gnat_to_gnu[gnat_entity - First_Node_Id]
187 || (! no_check && ! DECL_P (gnu_decl))))
188 gigi_abort (401);
190 associate_gnat_to_gnu[gnat_entity - First_Node_Id] = gnu_decl;
193 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
194 Return the ..._DECL node that was associated with it. If there is no tree
195 node associated with GNAT_ENTITY, abort.
197 In some cases, such as delayed elaboration or expressions that need to
198 be elaborated only once, GNAT_ENTITY is really not an entity. */
200 tree
201 get_gnu_tree (gnat_entity)
202 Entity_Id gnat_entity;
204 if (! associate_gnat_to_gnu[gnat_entity - First_Node_Id])
205 gigi_abort (402);
207 return associate_gnat_to_gnu[gnat_entity - First_Node_Id];
210 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
 212 int
 213 present_gnu_tree (gnat_entity)
214 Entity_Id gnat_entity;
216 return (associate_gnat_to_gnu[gnat_entity - First_Node_Id] != NULL_TREE);
220 /* Return non-zero if we are currently in the global binding level. */
 222 int
 223 global_bindings_p ()
225 return (force_global != 0 || current_binding_level == global_binding_level
226 ? -1 : 0);
229 /* Return the list of declarations in the current level. Note that this list
230 is in reverse order (it has to be so for back-end compatibility). */
232 tree
233 getdecls ()
235 return current_binding_level->names;
238 /* Nonzero if the current level needs to have a BLOCK made. */
 240 int
 241 kept_level_p ()
243 return (current_binding_level->names != 0);
246 /* Enter a new binding level. The input parameter is ignored, but has to be
247 specified for back-end compatibility. */
249 void
250 pushlevel (ignore)
251 int ignore ATTRIBUTE_UNUSED;
253 struct binding_level *newlevel = NULL;
255 /* Reuse a struct for this binding level, if there is one. */
256 if (free_binding_level)
258 newlevel = free_binding_level;
259 free_binding_level = free_binding_level->level_chain;
261 else
262 newlevel
263 = (struct binding_level *) ggc_alloc (sizeof (struct binding_level));
265 *newlevel = clear_binding_level;
267 /* Add this level to the front of the chain (stack) of levels that are
268 active. */
269 newlevel->level_chain = current_binding_level;
270 current_binding_level = newlevel;
273 /* Exit a binding level.
274 Pop the level off, and restore the state of the identifier-decl mappings
275 that were in effect when this level was entered.
277 If KEEP is nonzero, this level had explicit declarations, so
278 and create a "block" (a BLOCK node) for the level
279 to record its declarations and subblocks for symbol table output.
281 If FUNCTIONBODY is nonzero, this level is the body of a function,
282 so create a block as if KEEP were set and also clear out all
283 label names.
285 If REVERSE is nonzero, reverse the order of decls before putting
286 them into the BLOCK. */
288 tree
289 poplevel (keep, reverse, functionbody)
290 int keep;
291 int reverse;
292 int functionbody;
 294 /* Points to a GCC BLOCK tree node. This is the BLOCK node constructed for the
295 binding level that we are about to exit and which is returned by this
296 routine. */
297 tree block = NULL_TREE;
298 tree decl_chain;
299 tree decl_node;
300 tree subblock_chain = current_binding_level->blocks;
301 tree subblock_node;
302 int block_previously_created;
304 /* Reverse the list of XXXX_DECL nodes if desired. Note that the ..._DECL
305 nodes chained through the `names' field of current_binding_level are in
 306 reverse order except for PARM_DECL nodes, which are explicitly stored in
307 the right order. */
308 current_binding_level->names
309 = decl_chain = (reverse) ? nreverse (current_binding_level->names)
310 : current_binding_level->names;
312 /* Output any nested inline functions within this block which must be
313 compiled because their address is needed. */
314 for (decl_node = decl_chain; decl_node; decl_node = TREE_CHAIN (decl_node))
315 if (TREE_CODE (decl_node) == FUNCTION_DECL
316 && ! TREE_ASM_WRITTEN (decl_node) && TREE_ADDRESSABLE (decl_node)
317 && DECL_INITIAL (decl_node) != 0)
319 push_function_context ();
320 output_inline_function (decl_node);
321 pop_function_context ();
324 block = 0;
325 block_previously_created = (current_binding_level->this_block != 0);
326 if (block_previously_created)
327 block = current_binding_level->this_block;
328 else if (keep || functionbody)
329 block = make_node (BLOCK);
330 if (block != 0)
332 BLOCK_VARS (block) = keep ? decl_chain : 0;
333 BLOCK_SUBBLOCKS (block) = subblock_chain;
 336 /* Record the BLOCK node just built as the subblock of its enclosing scope. */
337 for (subblock_node = subblock_chain; subblock_node;
338 subblock_node = TREE_CHAIN (subblock_node))
339 BLOCK_SUPERCONTEXT (subblock_node) = block;
341 /* Clear out the meanings of the local variables of this level. */
343 for (subblock_node = decl_chain; subblock_node;
344 subblock_node = TREE_CHAIN (subblock_node))
345 if (DECL_NAME (subblock_node) != 0)
346 /* If the identifier was used or addressed via a local extern decl,
347 don't forget that fact. */
348 if (DECL_EXTERNAL (subblock_node))
350 if (TREE_USED (subblock_node))
351 TREE_USED (DECL_NAME (subblock_node)) = 1;
352 if (TREE_ADDRESSABLE (subblock_node))
353 TREE_ADDRESSABLE (DECL_ASSEMBLER_NAME (subblock_node)) = 1;
357 /* Pop the current level, and free the structure for reuse. */
358 struct binding_level *level = current_binding_level;
359 current_binding_level = current_binding_level->level_chain;
360 level->level_chain = free_binding_level;
361 free_binding_level = level;
364 if (functionbody)
366 /* This is the top level block of a function. The ..._DECL chain stored
367 in BLOCK_VARS are the function's parameters (PARM_DECL nodes). Don't
368 leave them in the BLOCK because they are found in the FUNCTION_DECL
369 instead. */
370 DECL_INITIAL (current_function_decl) = block;
371 BLOCK_VARS (block) = 0;
373 else if (block)
375 if (!block_previously_created)
376 current_binding_level->blocks
377 = chainon (current_binding_level->blocks, block);
380 /* If we did not make a block for the level just exited, any blocks made for
381 inner levels (since they cannot be recorded as subblocks in that level)
382 must be carried forward so they will later become subblocks of something
383 else. */
384 else if (subblock_chain)
385 current_binding_level->blocks
386 = chainon (current_binding_level->blocks, subblock_chain);
387 if (block)
388 TREE_USED (block) = 1;
390 return block;
393 /* Insert BLOCK at the end of the list of subblocks of the
394 current binding level. This is used when a BIND_EXPR is expanded,
395 to handle the BLOCK node inside the BIND_EXPR. */
397 void
398 insert_block (block)
399 tree block;
401 TREE_USED (block) = 1;
402 current_binding_level->blocks
403 = chainon (current_binding_level->blocks, block);
406 /* Set the BLOCK node for the innermost scope
407 (the one we are currently in). */
409 void
410 set_block (block)
411 tree block;
413 current_binding_level->this_block = block;
414 current_binding_level->names = chainon (current_binding_level->names,
415 BLOCK_VARS (block));
416 current_binding_level->blocks = chainon (current_binding_level->blocks,
417 BLOCK_SUBBLOCKS (block));
420 /* Records a ..._DECL node DECL as belonging to the current lexical scope.
421 Returns the ..._DECL node. */
423 tree
424 pushdecl (decl)
425 tree decl;
427 struct binding_level *b;
429 /* If at top level, there is no context. But PARM_DECLs always go in the
 430 level of their function. */
431 if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
433 b = global_binding_level;
434 DECL_CONTEXT (decl) = 0;
436 else
438 b = current_binding_level;
439 DECL_CONTEXT (decl) = current_function_decl;
442 /* Put the declaration on the list. The list of declarations is in reverse
443 order. The list will be reversed later if necessary. This needs to be
444 this way for compatibility with the back-end.
446 Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into the list. They
447 will cause trouble with the debugger and aren't needed anyway. */
448 if (TREE_CODE (decl) != TYPE_DECL
449 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
451 TREE_CHAIN (decl) = b->names;
452 b->names = decl;
455 /* For the declaration of a type, set its name if it either is not already
456 set, was set to an IDENTIFIER_NODE, indicating an internal name,
457 or if the previous type name was not derived from a source name.
458 We'd rather have the type named with a real name and all the pointer
 459 types to the same object have the same POINTER_TYPE node. The corresponding
 460 function in c-decl.c makes a copy of the type node here, but that may
461 cause us trouble with incomplete types, so let's not try it (at least
462 for now). */
464 if (TREE_CODE (decl) == TYPE_DECL
465 && DECL_NAME (decl) != 0
466 && (TYPE_NAME (TREE_TYPE (decl)) == 0
467 || TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == IDENTIFIER_NODE
468 || (TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == TYPE_DECL
469 && DECL_ARTIFICIAL (TYPE_NAME (TREE_TYPE (decl)))
470 && ! DECL_ARTIFICIAL (decl))))
471 TYPE_NAME (TREE_TYPE (decl)) = decl;
473 return decl;
476 /* Do little here. Set up the standard declarations later after the
477 front end has been run. */
479 void
480 gnat_init_decl_processing ()
482 lineno = 0;
484 /* Make the binding_level structure for global names. */
485 current_function_decl = 0;
486 current_binding_level = 0;
487 free_binding_level = 0;
488 pushlevel (0);
489 global_binding_level = current_binding_level;
491 build_common_tree_nodes (0);
493 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
494 corresponding to the size of ptr_mode. Make this here since we need
495 this before we can expand the GNAT types. */
496 set_sizetype (gnat_type_for_size (GET_MODE_BITSIZE (ptr_mode), 0));
497 build_common_tree_nodes_2 (0);
499 pushdecl (build_decl (TYPE_DECL, get_identifier (SIZE_TYPE), sizetype));
501 /* We need to make the integer type before doing anything else.
502 We stitch this in to the appropriate GNAT type later. */
503 pushdecl (build_decl (TYPE_DECL, get_identifier ("integer"),
504 integer_type_node));
505 pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned char"),
506 char_type_node));
508 ptr_void_type_node = build_pointer_type (void_type_node);
512 /* Create the predefined scalar types such as `integer_type_node' needed
513 in the gcc back-end and initialize the global binding level. */
515 void
516 init_gigi_decls (long_long_float_type, exception_type)
517 tree long_long_float_type, exception_type;
519 tree endlink, decl;
520 unsigned int i;
522 /* Set the types that GCC and Gigi use from the front end. We would like
523 to do this for char_type_node, but it needs to correspond to the C
524 char type. */
525 if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
527 /* In this case, the builtin floating point types are VAX float,
528 so make up a type for use. */
529 longest_float_type_node = make_node (REAL_TYPE);
530 TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
531 layout_type (longest_float_type_node);
532 pushdecl (build_decl (TYPE_DECL, get_identifier ("longest float type"),
533 longest_float_type_node));
535 else
536 longest_float_type_node = TREE_TYPE (long_long_float_type);
538 except_type_node = TREE_TYPE (exception_type);
540 unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
541 pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned int"),
542 unsigned_type_node));
544 void_type_decl_node
545 = pushdecl (build_decl (TYPE_DECL, get_identifier ("void"),
546 void_type_node));
548 void_ftype = build_function_type (void_type_node, NULL_TREE);
549 ptr_void_ftype = build_pointer_type (void_ftype);
551 /* Now declare runtime functions. */
552 endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);
554 /* malloc is a function declaration tree for a function to allocate
555 memory. */
556 malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
557 NULL_TREE,
558 build_function_type (ptr_void_type_node,
559 tree_cons (NULL_TREE,
560 sizetype,
561 endlink)),
562 NULL_TREE, 0, 1, 1, 0);
564 /* free is a function declaration tree for a function to free memory. */
566 free_decl
567 = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
568 build_function_type (void_type_node,
569 tree_cons (NULL_TREE,
570 ptr_void_type_node,
571 endlink)),
572 NULL_TREE, 0, 1, 1, 0);
574 /* Make the types and functions used for exception processing. */
575 jmpbuf_type
576 = build_array_type (gnat_type_for_mode (Pmode, 0),
577 build_index_type (build_int_2 (5, 0)));
578 pushdecl (build_decl (TYPE_DECL, get_identifier ("JMPBUF_T"), jmpbuf_type));
579 jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);
581 /* Functions to get and set the jumpbuf pointer for the current thread. */
582 get_jmpbuf_decl
583 = create_subprog_decl
584 (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
585 NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
586 NULL_TREE, 0, 1, 1, 0);
588 set_jmpbuf_decl
589 = create_subprog_decl
590 (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
591 NULL_TREE,
592 build_function_type (void_type_node,
593 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
594 NULL_TREE, 0, 1, 1, 0);
596 /* Function to get the current exception. */
597 get_excptr_decl
598 = create_subprog_decl
599 (get_identifier ("system__soft_links__get_gnat_exception"),
600 NULL_TREE,
601 build_function_type (build_pointer_type (except_type_node), NULL_TREE),
602 NULL_TREE, 0, 1, 1, 0);
604 /* Functions that raise exceptions. */
605 raise_nodefer_decl
606 = create_subprog_decl
607 (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
608 build_function_type (void_type_node,
609 tree_cons (NULL_TREE,
610 build_pointer_type (except_type_node),
611 endlink)),
612 NULL_TREE, 0, 1, 1, 0);
614 /* If in no exception handlers mode, all raise statements are redirected to
615 __gnat_last_chance_handler. No need to redefine raise_nodefer_decl, since
616 this procedure will never be called in this mode. */
617 if (No_Exception_Handlers_Set ())
619 decl
620 = create_subprog_decl
621 (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
622 build_function_type (void_type_node,
623 tree_cons (NULL_TREE,
624 build_pointer_type (char_type_node),
625 tree_cons (NULL_TREE,
626 integer_type_node,
627 endlink))),
628 NULL_TREE, 0, 1, 1, 0);
630 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
631 gnat_raise_decls[i] = decl;
633 else
634 /* Otherwise, make one decl for each exception reason. */
635 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
637 char name[17];
639 sprintf (name, "__gnat_rcheck_%.2d", i);
640 gnat_raise_decls[i]
641 = create_subprog_decl
642 (get_identifier (name), NULL_TREE,
643 build_function_type (void_type_node,
644 tree_cons (NULL_TREE,
645 build_pointer_type
646 (char_type_node),
647 tree_cons (NULL_TREE,
648 integer_type_node,
649 endlink))),
650 NULL_TREE, 0, 1, 1, 0);
653 /* Indicate that these never return. */
654 TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
655 TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
656 TREE_TYPE (raise_nodefer_decl)
657 = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
658 TYPE_QUAL_VOLATILE);
660 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
662 TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
663 TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
664 TREE_TYPE (gnat_raise_decls[i])
665 = build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
666 TYPE_QUAL_VOLATILE);
669 /* setjmp returns an integer and has one operand, which is a pointer to
670 a jmpbuf. */
671 setjmp_decl
672 = create_subprog_decl
673 (get_identifier ("setjmp"), NULL_TREE,
674 build_function_type (integer_type_node,
675 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
676 NULL_TREE, 0, 1, 1, 0);
678 DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
679 DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;
681 main_identifier_node = get_identifier ("main");
684 /* This function is called indirectly from toplev.c to handle incomplete
685 declarations, i.e. VAR_DECL nodes whose DECL_SIZE is zero. To be precise,
686 compile_file in toplev.c makes an indirect call through the function pointer
687 incomplete_decl_finalize_hook which is initialized to this routine in
688 init_decl_processing. */
690 void
691 gnat_finish_incomplete_decl (dont_care)
692 tree dont_care ATTRIBUTE_UNUSED;
694 gigi_abort (405);
697 /* Given a record type (RECORD_TYPE) and a chain of FIELD_DECL
698 nodes (FIELDLIST), finish constructing the record or union type.
699 If HAS_REP is nonzero, this record has a rep clause; don't call
700 layout_type but merely set the size and alignment ourselves.
701 If DEFER_DEBUG is nonzero, do not call the debugging routines
702 on this type; it will be done later. */
704 void
705 finish_record_type (record_type, fieldlist, has_rep, defer_debug)
706 tree record_type;
707 tree fieldlist;
708 int has_rep;
709 int defer_debug;
711 enum tree_code code = TREE_CODE (record_type);
712 tree ada_size = bitsize_zero_node;
713 tree size = bitsize_zero_node;
714 tree size_unit = size_zero_node;
715 int var_size = 0;
716 tree field;
718 TYPE_FIELDS (record_type) = fieldlist;
720 if (TYPE_NAME (record_type) != 0
721 && TREE_CODE (TYPE_NAME (record_type)) == TYPE_DECL)
722 TYPE_STUB_DECL (record_type) = TYPE_NAME (record_type);
723 else
724 TYPE_STUB_DECL (record_type)
725 = pushdecl (build_decl (TYPE_DECL, TYPE_NAME (record_type),
726 record_type));
728 /* We don't need both the typedef name and the record name output in
729 the debugging information, since they are the same. */
730 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
732 /* Globally initialize the record first. If this is a rep'ed record,
733 that just means some initializations; otherwise, layout the record. */
735 if (has_rep)
737 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
738 TYPE_MODE (record_type) = BLKmode;
739 if (TYPE_SIZE (record_type) == 0)
741 TYPE_SIZE (record_type) = bitsize_zero_node;
742 TYPE_SIZE_UNIT (record_type) = size_zero_node;
745 else
 747 /* Ensure there isn't a size already set. There can be one in an error
748 case where there is a rep clause but all fields have errors and
749 no longer have a position. */
750 TYPE_SIZE (record_type) = 0;
751 layout_type (record_type);
754 /* At this point, the position and size of each field is known. It was
755 either set before entry by a rep clause, or by laying out the type
756 above. We now make a pass through the fields (in reverse order for
757 QUAL_UNION_TYPEs) to compute the Ada size; the GCC size and alignment
758 (for rep'ed records that are not padding types); and the mode (for
759 rep'ed records). */
761 if (code == QUAL_UNION_TYPE)
762 fieldlist = nreverse (fieldlist);
764 for (field = fieldlist; field; field = TREE_CHAIN (field))
766 tree type = TREE_TYPE (field);
767 tree this_size = DECL_SIZE (field);
768 tree this_size_unit = DECL_SIZE_UNIT (field);
769 tree this_ada_size = DECL_SIZE (field);
771 /* We need to make an XVE/XVU record if any field has variable size,
 772 whether or not the record does. For example, if we have a union,
773 it may be that all fields, rounded up to the alignment, have the
774 same size, in which case we'll use that size. But the debug
775 output routines (except Dwarf2) won't be able to output the fields,
776 so we need to make the special record. */
777 if (TREE_CODE (this_size) != INTEGER_CST)
778 var_size = 1;
780 if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
781 || TREE_CODE (type) == QUAL_UNION_TYPE)
782 && ! TYPE_IS_FAT_POINTER_P (type)
783 && ! TYPE_CONTAINS_TEMPLATE_P (type)
784 && TYPE_ADA_SIZE (type) != 0)
785 this_ada_size = TYPE_ADA_SIZE (type);
787 if (has_rep && ! DECL_BIT_FIELD (field))
788 TYPE_ALIGN (record_type)
789 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
791 switch (code)
793 case UNION_TYPE:
794 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
795 size = size_binop (MAX_EXPR, size, this_size);
796 size_unit = size_binop (MAX_EXPR, size_unit, this_size_unit);
797 break;
799 case QUAL_UNION_TYPE:
800 ada_size
801 = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
802 this_ada_size, ada_size));
803 size = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
804 this_size, size));
805 size_unit = fold (build (COND_EXPR, sizetype, DECL_QUALIFIER (field),
806 this_size_unit, size_unit));
807 break;
809 case RECORD_TYPE:
810 /* Since we know here that all fields are sorted in order of
811 increasing bit position, the size of the record is one
812 higher than the ending bit of the last field processed
813 unless we have a rep clause, since in that case we might
814 have a field outside a QUAL_UNION_TYPE that has a higher ending
815 position. So use a MAX in that case. Also, if this field is a
816 QUAL_UNION_TYPE, we need to take into account the previous size in
817 the case of empty variants. */
818 ada_size
819 = merge_sizes (ada_size, bit_position (field), this_ada_size,
820 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
821 size = merge_sizes (size, bit_position (field), this_size,
822 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
823 size_unit
824 = merge_sizes (size_unit, byte_position (field), this_size_unit,
825 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
826 break;
828 default:
829 abort ();
833 if (code == QUAL_UNION_TYPE)
834 nreverse (fieldlist);
836 /* If this is a padding record, we never want to make the size smaller than
837 what was specified in it, if any. */
838 if (TREE_CODE (record_type) == RECORD_TYPE
839 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type) != 0)
841 size = TYPE_SIZE (record_type);
842 size_unit = TYPE_SIZE_UNIT (record_type);
845 /* Now set any of the values we've just computed that apply. */
846 if (! TYPE_IS_FAT_POINTER_P (record_type)
847 && ! TYPE_CONTAINS_TEMPLATE_P (record_type))
848 SET_TYPE_ADA_SIZE (record_type, ada_size);
850 #ifdef ROUND_TYPE_SIZE
851 size = ROUND_TYPE_SIZE (record_type, size, TYPE_ALIGN (record_type));
852 size_unit = ROUND_TYPE_SIZE_UNIT (record_type, size_unit,
853 TYPE_ALIGN (record_type) / BITS_PER_UNIT);
854 #else
855 size = round_up (size, TYPE_ALIGN (record_type));
856 size_unit = round_up (size_unit, TYPE_ALIGN (record_type) / BITS_PER_UNIT);
857 #endif
859 if (has_rep
860 && ! (TREE_CODE (record_type) == RECORD_TYPE
861 && TYPE_IS_PADDING_P (record_type)
862 && TREE_CODE (size) != INTEGER_CST
863 && contains_placeholder_p (size)))
865 TYPE_SIZE (record_type) = size;
866 TYPE_SIZE_UNIT (record_type) = size_unit;
869 if (has_rep)
870 compute_record_mode (record_type);
872 if (! defer_debug)
874 /* If this record is of variable size, rename it so that the
875 debugger knows it is and make a new, parallel, record
876 that tells the debugger how the record is laid out. See
877 exp_dbug.ads. */
878 if (var_size)
880 tree new_record_type
881 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
882 ? UNION_TYPE : TREE_CODE (record_type));
883 tree orig_id = DECL_NAME (TYPE_STUB_DECL (record_type));
884 tree new_id
885 = concat_id_with_name (orig_id,
886 TREE_CODE (record_type) == QUAL_UNION_TYPE
887 ? "XVU" : "XVE");
888 tree last_pos = bitsize_zero_node;
889 tree old_field;
891 TYPE_NAME (new_record_type) = new_id;
892 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
893 TYPE_STUB_DECL (new_record_type)
894 = pushdecl (build_decl (TYPE_DECL, new_id, new_record_type));
895 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
896 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
897 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
898 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
900 /* Now scan all the fields, replacing each field with a new
901 field corresponding to the new encoding. */
902 for (old_field = TYPE_FIELDS (record_type); old_field != 0;
903 old_field = TREE_CHAIN (old_field))
905 tree field_type = TREE_TYPE (old_field);
906 tree field_name = DECL_NAME (old_field);
907 tree new_field;
908 tree curpos = bit_position (old_field);
909 int var = 0;
910 unsigned int align = 0;
911 tree pos;
913 /* See how the position was modified from the last position.
915 There are two basic cases we support: a value was added
916 to the last position or the last position was rounded to
 917 a boundary and then something was added. Check for the
918 first case first. If not, see if there is any evidence
919 of rounding. If so, round the last position and try
920 again.
922 If this is a union, the position can be taken as zero. */
924 if (TREE_CODE (new_record_type) == UNION_TYPE)
925 pos = bitsize_zero_node, align = 0;
926 else
927 pos = compute_related_constant (curpos, last_pos);
929 if (pos == 0 && TREE_CODE (curpos) == MULT_EXPR
930 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST)
932 align = TREE_INT_CST_LOW (TREE_OPERAND (curpos, 1));
933 pos = compute_related_constant (curpos,
934 round_up (last_pos, align));
936 else if (pos == 0 && TREE_CODE (curpos) == PLUS_EXPR
937 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
938 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
939 && host_integerp (TREE_OPERAND
 940 (TREE_OPERAND (curpos, 0), 1),
 941 1))
943 align
944 = tree_low_cst
945 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
946 pos = compute_related_constant (curpos,
947 round_up (last_pos, align));
950 /* If we can't compute a position, set it to zero.
952 ??? We really should abort here, but it's too much work
953 to get this correct for all cases. */
955 if (pos == 0)
956 pos = bitsize_zero_node;
958 /* See if this type is variable-size and make a new type
959 and indicate the indirection if so. */
960 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
962 field_type = build_pointer_type (field_type);
963 var = 1;
966 /* Make a new field name, if necessary. */
967 if (var || align != 0)
969 char suffix[6];
971 if (align != 0)
972 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
973 align / BITS_PER_UNIT);
974 else
975 strcpy (suffix, "XVL");
977 field_name = concat_id_with_name (field_name, suffix);
980 new_field = create_field_decl (field_name, field_type,
981 new_record_type, 0,
982 DECL_SIZE (old_field), pos, 0);
983 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
984 TYPE_FIELDS (new_record_type) = new_field;
986 /* If old_field is a QUAL_UNION_TYPE, take its size as being
987 zero. The only time it's not the last field of the record
988 is when there are other components at fixed positions after
989 it (meaning there was a rep clause for every field) and we
990 want to be able to encode them. */
991 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
992 (TREE_CODE (TREE_TYPE (old_field))
993 == QUAL_UNION_TYPE)
994 ? bitsize_zero_node
995 : DECL_SIZE (old_field));
998 TYPE_FIELDS (new_record_type)
999 = nreverse (TYPE_FIELDS (new_record_type));
1001 rest_of_type_compilation (new_record_type, global_bindings_p ());
1004 rest_of_type_compilation (record_type, global_bindings_p ());
1008 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1009 with FIRST_BIT and SIZE that describe a field. SPECIAL is nonzero
1010 if this represents a QUAL_UNION_TYPE in which case we must look for
1011 COND_EXPRs and replace a value of zero with the old size. If HAS_REP
1012 is nonzero, we must take the MAX of the end position of this field
1013 with LAST_SIZE. In all other cases, we use FIRST_BIT plus SIZE.
1015 We return an expression for the size. */
1017 static tree
1018 merge_sizes (last_size, first_bit, size, special, has_rep)
1019 tree last_size;
1020 tree first_bit, size;
1021 int special;
1022 int has_rep;
1024 tree type = TREE_TYPE (last_size);
1026 if (! special || TREE_CODE (size) != COND_EXPR)
1028 tree new = size_binop (PLUS_EXPR, first_bit, size);
1030 if (has_rep)
1031 new = size_binop (MAX_EXPR, last_size, new);
1033 return new;
1036 return fold (build (COND_EXPR, type, TREE_OPERAND (size, 0),
1037 integer_zerop (TREE_OPERAND (size, 1))
1038 ? last_size : merge_sizes (last_size, first_bit,
1039 TREE_OPERAND (size, 1),
1040 1, has_rep),
1041 integer_zerop (TREE_OPERAND (size, 2))
1042 ? last_size : merge_sizes (last_size, first_bit,
1043 TREE_OPERAND (size, 2),
1044 1, has_rep)));
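/* Worked example for the routine above (hypothetical values, for
   illustration only): with LAST_SIZE = 32, FIRST_BIT = 32 and SIZE = 16,
   and no rep clause, the result is FIRST_BIT + SIZE = 48 bits; with
   HAS_REP set, the result is MAX (LAST_SIZE, FIRST_BIT + SIZE), still 48
   here, but it would stay 64 had LAST_SIZE been 64.  */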
1047 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1048 related by the addition of a constant. Return that constant if so. */
1050 static tree
1051 compute_related_constant (op0, op1)
1052 tree op0, op1;
1054 tree op0_var, op1_var;
1055 tree op0_con = split_plus (op0, &op0_var);
1056 tree op1_con = split_plus (op1, &op1_var);
1057 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1059 if (operand_equal_p (op0_var, op1_var, 0))
1060 return result;
1061 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1062 return result;
1063 else
1064 return 0;
1067 /* Utility function of above to split a tree OP which may be a sum, into a
1068 constant part, which is returned, and a variable part, which is stored
1069 in *PVAR. *PVAR may be size_zero_node. All operations must be of
1070 sizetype. */
1072 static tree
1073 split_plus (in, pvar)
1074 tree in;
1075 tree *pvar;
1077 tree result = bitsize_zero_node;
1079 while (TREE_CODE (in) == NON_LVALUE_EXPR)
1080 in = TREE_OPERAND (in, 0);
1082 *pvar = in;
1083 if (TREE_CODE (in) == INTEGER_CST)
1085 *pvar = bitsize_zero_node;
1086 return in;
1088 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1090 tree lhs_var, rhs_var;
1091 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1092 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1094 result = size_binop (PLUS_EXPR, result, lhs_con);
1095 result = size_binop (TREE_CODE (in), result, rhs_con);
1097 if (lhs_var == TREE_OPERAND (in, 0)
1098 && rhs_var == TREE_OPERAND (in, 1))
1099 return bitsize_zero_node;
1101 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1102 return result;
1104 else
1105 return bitsize_zero_node;
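/* Worked example for the two routines above (hypothetical operands, for
   illustration only): given OP0 = V + 24 and OP1 = V + 8, split_plus
   returns the constants 24 and 8 and stores the common variable part V
   in *PVAR, so compute_related_constant returns the bitsize constant 16.  */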
1108 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1109 subprogram. If it is void_type_node, then we are dealing with a procedure,
1110 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1111 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1112 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1113 RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
1114 object. RETURNS_BY_REF is nonzero if the function returns by reference.
1115 RETURNS_WITH_DSP is nonzero if the function is to return with a
1116 depressed stack pointer. */
1118 tree
1119 create_subprog_type (return_type, param_decl_list, cico_list,
1120 returns_unconstrained, returns_by_ref, returns_with_dsp)
1121 tree return_type;
1122 tree param_decl_list;
1123 tree cico_list;
1124 int returns_unconstrained, returns_by_ref, returns_with_dsp;
1126 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1127 the subprogram formal parameters. This list is generated by traversing the
1128 input list of PARM_DECL nodes. */
1129 tree param_type_list = NULL;
1130 tree param_decl;
1131 tree type;
1133 for (param_decl = param_decl_list; param_decl;
1134 param_decl = TREE_CHAIN (param_decl))
1135 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1136 param_type_list);
1138 /* The list of the function parameter types has to be terminated by the void
1139 type to signal to the back-end that we are not dealing with a variable
1140 parameter subprogram, but that the subprogram has a fixed number of
1141 parameters. */
1142 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1144 /* The list of argument types has been created in reverse
1145 so nreverse it. */
1146 param_type_list = nreverse (param_type_list);
1148 type = build_function_type (return_type, param_type_list);
1150 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1151 or the new type should, make a copy of TYPE. Likewise for
1152 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1153 if (TYPE_CI_CO_LIST (type) != 0 || cico_list != 0
1154 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1155 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref)
1156 type = copy_type (type);
1158 SET_TYPE_CI_CO_LIST (type, cico_list);
1159 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1160 TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
1161 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1162 return type;
1165 /* Return a copy of TYPE but safe to modify in any way. */
1167 tree
1168 copy_type (type)
1169 tree type;
1171 tree new = copy_node (type);
1173 /* copy_node clears this field instead of copying it, because it is
1174 aliased with TREE_CHAIN. */
1175 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
1177 TYPE_POINTER_TO (new) = 0;
1178 TYPE_REFERENCE_TO (new) = 0;
1179 TYPE_MAIN_VARIANT (new) = new;
1180 TYPE_NEXT_VARIANT (new) = 0;
1182 return new;
1185 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1186 TYPE_INDEX_TYPE is INDEX. */
1188 tree
1189 create_index_type (min, max, index)
1190 tree min, max;
1191 tree index;
1193 /* First build a type for the desired range. */
1194 tree type = build_index_2_type (min, max);
1196 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1197 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1198 is set, but not to INDEX, make a copy of this type with the requested
1199 index type. Note that we have no way of sharing these types, but that's
1200 only a small hole. */
1201 if (TYPE_INDEX_TYPE (type) == index)
1202 return type;
1203 else if (TYPE_INDEX_TYPE (type) != 0)
1204 type = copy_type (type);
1206 SET_TYPE_INDEX_TYPE (type, index);
1207 return type;
1210 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type (a character
1211 string) and TYPE is a ..._TYPE node giving its data type.
1212 ARTIFICIAL_P is nonzero if this is a declaration that was generated
1213 by the compiler. DEBUG_INFO_P is nonzero if we need to write debugging
1214 information about this type. */
1216 tree
1217 create_type_decl (type_name, type, attr_list, artificial_p, debug_info_p)
1218 tree type_name;
1219 tree type;
1220 struct attrib *attr_list;
1221 int artificial_p;
1222 int debug_info_p;
1224 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1225 enum tree_code code = TREE_CODE (type);
1227 DECL_ARTIFICIAL (type_decl) = artificial_p;
1228 pushdecl (type_decl);
1229 process_attributes (type_decl, attr_list);
1231 /* Pass type declaration information to the debugger unless this is an
1232 UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
 1233 an ENUMERAL_TYPE or RECORD_TYPE, which are handled separately,
1234 a dummy type, which will be completed later, or a type for which
1235 debugging information was not requested. */
1236 if (code == UNCONSTRAINED_ARRAY_TYPE || TYPE_IS_DUMMY_P (type)
1237 || ! debug_info_p)
1238 DECL_IGNORED_P (type_decl) = 1;
1239 else if (code != ENUMERAL_TYPE && code != RECORD_TYPE
1240 && ! ((code == POINTER_TYPE || code == REFERENCE_TYPE)
1241 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1242 rest_of_decl_compilation (type_decl, NULL, global_bindings_p (), 0);
1244 return type_decl;
1247 /* Returns a GCC VAR_DECL node. VAR_NAME gives the name of the variable.
1248 ASM_NAME is its assembler name (if provided). TYPE is its data type
1249 (a GCC ..._TYPE node). VAR_INIT is the GCC tree for an optional initial
1250 expression; NULL_TREE if none.
1252 CONST_FLAG is nonzero if this variable is constant.
1254 PUBLIC_FLAG is nonzero if this definition is to be made visible outside of
1255 the current compilation unit. This flag should be set when processing the
1256 variable definitions in a package specification. EXTERN_FLAG is nonzero
1257 when processing an external variable declaration (as opposed to a
1258 definition: no storage is to be allocated for the variable here).
1260 STATIC_FLAG is only relevant when not at top level. In that case
1261 it indicates whether to always allocate storage to the variable. */
1263 tree
1264 create_var_decl (var_name, asm_name, type, var_init, const_flag, public_flag,
1265 extern_flag, static_flag, attr_list)
1266 tree var_name;
1267 tree asm_name;
1268 tree type;
1269 tree var_init;
1270 int const_flag;
1271 int public_flag;
1272 int extern_flag;
1273 int static_flag;
1274 struct attrib *attr_list;
1276 int init_const
 1277 = (var_init == 0
 1278 ? 0
1279 : (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
1280 && (global_bindings_p () || static_flag
1281 ? 0 != initializer_constant_valid_p (var_init,
1282 TREE_TYPE (var_init))
1283 : TREE_CONSTANT (var_init))));
1284 tree var_decl
1285 = build_decl ((const_flag && init_const
1286 /* Only make a CONST_DECL for sufficiently-small objects.
1287 We consider complex double "sufficiently-small" */
1288 && TYPE_SIZE (type) != 0
1289 && host_integerp (TYPE_SIZE_UNIT (type), 1)
1290 && 0 >= compare_tree_int (TYPE_SIZE_UNIT (type),
1291 GET_MODE_SIZE (DCmode)))
1292 ? CONST_DECL : VAR_DECL, var_name, type);
1293 tree assign_init = 0;
1295 /* If this is external, throw away any initializations unless this is a
1296 CONST_DECL (meaning we have a constant); they will be done elsewhere. If
1297 we are defining a global here, leave a constant initialization and save
1298 any variable elaborations for the elaboration routine. Otherwise, if
1299 the initializing expression is not the same as TYPE, generate the
1300 initialization with an assignment statement, since it knows how
 1301 to do the required adjustments. If we are just annotating types,
1302 throw away the initialization if it isn't a constant. */
1304 if ((extern_flag && TREE_CODE (var_decl) != CONST_DECL)
1305 || (type_annotate_only && var_init != 0 && ! TREE_CONSTANT (var_init)))
1306 var_init = 0;
1308 if (global_bindings_p () && var_init != 0 && ! init_const)
1310 add_pending_elaborations (var_decl, var_init);
1311 var_init = 0;
1314 else if (var_init != 0
1315 && ((TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
1316 != TYPE_MAIN_VARIANT (type))
1317 || (static_flag && ! init_const)))
1318 assign_init = var_init, var_init = 0;
1320 DECL_COMMON (var_decl) = !flag_no_common;
1321 DECL_INITIAL (var_decl) = var_init;
1322 TREE_READONLY (var_decl) = const_flag;
1323 DECL_EXTERNAL (var_decl) = extern_flag;
1324 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1325 TREE_CONSTANT (var_decl) = TREE_CODE (var_decl) == CONST_DECL;
1326 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1327 = TYPE_VOLATILE (type);
1329 /* At the global binding level we need to allocate static storage for the
 1330 variable if and only if it's not external. If we are not at the top level
1331 we allocate automatic storage unless requested not to. */
1332 TREE_STATIC (var_decl) = global_bindings_p () ? !extern_flag : static_flag;
1334 if (asm_name != 0)
1335 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1337 process_attributes (var_decl, attr_list);
1339 /* Add this decl to the current binding level and generate any
1340 needed code and RTL. */
1341 var_decl = pushdecl (var_decl);
1342 expand_decl (var_decl);
1344 if (DECL_CONTEXT (var_decl) != 0)
1345 expand_decl_init (var_decl);
1347 /* If this is volatile, force it into memory. */
1348 if (TREE_SIDE_EFFECTS (var_decl))
1349 gnat_mark_addressable (var_decl);
1351 if (TREE_CODE (var_decl) != CONST_DECL)
1352 rest_of_decl_compilation (var_decl, 0, global_bindings_p (), 0);
1354 if (assign_init != 0)
1356 /* If VAR_DECL has a padded type, convert it to the unpadded
1357 type so the assignment is done properly. */
1358 tree lhs = var_decl;
1360 if (TREE_CODE (TREE_TYPE (lhs)) == RECORD_TYPE
1361 && TYPE_IS_PADDING_P (TREE_TYPE (lhs)))
1362 lhs = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (lhs))), lhs);
1364 expand_expr_stmt (build_binary_op (MODIFY_EXPR, NULL_TREE, lhs,
1365 assign_init));
1368 return var_decl;
1371 /* Returns a FIELD_DECL node. FIELD_NAME the field name, FIELD_TYPE is its
1372 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1373 this field is in a record type with a "pragma pack". If SIZE is nonzero
1374 it is the specified size for this field. If POS is nonzero, it is the bit
1375 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1376 the address of this field for aliasing purposes. */
1378 tree
1379 create_field_decl (field_name, field_type, record_type, packed, size, pos,
1380 addressable)
1381 tree field_name;
1382 tree field_type;
1383 tree record_type;
1384 int packed;
1385 tree size, pos;
1386 int addressable;
1388 tree field_decl = build_decl (FIELD_DECL, field_name, field_type);
1390 DECL_CONTEXT (field_decl) = record_type;
1391 TREE_READONLY (field_decl) = TREE_READONLY (field_type);
1393 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1394 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1395 If it is a padding type where the inner field is of variable size, it
1396 must be at its natural alignment. Just handle the packed case here; we
1397 will disallow non-aligned rep clauses elsewhere. */
1398 if (packed && TYPE_MODE (field_type) == BLKmode)
1399 DECL_ALIGN (field_decl)
1400 = ((TREE_CODE (field_type) == RECORD_TYPE
1401 && TYPE_IS_PADDING_P (field_type)
1402 && ! TREE_CONSTANT (DECL_SIZE (TYPE_FIELDS (field_type))))
1403 ? TYPE_ALIGN (field_type) : BITS_PER_UNIT);
1405 /* If a size is specified, use it. Otherwise, see if we have a size
1406 to use that may differ from the natural size of the object. */
1407 if (size != 0)
1408 size = convert (bitsizetype, size);
1409 else if (packed)
1411 if (packed == 1 && ! operand_equal_p (rm_size (field_type),
1412 TYPE_SIZE (field_type), 0))
1413 size = rm_size (field_type);
1415 /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
 1416 a byte boundary. */
1417 if (size != 0 && TREE_CODE (size) == INTEGER_CST
1418 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
1419 size = round_up (size, BITS_PER_UNIT);
1422 /* Make a bitfield if a size is specified for two reasons: first if the size
1423 differs from the natural size. Second, if the alignment is insufficient.
1424 There are a number of ways the latter can be true. But never make a
1425 bitfield if the type of the field has a nonconstant size. */
1427 if (size != 0 && TREE_CODE (size) == INTEGER_CST
1428 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1429 && (! operand_equal_p (TYPE_SIZE (field_type), size, 0)
1430 || (pos != 0
1431 && ! value_zerop (size_binop (TRUNC_MOD_EXPR, pos,
1432 bitsize_int (TYPE_ALIGN
1433 (field_type)))))
1434 || packed
1435 || (TYPE_ALIGN (record_type) != 0
1436 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1438 DECL_BIT_FIELD (field_decl) = 1;
1439 DECL_SIZE (field_decl) = size;
1440 if (! packed && pos == 0)
1441 DECL_ALIGN (field_decl)
1442 = (TYPE_ALIGN (record_type) != 0
1443 ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
1444 : TYPE_ALIGN (field_type));
1447 DECL_PACKED (field_decl) = pos != 0 ? DECL_BIT_FIELD (field_decl) : packed;
1448 DECL_ALIGN (field_decl)
1449 = MAX (DECL_ALIGN (field_decl),
1450 DECL_BIT_FIELD (field_decl) ? 1
1451 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT
1452 : TYPE_ALIGN (field_type));
1454 if (pos != 0)
1456 /* We need to pass in the alignment the DECL is known to have.
1457 This is the lowest-order bit set in POS, but no more than
1458 the alignment of the record, if one is specified. Note
1459 that an alignment of 0 is taken as infinite. */
1460 unsigned int known_align;
1462 if (host_integerp (pos, 1))
1463 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1464 else
1465 known_align = BITS_PER_UNIT;
1467 if (TYPE_ALIGN (record_type)
1468 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1469 known_align = TYPE_ALIGN (record_type);
1471 layout_decl (field_decl, known_align);
1472 SET_DECL_OFFSET_ALIGN (field_decl,
1473 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1474 : BITS_PER_UNIT);
1475 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1476 &DECL_FIELD_BIT_OFFSET (field_decl),
1477 DECL_OFFSET_ALIGN (field_decl), pos);
1479 DECL_HAS_REP_P (field_decl) = 1;
1482 /* If the field type is passed by reference, we will have pointers to the
1483 field, so it is addressable. */
1484 if (must_pass_by_ref (field_type) || default_pass_by_ref (field_type))
1485 addressable = 1;
1487 /* Mark the decl as nonaddressable if it either is indicated so semantically
1488 or if it is a bit field. */
1489 DECL_NONADDRESSABLE_P (field_decl)
1490 = ! addressable || DECL_BIT_FIELD (field_decl);
1492 return field_decl;
1495 /* Subroutine of previous function: return nonzero if EXP, ignoring any side
1496 effects, has the value of zero. */
1498 static int
1499 value_zerop (exp)
1500 tree exp;
1502 if (TREE_CODE (exp) == COMPOUND_EXPR)
1503 return value_zerop (TREE_OPERAND (exp, 1));
1505 return integer_zerop (exp);
1508 /* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1509 PARAM_TYPE is its type. READONLY is nonzero if the parameter is
1510 readonly (either an IN parameter or an address of a pass-by-ref
1511 parameter). */
1513 tree
1514 create_param_decl (param_name, param_type, readonly)
1515 tree param_name;
1516 tree param_type;
1517 int readonly;
1519 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
1521 DECL_ARG_TYPE (param_decl) = param_type;
1522 DECL_ARG_TYPE_AS_WRITTEN (param_decl) = param_type;
1523 TREE_READONLY (param_decl) = readonly;
1524 return param_decl;
1527 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1529 void
1530 process_attributes (decl, attr_list)
1531 tree decl;
1532 struct attrib *attr_list;
1534 for (; attr_list; attr_list = attr_list->next)
1535 switch (attr_list->type)
1537 case ATTR_MACHINE_ATTRIBUTE:
1538 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->arg,
1539 NULL_TREE),
1540 ATTR_FLAG_TYPE_IN_PLACE);
1541 break;
1543 case ATTR_LINK_ALIAS:
1544 TREE_STATIC (decl) = 1;
1545 assemble_alias (decl, attr_list->name);
1546 break;
1548 case ATTR_WEAK_EXTERNAL:
1549 if (SUPPORTS_WEAK)
1550 declare_weak (decl);
1551 else
1552 post_error ("?weak declarations not supported on this target",
1553 attr_list->error_point);
1554 break;
1556 case ATTR_LINK_SECTION:
1557 #ifdef ASM_OUTPUT_SECTION_NAME
1558 DECL_SECTION_NAME (decl)
1559 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1560 IDENTIFIER_POINTER (attr_list->name));
1561 DECL_COMMON (decl) = 0;
1562 #else
1563 post_error ("?section attributes are not supported for this target",
1564 attr_list->error_point);
1565 #endif
1566 break;
1570 /* Add some pending elaborations on the list. */
1572 void
1573 add_pending_elaborations (var_decl, var_init)
1574 tree var_decl;
1575 tree var_init;
1577 if (var_init != 0)
1578 Check_Elaboration_Code_Allowed (error_gnat_node);
1580 pending_elaborations
1581 = chainon (pending_elaborations, build_tree_list (var_decl, var_init));
1584 /* Obtain any pending elaborations and clear the old list. */
1586 tree
1587 get_pending_elaborations ()
 1589 /* Each thing added to the list went on the end; we want it at the
1590 beginning. */
1591 tree result = TREE_CHAIN (pending_elaborations);
1593 TREE_CHAIN (pending_elaborations) = 0;
1594 return result;
1597 /* Return nonzero if there are pending elaborations. */
 1599 int
 1600 pending_elaborations_p ()
1602 return TREE_CHAIN (pending_elaborations) != 0;
1605 /* Save a copy of the current pending elaboration list and make a new
1606 one. */
1608 void
1609 push_pending_elaborations ()
1611 struct e_stack *p = (struct e_stack *) ggc_alloc (sizeof (struct e_stack));
1613 p->next = elist_stack;
1614 p->elab_list = pending_elaborations;
1615 elist_stack = p;
1616 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
1619 /* Pop the stack of pending elaborations. */
1621 void
1622 pop_pending_elaborations ()
1624 struct e_stack *p = elist_stack;
1626 pending_elaborations = p->elab_list;
1627 elist_stack = p->next;
1630 /* Return the current position in pending_elaborations so we can insert
1631 elaborations after that point. */
1633 tree
1634 get_elaboration_location ()
1636 return tree_last (pending_elaborations);
1639 /* Insert the current elaborations after ELAB, which is in some elaboration
1640 list. */
1642 void
1643 insert_elaboration_list (elab)
1644 tree elab;
1646 tree next = TREE_CHAIN (elab);
1648 if (TREE_CHAIN (pending_elaborations))
1650 TREE_CHAIN (elab) = TREE_CHAIN (pending_elaborations);
1651 TREE_CHAIN (tree_last (pending_elaborations)) = next;
1652 TREE_CHAIN (pending_elaborations) = 0;
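/* Illustrative sketch only (an assumption, not in the original source):
   a nested elaboration context is typically handled with the routines
   above along the lines of

     push_pending_elaborations ();
     add_pending_elaborations (var_decl, var_init);
     if (pending_elaborations_p ())
       list = get_pending_elaborations ();
     pop_pending_elaborations ();

   where the retrieved LIST is a chain of TREE_LIST nodes whose
   TREE_PURPOSE is the variable and TREE_VALUE its initializer.  */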
1656 /* Returns a LABEL_DECL node for LABEL_NAME. */
1658 tree
1659 create_label_decl (label_name)
1660 tree label_name;
1662 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1664 DECL_CONTEXT (label_decl) = current_function_decl;
1665 DECL_MODE (label_decl) = VOIDmode;
1666 DECL_SOURCE_LINE (label_decl) = lineno;
1667 DECL_SOURCE_FILE (label_decl) = input_filename;
1669 return label_decl;
1672 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1673 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1674 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1675 PARM_DECL nodes chained through the TREE_CHAIN field).
1677 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1678 appropriate fields in the FUNCTION_DECL. */
1680 tree
1681 create_subprog_decl (subprog_name, asm_name, subprog_type, param_decl_list,
1682 inline_flag, public_flag, extern_flag, attr_list)
1683 tree subprog_name;
1684 tree asm_name;
1685 tree subprog_type;
1686 tree param_decl_list;
1687 int inline_flag;
1688 int public_flag;
1689 int extern_flag;
1690 struct attrib *attr_list;
1692 tree return_type = TREE_TYPE (subprog_type);
1693 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1695 /* If this is a function nested inside an inlined external function, it
1696 means we aren't going to compile the outer function unless it is
1697 actually inlined, so do the same for us. */
1698 if (current_function_decl != 0 && DECL_INLINE (current_function_decl)
1699 && DECL_EXTERNAL (current_function_decl))
1700 extern_flag = 1;
1702 DECL_EXTERNAL (subprog_decl) = extern_flag;
1703 TREE_PUBLIC (subprog_decl) = public_flag;
1704 DECL_INLINE (subprog_decl) = inline_flag;
1705 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1706 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1707 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1708 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1709 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1711 if (asm_name != 0)
1712 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1714 process_attributes (subprog_decl, attr_list);
1716 /* Add this decl to the current binding level. */
1717 subprog_decl = pushdecl (subprog_decl);
1719 /* Output the assembler code and/or RTL for the declaration. */
1720 rest_of_decl_compilation (subprog_decl, 0, global_bindings_p (), 0);
1722 return subprog_decl;
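/* Illustrative sketch, added for exposition; not part of the original
   source.  It shows one plausible call to create_subprog_decl above for
   an imported (extern), public, parameterless subprogram; the
   identifier name is hypothetical.  */
#if 0
static tree
example_make_extern_proc ()
{
  tree ftype = build_function_type (void_type_node, NULL_TREE);

  /* No parameters, not inlined, public, external, no attributes.  */
  return create_subprog_decl (get_identifier ("example_proc"),
			      get_identifier ("example_proc"),
			      ftype, NULL_TREE, 0, 1, 1,
			      (struct attrib *) 0);
}
#endif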
1725 /* Count how deep we are into nested functions. This is because
1726 we shouldn't call the backend function context routines unless we
1727 are in a nested function. */
1729 static int function_nesting_depth;
1731 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1732 body. This routine needs to be invoked before processing the declarations
1733 appearing in the subprogram. */
1735 void
1736 begin_subprog_body (subprog_decl)
1737 tree subprog_decl;
1739 tree param_decl_list;
1740 tree param_decl;
1741 tree next_param;
1743 if (function_nesting_depth++ != 0)
1744 push_function_context ();
1746 announce_function (subprog_decl);
1748 /* Make this field nonzero so further routines know that this is not
1749 tentative. error_mark_node is replaced below (in poplevel) with the
1750 adequate BLOCK. */
1751 DECL_INITIAL (subprog_decl) = error_mark_node;
1753 /* This function exists in static storage. This does not mean `static' in
1754 the C sense! */
1755 TREE_STATIC (subprog_decl) = 1;
1757 /* Enter a new binding level. */
1758 current_function_decl = subprog_decl;
1759 pushlevel (0);
1761 /* Push all the PARM_DECL nodes onto the current scope (i.e. the scope of the
1762 subprogram body) so that they can be recognized as local variables in the
1763 subprogram.
1765 The list of PARM_DECL nodes is stored in the right order in
1766 DECL_ARGUMENTS. Since ..._DECL nodes get stored in the reverse order in
1767 which they are transmitted to `pushdecl' we need to reverse the list of
1768 PARM_DECLs if we want it to be stored in the right order. The reason why
1769 we want to make sure the PARM_DECLs are stored in the correct order is
1770 that this list will be retrieved in a few lines with a call to `getdecls'
1771 to store it back into the DECL_ARGUMENTS field. */
1772 param_decl_list = nreverse (DECL_ARGUMENTS (subprog_decl));
1774 for (param_decl = param_decl_list; param_decl; param_decl = next_param)
1776 next_param = TREE_CHAIN (param_decl);
1777 TREE_CHAIN (param_decl) = NULL;
1778 pushdecl (param_decl);
1781 /* Store back the PARM_DECL nodes. They appear in the right order. */
1782 DECL_ARGUMENTS (subprog_decl) = getdecls ();
1784 init_function_start (subprog_decl, input_filename, lineno);
1785 expand_function_start (subprog_decl, 0);
1787 /* If this function is `main', emit a call to `__main'
1788 to run global initializers, etc. */
1789 if (DECL_ASSEMBLER_NAME (subprog_decl) != 0
1790 && MAIN_NAME_P (DECL_ASSEMBLER_NAME (subprog_decl))
1791 && DECL_CONTEXT (subprog_decl) == NULL_TREE)
1792 expand_main_function ();
1795 /* Finish the definition of the current subprogram and compile it all the way
1796 to assembler language output. */
1798 void
1799 end_subprog_body ()
1801 tree decl;
1802 tree cico_list;
1804 poplevel (1, 0, 1);
1805 BLOCK_SUPERCONTEXT (DECL_INITIAL (current_function_decl))
1806 = current_function_decl;
1808 /* Mark the RESULT_DECL as being in this subprogram. */
1809 DECL_CONTEXT (DECL_RESULT (current_function_decl)) = current_function_decl;
1811 expand_function_end (input_filename, lineno, 0);
1813 /* If this is a nested function, push a new GC context. That will keep
1814 local variables on the stack from being collected while we're doing
1815 the compilation of this function. */
1816 if (function_nesting_depth > 1)
1817 ggc_push_context ();
1819 rest_of_compilation (current_function_decl);
1821 if (function_nesting_depth > 1)
1822 ggc_pop_context ();
1824 #if 0
1825 /* If we're sure this function is defined in this file then mark it
1826 as such */
1827 if (TREE_ASM_WRITTEN (current_function_decl))
1828 mark_fn_defined_in_this_file (current_function_decl);
1829 #endif
1831 /* Throw away any VAR_DECLs we made for OUT parameters; they must
1832 not be seen when we call this function and will be in
1833 unallocated memory anyway. */
1834 for (cico_list = TYPE_CI_CO_LIST (TREE_TYPE (current_function_decl));
1835 cico_list != 0; cico_list = TREE_CHAIN (cico_list))
1836 TREE_VALUE (cico_list) = 0;
1838 if (DECL_SAVED_INSNS (current_function_decl) == 0)
1840 /* Throw away DECL_RTL in any PARM_DECLs unless this function
1841 was saved for inline, in which case the DECL_RTLs are in
1842 preserved memory. */
1843 for (decl = DECL_ARGUMENTS (current_function_decl);
1844 decl != 0; decl = TREE_CHAIN (decl))
1846 SET_DECL_RTL (decl, 0);
1847 DECL_INCOMING_RTL (decl) = 0;
1850 /* Similarly, discard DECL_RTL of the return value. */
1851 SET_DECL_RTL (DECL_RESULT (current_function_decl), 0);
1853 /* But DECL_INITIAL must remain nonzero so we know this
1854 was an actual function definition unless toplev.c decided not
1855 to inline it. */
1856 if (DECL_INITIAL (current_function_decl) != 0)
1857 DECL_INITIAL (current_function_decl) = error_mark_node;
1859 DECL_ARGUMENTS (current_function_decl) = 0;
1862 /* If we are not at the bottom of the function nesting stack, pop up to
1863 the containing function. Otherwise show we aren't in any function. */
1864 if (--function_nesting_depth != 0)
1865 pop_function_context ();
1866 else
1867 current_function_decl = 0;
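/* Illustrative sketch, added for exposition; not part of the original
   source.  It shows the expected pairing of begin_subprog_body and
   end_subprog_body around the expansion of a subprogram body;
   GNU_SUBPROG is a hypothetical FUNCTION_DECL made by
   create_subprog_decl.  */
#if 0
static void
example_compile_body (gnu_subprog)
     tree gnu_subprog;
{
  begin_subprog_body (gnu_subprog);

  /* ... expand the declarations and statements of the body here,
     between the begin and end calls ...  */

  end_subprog_body ();
}
#endif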
1870 /* Return a definition for a builtin function named NAME and whose data type
1871 is TYPE. TYPE should be a function type with argument types.
1872 FUNCTION_CODE tells later passes how to compile calls to this function.
1873 See tree.h for its possible values.
1875 If LIBRARY_NAME is nonzero, use that for DECL_ASSEMBLER_NAME,
1876 the name to be called if we can't opencode the function. If
1877 ATTRS is nonzero, use that for the function attribute list. */
1879 tree
1880 builtin_function (name, type, function_code, class, library_name, attrs)
1881 const char *name;
1882 tree type;
1883 int function_code;
1884 enum built_in_class class;
1885 const char *library_name;
1886 tree attrs;
1888 tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);
1890 DECL_EXTERNAL (decl) = 1;
1891 TREE_PUBLIC (decl) = 1;
1892 if (library_name)
1893 SET_DECL_ASSEMBLER_NAME (decl, get_identifier (library_name));
1895 pushdecl (decl);
1896 DECL_BUILT_IN_CLASS (decl) = class;
1897 DECL_FUNCTION_CODE (decl) = function_code;
1898 if (attrs)
1899 decl_attributes (&decl, attrs, ATTR_FLAG_BUILT_IN);
1900 return decl;
1903 /* Return an integer type with the number of bits of precision given by
1904 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
1905 it is a signed type. */
1907 tree
1908 gnat_type_for_size (precision, unsignedp)
1909 unsigned precision;
1910 int unsignedp;
1912 tree t;
1913 char type_name[20];
1915 if (precision <= 2 * MAX_BITS_PER_WORD
1916 && signed_and_unsigned_types[precision][unsignedp] != 0)
1917 return signed_and_unsigned_types[precision][unsignedp];
1919 if (unsignedp)
1920 t = make_unsigned_type (precision);
1921 else
1922 t = make_signed_type (precision);
1924 if (precision <= 2 * MAX_BITS_PER_WORD)
1925 signed_and_unsigned_types[precision][unsignedp] = t;
1927 if (TYPE_NAME (t) == 0)
1929 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
1930 TYPE_NAME (t) = get_identifier (type_name);
1933 return t;
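/* Illustrative sketch, added for exposition; not part of the original
   source.  gnat_type_for_size caches one type per precision and
   signedness, so repeated requests return the same node.  */
#if 0
static void
example_type_for_size ()
{
  /* Unsigned 32-bit type; a second identical request returns the same
     cached node, named "UNSIGNED_32" above if it had no name yet.  */
  tree u32 = gnat_type_for_size (32, 1);

  /* Signed 16-bit type, likewise named "SIGNED_16".  */
  tree s16 = gnat_type_for_size (16, 0);
}
#endif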
1936 /* Likewise for floating-point types. */
1938 static tree
1939 float_type_for_size (precision, mode)
1940 int precision;
1941 enum machine_mode mode;
1943 tree t;
1944 char type_name[20];
1946 if (float_types[(int) mode] != 0)
1947 return float_types[(int) mode];
1949 float_types[(int) mode] = t = make_node (REAL_TYPE);
1950 TYPE_PRECISION (t) = precision;
1951 layout_type (t);
1953 if (TYPE_MODE (t) != mode)
1954 gigi_abort (414);
1956 if (TYPE_NAME (t) == 0)
1958 sprintf (type_name, "FLOAT_%d", precision);
1959 TYPE_NAME (t) = get_identifier (type_name);
1962 return t;
1965 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
1966 an unsigned type; otherwise a signed type is returned. */
1968 tree
1969 gnat_type_for_mode (mode, unsignedp)
1970 enum machine_mode mode;
1971 int unsignedp;
1973 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
1974 return float_type_for_size (GET_MODE_BITSIZE (mode), mode);
1975 else
1976 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
1979 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
1981 tree
1982 gnat_unsigned_type (type_node)
1983 tree type_node;
1985 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
1987 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
1989 type = copy_node (type);
1990 TREE_TYPE (type) = type_node;
1992 else if (TREE_TYPE (type_node) != 0
1993 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
1994 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
1996 type = copy_node (type);
1997 TREE_TYPE (type) = TREE_TYPE (type_node);
2000 return type;
2003 /* Return the signed version of a TYPE_NODE, a scalar type. */
2005 tree
2006 gnat_signed_type (type_node)
2007 tree type_node;
2009 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2011 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2013 type = copy_node (type);
2014 TREE_TYPE (type) = type_node;
2016 else if (TREE_TYPE (type_node) != 0
2017 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2018 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2020 type = copy_node (type);
2021 TREE_TYPE (type) = TREE_TYPE (type_node);
2024 return type;
2027 /* Return a type the same as TYPE except unsigned or signed according to
2028 UNSIGNEDP. */
2030 tree
2031 gnat_signed_or_unsigned_type (unsignedp, type)
2032 int unsignedp;
2033 tree type;
2035 if (! INTEGRAL_TYPE_P (type) || TREE_UNSIGNED (type) == unsignedp)
2036 return type;
2037 else
2038 return gnat_type_for_size (TYPE_PRECISION (type), unsignedp);
2041 /* EXP is an expression for the size of an object. If this size contains
2042 discriminant references, replace them with the maximum (if MAX_P) or
2043 minimum (if ! MAX_P) possible value of the discriminant. */
2045 tree
2046 max_size (exp, max_p)
2047 tree exp;
2048 int max_p;
2050 enum tree_code code = TREE_CODE (exp);
2051 tree type = TREE_TYPE (exp);
2053 switch (TREE_CODE_CLASS (code))
2055 case 'd':
2056 case 'c':
2057 return exp;
2059 case 'x':
2060 if (code == TREE_LIST)
2061 return tree_cons (TREE_PURPOSE (exp),
2062 max_size (TREE_VALUE (exp), max_p),
2063 TREE_CHAIN (exp) != 0
2064 ? max_size (TREE_CHAIN (exp), max_p) : 0);
2065 break;
2067 case 'r':
2068 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2069 modify. Otherwise, we abort since it is something we can't
2070 handle. */
2071 if (! contains_placeholder_p (exp))
2072 gigi_abort (406);
2074 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2075 return
2076 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), 1);
2078 case '<':
2079 return max_p ? size_one_node : size_zero_node;
2081 case '1':
2082 case '2':
2083 case 'e':
2084 switch (TREE_CODE_LENGTH (code))
2086 case 1:
2087 if (code == NON_LVALUE_EXPR)
2088 return max_size (TREE_OPERAND (exp, 0), max_p);
2089 else
2090 return
2091 fold (build1 (code, type,
2092 max_size (TREE_OPERAND (exp, 0),
2093 code == NEGATE_EXPR ? ! max_p : max_p)));
2095 case 2:
2096 if (code == RTL_EXPR)
2097 gigi_abort (407);
2098 else if (code == COMPOUND_EXPR)
2099 return max_size (TREE_OPERAND (exp, 1), max_p);
2100 else if (code == WITH_RECORD_EXPR)
2101 return exp;
2104 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2105 tree rhs = max_size (TREE_OPERAND (exp, 1),
2106 code == MINUS_EXPR ? ! max_p : max_p);
2108 /* Special-case wanting the maximum value of a MIN_EXPR.
2109 In that case, if one side overflows, return the other.
2110 sizetype is signed, but we know sizes are non-negative.
2111 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2112 overflowing or the maximum possible value and the RHS
2113 a variable. */
2114 if (max_p && code == MIN_EXPR && TREE_OVERFLOW (rhs))
2115 return lhs;
2116 else if (max_p && code == MIN_EXPR && TREE_OVERFLOW (lhs))
2117 return rhs;
2118 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2119 && (TREE_OVERFLOW (lhs)
2120 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2121 && ! TREE_CONSTANT (rhs))
2122 return lhs;
2123 else
2124 return fold (build (code, type, lhs, rhs));
2127 case 3:
2128 if (code == SAVE_EXPR)
2129 return exp;
2130 else if (code == COND_EXPR)
2131 return fold (build (MAX_EXPR, type,
2132 max_size (TREE_OPERAND (exp, 1), max_p),
2133 max_size (TREE_OPERAND (exp, 2), max_p)));
2134 else if (code == CALL_EXPR && TREE_OPERAND (exp, 1) != 0)
2135 return build (CALL_EXPR, type, TREE_OPERAND (exp, 0),
2136 max_size (TREE_OPERAND (exp, 1), max_p));
2140 gigi_abort (408);
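/* Illustrative sketch, added for exposition; not part of the original
   source.  For a size expression that contains a discriminant
   reference (a COMPONENT_REF of a PLACEHOLDER_EXPR), max_size with
   MAX_P set substitutes the maximum value of the discriminant's type,
   so the result can fold to a constant upper bound on the object
   size.  SIZE_EXPR is hypothetical.  */
#if 0
static tree
example_max_size (size_expr)
     tree size_expr;
{
  /* Largest possible value of SIZE_EXPR over all discriminant values;
     pass 0 instead of 1 for the smallest possible value.  */
  return max_size (size_expr, 1);
}
#endif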
2143 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2144 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2145 Return a constructor for the template. */
2147 tree
2148 build_template (template_type, array_type, expr)
2149 tree template_type;
2150 tree array_type;
2151 tree expr;
2153 tree template_elts = NULL_TREE;
2154 tree bound_list = NULL_TREE;
2155 tree field;
2157 if (TREE_CODE (array_type) == RECORD_TYPE
2158 && (TYPE_IS_PADDING_P (array_type)
2159 || TYPE_LEFT_JUSTIFIED_MODULAR_P (array_type)))
2160 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2162 if (TREE_CODE (array_type) == ARRAY_TYPE
2163 || (TREE_CODE (array_type) == INTEGER_TYPE
2164 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2165 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2167 /* First make the list for a CONSTRUCTOR for the template. Go down the
2168 field list of the template instead of the type chain because this
2169 array might be an Ada array of arrays and we can't tell where the
2170 nested arrays stop being the underlying object. */
2172 for (field = TYPE_FIELDS (template_type); field;
2173 (bound_list != 0
2174 ? (bound_list = TREE_CHAIN (bound_list))
2175 : (array_type = TREE_TYPE (array_type))),
2176 field = TREE_CHAIN (TREE_CHAIN (field)))
2178 tree bounds, min, max;
2180 /* If we have a bound list, get the bounds from there. Likewise
2181 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2182 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2183 This will give us a maximum range. */
2184 if (bound_list != 0)
2185 bounds = TREE_VALUE (bound_list);
2186 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2187 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2188 else if (expr != 0 && TREE_CODE (expr) == PARM_DECL
2189 && DECL_BY_COMPONENT_PTR_P (expr))
2190 bounds = TREE_TYPE (field);
2191 else
2192 gigi_abort (411);
2194 min = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MIN_VALUE (bounds));
2195 max = convert (TREE_TYPE (field), TYPE_MAX_VALUE (bounds));
2197 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2198 surround them with a WITH_RECORD_EXPR giving EXPR as the
2199 OBJECT. */
2200 if (! TREE_CONSTANT (min) && contains_placeholder_p (min))
2201 min = build (WITH_RECORD_EXPR, TREE_TYPE (min), min, expr);
2202 if (! TREE_CONSTANT (max) && contains_placeholder_p (max))
2203 max = build (WITH_RECORD_EXPR, TREE_TYPE (max), max, expr);
2205 template_elts = tree_cons (TREE_CHAIN (field), max,
2206 tree_cons (field, min, template_elts));
2209 return build_constructor (template_type, nreverse (template_elts));
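/* Illustrative sketch, added for exposition; not part of the original
   source.  Given the template type and a constrained array type,
   build_template above yields a CONSTRUCTOR holding the low and high
   bound of each dimension; TEMPLATE_TYPE and GNU_ARR_TYPE are
   hypothetical.  */
#if 0
static tree
example_make_bounds (template_type, gnu_arr_type)
     tree template_type;
     tree gnu_arr_type;
{
  /* No expression is needed when the bounds contain no
     PLACEHOLDER_EXPRs.  */
  return build_template (template_type, gnu_arr_type, NULL_TREE);
}
#endif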
2212 /* Build a VMS descriptor from a Mechanism_Type, which must specify
2213 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2214 in the type contains in its DECL_INITIAL the expression to use when
2215 a constructor is made for the type. GNAT_ENTITY is a gnat node used
2216 to print out an error message if the mechanism cannot be applied to
2217 an object of that type and also for the name. */
2219 tree
2220 build_vms_descriptor (type, mech, gnat_entity)
2221 tree type;
2222 Mechanism_Type mech;
2223 Entity_Id gnat_entity;
2225 tree record_type = make_node (RECORD_TYPE);
2226 tree field_list = 0;
2227 int class;
2228 int dtype = 0;
2229 tree inner_type;
2230 int ndim;
2231 int i;
2232 tree *idx_arr;
2233 tree tem;
2235 /* If TYPE is an unconstrained array, use the underlying array type. */
2236 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2237 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2239 /* If this is an array, compute the number of dimensions in the array,
2240 get the index types, and point to the inner type. */
2241 if (TREE_CODE (type) != ARRAY_TYPE)
2242 ndim = 0;
2243 else
2244 for (ndim = 1, inner_type = type;
2245 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2246 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2247 ndim++, inner_type = TREE_TYPE (inner_type))
2250 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2252 if (mech != By_Descriptor_NCA
2253 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2254 for (i = ndim - 1, inner_type = type;
2255 i >= 0;
2256 i--, inner_type = TREE_TYPE (inner_type))
2257 idx_arr[i] = TYPE_DOMAIN (inner_type);
2258 else
2259 for (i = 0, inner_type = type;
2260 i < ndim;
2261 i++, inner_type = TREE_TYPE (inner_type))
2262 idx_arr[i] = TYPE_DOMAIN (inner_type);
2264 /* Now get the DTYPE value. */
2265 switch (TREE_CODE (type))
2267 case INTEGER_TYPE:
2268 case ENUMERAL_TYPE:
2269 if (TYPE_VAX_FLOATING_POINT_P (type))
2270 switch ((int) TYPE_DIGITS_VALUE (type))
2272 case 6:
2273 dtype = 10;
2274 break;
2275 case 9:
2276 dtype = 11;
2277 break;
2278 case 15:
2279 dtype = 27;
2280 break;
2282 else
2283 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2285 case 8:
2286 dtype = TREE_UNSIGNED (type) ? 2 : 6;
2287 break;
2288 case 16:
2289 dtype = TREE_UNSIGNED (type) ? 3 : 7;
2290 break;
2291 case 32:
2292 dtype = TREE_UNSIGNED (type) ? 4 : 8;
2293 break;
2294 case 64:
2295 dtype = TREE_UNSIGNED (type) ? 5 : 9;
2296 break;
2297 case 128:
2298 dtype = TREE_UNSIGNED (type) ? 25 : 26;
2299 break;
2301 break;
2303 case REAL_TYPE:
2304 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2305 break;
2307 case COMPLEX_TYPE:
2308 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2309 && TYPE_VAX_FLOATING_POINT_P (type))
2310 switch ((int) TYPE_DIGITS_VALUE (type))
2312 case 6:
2313 dtype = 12;
2314 break;
2315 case 9:
2316 dtype = 13;
2317 break;
2318 case 15:
2319 dtype = 29;
2321 else
2322 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2323 break;
2325 case ARRAY_TYPE:
2326 dtype = 14;
2327 break;
2329 default:
2330 break;
2333 /* Get the CLASS value. */
2334 switch (mech)
2336 case By_Descriptor_A:
2337 class = 4;
2338 break;
2339 case By_Descriptor_NCA:
2340 class = 10;
2341 break;
2342 case By_Descriptor_SB:
2343 class = 15;
2344 break;
2345 default:
2346 class = 1;
2349 /* Make the type for a descriptor for VMS. The first four fields
2350 are the same for all types. */
2352 field_list
2353 = chainon (field_list,
2354 make_descriptor_field
2355 ("LENGTH", gnat_type_for_size (16, 1), record_type,
2356 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2358 field_list = chainon (field_list,
2359 make_descriptor_field ("DTYPE",
2360 gnat_type_for_size (8, 1),
2361 record_type, size_int (dtype)));
2362 field_list = chainon (field_list,
2363 make_descriptor_field ("CLASS",
2364 gnat_type_for_size (8, 1),
2365 record_type, size_int (class)));
2367 field_list
2368 = chainon (field_list,
2369 make_descriptor_field ("POINTER",
2370 build_pointer_type (type),
2371 record_type,
2372 build1 (ADDR_EXPR,
2373 build_pointer_type (type),
2374 build (PLACEHOLDER_EXPR,
2375 type))));
2377 switch (mech)
2379 case By_Descriptor:
2380 case By_Descriptor_S:
2381 break;
2383 case By_Descriptor_SB:
2384 field_list
2385 = chainon (field_list,
2386 make_descriptor_field
2387 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2388 TREE_CODE (type) == ARRAY_TYPE
2389 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2390 field_list
2391 = chainon (field_list,
2392 make_descriptor_field
2393 ("SB_L2", gnat_type_for_size (32, 1), record_type,
2394 TREE_CODE (type) == ARRAY_TYPE
2395 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2396 break;
2398 case By_Descriptor_A:
2399 case By_Descriptor_NCA:
2400 field_list = chainon (field_list,
2401 make_descriptor_field ("SCALE",
2402 gnat_type_for_size (8, 1),
2403 record_type,
2404 size_zero_node));
2406 field_list = chainon (field_list,
2407 make_descriptor_field ("DIGITS",
2408 gnat_type_for_size (8, 1),
2409 record_type,
2410 size_zero_node));
2412 field_list
2413 = chainon (field_list,
2414 make_descriptor_field
2415 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2416 size_int (mech == By_Descriptor_NCA
2418 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2419 : (TREE_CODE (type) == ARRAY_TYPE
2420 && TYPE_CONVENTION_FORTRAN_P (type)
2421 ? 224 : 192))));
2423 field_list = chainon (field_list,
2424 make_descriptor_field ("DIMCT",
2425 gnat_type_for_size (8, 1),
2426 record_type,
2427 size_int (ndim)));
2429 field_list = chainon (field_list,
2430 make_descriptor_field ("ARSIZE",
2431 gnat_type_for_size (32, 1),
2432 record_type,
2433 size_in_bytes (type)));
2435 /* Now build a pointer to the 0,0,0... element. */
2436 tem = build (PLACEHOLDER_EXPR, type);
2437 for (i = 0, inner_type = type; i < ndim;
2438 i++, inner_type = TREE_TYPE (inner_type))
2439 tem = build (ARRAY_REF, TREE_TYPE (inner_type), tem,
2440 convert (TYPE_DOMAIN (inner_type), size_zero_node));
2442 field_list
2443 = chainon (field_list,
2444 make_descriptor_field
2445 ("A0", build_pointer_type (inner_type), record_type,
2446 build1 (ADDR_EXPR, build_pointer_type (inner_type), tem)));
2448 /* Next come the addressing coefficients. */
2449 tem = size_int (1);
2450 for (i = 0; i < ndim; i++)
2452 char fname[3];
2453 tree idx_length
2454 = size_binop (MULT_EXPR, tem,
2455 size_binop (PLUS_EXPR,
2456 size_binop (MINUS_EXPR,
2457 TYPE_MAX_VALUE (idx_arr[i]),
2458 TYPE_MIN_VALUE (idx_arr[i])),
2459 size_int (1)));
2461 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
2462 fname[1] = '0' + i, fname[2] = 0;
2463 field_list
2464 = chainon (field_list,
2465 make_descriptor_field (fname,
2466 gnat_type_for_size (32, 1),
2467 record_type, idx_length));
2469 if (mech == By_Descriptor_NCA)
2470 tem = idx_length;
2473 /* Finally here are the bounds. */
2474 for (i = 0; i < ndim; i++)
2476 char fname[3];
2478 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2479 field_list
2480 = chainon (field_list,
2481 make_descriptor_field
2482 (fname, gnat_type_for_size (32, 1), record_type,
2483 TYPE_MIN_VALUE (idx_arr[i])));
2485 fname[0] = 'U';
2486 field_list
2487 = chainon (field_list,
2488 make_descriptor_field
2489 (fname, gnat_type_for_size (32, 1), record_type,
2490 TYPE_MAX_VALUE (idx_arr[i])));
2492 break;
2494 default:
2495 post_error ("unsupported descriptor type for &", gnat_entity);
2498 finish_record_type (record_type, field_list, 0, 1);
2499 pushdecl (build_decl (TYPE_DECL, create_concat_name (gnat_entity, "DESC"),
2500 record_type));
2502 return record_type;
2505 /* Utility routine for above code to make a field. */
2507 static tree
2508 make_descriptor_field (name, type, rec_type, initial)
2509 const char *name;
2510 tree type;
2511 tree rec_type;
2512 tree initial;
2514 tree field
2515 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2517 DECL_INITIAL (field) = initial;
2518 return field;
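/* Illustrative sketch, added for exposition; not part of the original
   source.  It shows a plausible call to build_vms_descriptor above for
   a parameter passed by descriptor; GNU_TYPE and GNAT_PARAM are
   hypothetical.  The resulting RECORD_TYPE starts with the common
   LENGTH, DTYPE, CLASS and POINTER fields and, for the array
   mechanisms, continues with the dimension and bounds fields built
   above.  */
#if 0
static tree
example_vms_descriptor (gnu_type, gnat_param)
     tree gnu_type;
     Entity_Id gnat_param;
{
  return build_vms_descriptor (gnu_type, By_Descriptor_A, gnat_param);
}
#endif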
2521 /* Build a type to be used to represent an aliased object whose nominal
2522 type is an unconstrained array. This consists of a RECORD_TYPE containing
2523 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
2524 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
2525 is used to represent an arbitrary unconstrained object. Use NAME
2526 as the name of the record. */
2528 tree
2529 build_unc_object_type (template_type, object_type, name)
2530 tree template_type;
2531 tree object_type;
2532 tree name;
2534 tree type = make_node (RECORD_TYPE);
2535 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
2536 template_type, type, 0, 0, 0, 1);
2537 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
2538 type, 0, 0, 0, 1);
2540 TYPE_NAME (type) = name;
2541 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
2542 finish_record_type (type,
2543 chainon (chainon (NULL_TREE, template_field),
2544 array_field),
2545 0, 0);
2547 return type;
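/* Illustrative sketch, added for exposition; not part of the original
   source.  It shows a plausible call to build_unc_object_type above;
   GNU_TEMPLATE_TYPE and GNU_ARRAY_TYPE are hypothetical.  */
#if 0
static tree
example_unc_object (gnu_template_type, gnu_array_type)
     tree gnu_template_type;
     tree gnu_array_type;
{
  /* The result is a RECORD_TYPE with a BOUNDS field of the template
     type followed by an ARRAY field of the array type, and with
     TYPE_CONTAINS_TEMPLATE_P set.  */
  return build_unc_object_type (gnu_template_type, gnu_array_type,
				get_identifier ("example_UNC"));
}
#endif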
2550 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
2551 the normal case this is just two adjustments, but we have more to do
2552 if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
2554 void
2555 update_pointer_to (old_type, new_type)
2556 tree old_type;
2557 tree new_type;
2559 tree ptr = TYPE_POINTER_TO (old_type);
2560 tree ref = TYPE_REFERENCE_TO (old_type);
2561 tree type;
2563 /* If this is the main variant, process all the other variants first. */
2564 if (TYPE_MAIN_VARIANT (old_type) == old_type)
2565 for (type = TYPE_NEXT_VARIANT (old_type); type != 0;
2566 type = TYPE_NEXT_VARIANT (type))
2567 update_pointer_to (type, new_type);
2569 /* If no pointer or reference, we are done. Otherwise, get the new type with
2570 the same qualifiers as the old type and see if it is the same as the old
2571 type. */
2572 if (ptr == 0 && ref == 0)
2573 return;
2575 new_type = build_qualified_type (new_type, TYPE_QUALS (old_type));
2576 if (old_type == new_type)
2577 return;
2579 /* First handle the simple case. */
2580 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
2582 if (ptr != 0)
2583 TREE_TYPE (ptr) = new_type;
2584 TYPE_POINTER_TO (new_type) = ptr;
2586 if (ref != 0)
2587 TREE_TYPE (ref) = new_type;
2588 TYPE_REFERENCE_TO (new_type) = ref;
2590 if (ptr != 0 && TYPE_NAME (ptr) != 0
2591 && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL
2592 && TREE_CODE (new_type) != ENUMERAL_TYPE)
2593 rest_of_decl_compilation (TYPE_NAME (ptr), NULL,
2594 global_bindings_p (), 0);
2595 if (ref != 0 && TYPE_NAME (ref) != 0
2596 && TREE_CODE (TYPE_NAME (ref)) == TYPE_DECL
2597 && TREE_CODE (new_type) != ENUMERAL_TYPE)
2598 rest_of_decl_compilation (TYPE_NAME (ref), NULL,
2599 global_bindings_p (), 0);
2602 /* Now deal with the unconstrained array case. In this case the "pointer"
2603 is actually a RECORD_TYPE where the types of both fields are
2604 pointers to void. In that case, copy the field list from the
2605 old type to the new one and update the fields' context. */
2606 else if (TREE_CODE (ptr) != RECORD_TYPE || ! TYPE_IS_FAT_POINTER_P (ptr))
2607 gigi_abort (412);
2609 else
2611 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
2612 tree ptr_temp_type;
2613 tree new_ref;
2614 tree var;
2616 TYPE_FIELDS (ptr) = TYPE_FIELDS (TYPE_POINTER_TO (new_type));
2617 DECL_CONTEXT (TYPE_FIELDS (ptr)) = ptr;
2618 DECL_CONTEXT (TREE_CHAIN (TYPE_FIELDS (ptr))) = ptr;
2620 /* Rework the PLACEHOLDER_EXPR inside the reference to the
2621 template bounds.
2623 ??? This is now the only use of gnat_substitute_in_type, which
2624 is now a very "heavy" routine to do this, so it should be replaced
2625 at some point. */
2626 ptr_temp_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (ptr)));
2627 new_ref = build (COMPONENT_REF, ptr_temp_type,
2628 build (PLACEHOLDER_EXPR, ptr),
2629 TREE_CHAIN (TYPE_FIELDS (ptr)));
2631 update_pointer_to
2632 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
2633 gnat_substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
2634 TREE_CHAIN (TYPE_FIELDS (ptr)), new_ref));
2636 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
2637 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
2639 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
2640 = TREE_TYPE (new_type) = ptr;
2642 /* Now handle updating the allocation record, what the thin pointer
2643 points to. Update all pointers from the old record into the new
2644 one, update the types of the fields, and recompute the size. */
2646 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
2648 TREE_TYPE (TYPE_FIELDS (new_obj_rec)) = TREE_TYPE (ptr_temp_type);
2649 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2650 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr)));
2651 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2652 = TYPE_SIZE (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
2653 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2654 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
2656 TYPE_SIZE (new_obj_rec)
2657 = size_binop (PLUS_EXPR,
2658 DECL_SIZE (TYPE_FIELDS (new_obj_rec)),
2659 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
2660 TYPE_SIZE_UNIT (new_obj_rec)
2661 = size_binop (PLUS_EXPR,
2662 DECL_SIZE_UNIT (TYPE_FIELDS (new_obj_rec)),
2663 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
2664 rest_of_type_compilation (ptr, global_bindings_p ());
2668 /* Convert a pointer to a constrained array into a pointer to a fat
2669 pointer. This involves making or finding a template. */
2671 static tree
2672 convert_to_fat_pointer (type, expr)
2673 tree type;
2674 tree expr;
2676 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
2677 tree template, template_addr;
2678 tree etype = TREE_TYPE (expr);
2680 /* If EXPR is a constant of zero, we make a fat pointer that has a null
2681 pointer to the template and array. */
2682 if (integer_zerop (expr))
2683 return
2684 build_constructor
2685 (type,
2686 tree_cons (TYPE_FIELDS (type),
2687 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
2688 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2689 convert (build_pointer_type (template_type),
2690 expr),
2691 NULL_TREE)));
2693 /* If EXPR is a thin pointer, make the template and data from the record. */
2695 else if (TYPE_THIN_POINTER_P (etype))
2697 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
2699 expr = save_expr (expr);
2700 if (TREE_CODE (expr) == ADDR_EXPR)
2701 expr = TREE_OPERAND (expr, 0);
2702 else
2703 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
2705 template = build_component_ref (expr, NULL_TREE, fields);
2706 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
2707 build_component_ref (expr, NULL_TREE,
2708 TREE_CHAIN (fields)));
2710 else
2711 /* Otherwise, build the constructor for the template. */
2712 template = build_template (template_type, TREE_TYPE (etype), expr);
2714 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
2716 /* The result is a CONSTRUCTOR for the fat pointer. */
2717 return
2718 build_constructor (type,
2719 tree_cons (TYPE_FIELDS (type), expr,
2720 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2721 template_addr, NULL_TREE)));
2724 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
2725 is something that is a fat pointer, so convert to it first if EXPR
2726 is not already a fat pointer. */
2728 static tree
2729 convert_to_thin_pointer (type, expr)
2730 tree type;
2731 tree expr;
2733 if (! TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
2734 expr
2735 = convert_to_fat_pointer
2736 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
2738 /* We get the pointer to the data and use a NOP_EXPR to make it the
2739 proper GCC type. */
2740 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)));
2741 expr = build1 (NOP_EXPR, type, expr);
2743 return expr;
2746 /* Create an expression whose value is that of EXPR,
2747 converted to type TYPE. The TREE_TYPE of the value
2748 is always TYPE. This function implements all reasonable
2749 conversions; callers should filter out those that are
2750 not permitted by the language being compiled. */
2752 tree
2753 convert (type, expr)
2754 tree type, expr;
2756 enum tree_code code = TREE_CODE (type);
2757 tree etype = TREE_TYPE (expr);
2758 enum tree_code ecode = TREE_CODE (etype);
2759 tree tem;
2761 /* If EXPR is already the right type, we are done. */
2762 if (type == etype)
2763 return expr;
2765 /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
2766 new one. */
2767 if (TREE_CODE (expr) == WITH_RECORD_EXPR)
2768 return build (WITH_RECORD_EXPR, type,
2769 convert (type, TREE_OPERAND (expr, 0)),
2770 TREE_OPERAND (expr, 1));
2772 /* If the input type has padding, remove it by doing a component reference
2773 to the field. If the output type has padding, make a constructor
2774 to build the record. If both input and output have padding and are
2775 of variable size, do this as an unchecked conversion. */
2776 if (ecode == RECORD_TYPE && code == RECORD_TYPE
2777 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
2778 && (! TREE_CONSTANT (TYPE_SIZE (type))
2779 || ! TREE_CONSTANT (TYPE_SIZE (etype))))
2781 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
2783 /* If we have just converted to this padded type, just get
2784 the inner expression. */
2785 if (TREE_CODE (expr) == CONSTRUCTOR
2786 && CONSTRUCTOR_ELTS (expr) != 0
2787 && TREE_PURPOSE (CONSTRUCTOR_ELTS (expr)) == TYPE_FIELDS (etype))
2788 return TREE_VALUE (CONSTRUCTOR_ELTS (expr));
2789 else
2790 return convert (type, build_component_ref (expr, NULL_TREE,
2791 TYPE_FIELDS (etype)));
2793 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
2795 /* If we previously converted from another type and our type is
2796 of variable size, remove the conversion to avoid the need for
2797 variable-size temporaries. */
2798 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
2799 && ! TREE_CONSTANT (TYPE_SIZE (type)))
2800 expr = TREE_OPERAND (expr, 0);
2802 /* If we are just removing the padding from expr, convert the original
2803 object if we have variable size. That will avoid the need
2804 for some variable-size temporaries. */
2805 if (TREE_CODE (expr) == COMPONENT_REF
2806 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
2807 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
2808 && ! TREE_CONSTANT (TYPE_SIZE (type)))
2809 return convert (type, TREE_OPERAND (expr, 0));
2811 /* If the result type is a padded type with a self-referentially-sized
2812 field and the expression type is a record, do this as an
2813 unchecked conversion. */
2814 else if (TREE_CODE (DECL_SIZE (TYPE_FIELDS (type))) != INTEGER_CST
2815 && contains_placeholder_p (DECL_SIZE (TYPE_FIELDS (type)))
2816 && TREE_CODE (etype) == RECORD_TYPE)
2817 return unchecked_convert (type, expr);
2819 else
2820 return
2821 build_constructor (type,
2822 tree_cons (TYPE_FIELDS (type),
2823 convert (TREE_TYPE
2824 (TYPE_FIELDS (type)),
2825 expr),
2826 NULL_TREE));
2829 /* If the input is a biased type, adjust first. */
2830 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
2831 return convert (type, fold (build (PLUS_EXPR, TREE_TYPE (etype),
2832 fold (build1 (GNAT_NOP_EXPR,
2833 TREE_TYPE (etype), expr)),
2834 TYPE_MIN_VALUE (etype))));
2836 /* If the input is a left-justified modular type, we need to extract
2837 the actual object before converting it to any other type with the
2838 exception of an unconstrained array. */
2839 if (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)
2840 && code != UNCONSTRAINED_ARRAY_TYPE)
2841 return convert (type, build_component_ref (expr, NULL_TREE,
2842 TYPE_FIELDS (etype)));
2844 /* If converting a type that does not contain a template into one
2845 that does, convert to the data type and then build the template. */
2846 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type)
2847 && ! (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype)))
2849 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
2851 return
2852 build_constructor
2853 (type,
2854 tree_cons (TYPE_FIELDS (type),
2855 build_template (TREE_TYPE (TYPE_FIELDS (type)),
2856 obj_type, NULL_TREE),
2857 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2858 convert (obj_type, expr), NULL_TREE)));
2861 /* There are some special cases of expressions that we process
2862 specially. */
2863 switch (TREE_CODE (expr))
2865 case ERROR_MARK:
2866 return expr;
2868 case TRANSFORM_EXPR:
2869 case NULL_EXPR:
2870 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
2871 conversion in gnat_expand_expr. NULL_EXPR does not represent
2872 an actual value, so no conversion is needed. */
2873 TREE_TYPE (expr) = type;
2874 return expr;
2876 case STRING_CST:
2877 case CONSTRUCTOR:
2878 /* If we are converting a STRING_CST to another constrained array type,
2879 just make a new one in the proper type. Likewise for a
2880 CONSTRUCTOR. But if the mode of the type is different, we must
2881 ensure a new RTL is made for the constant. */
2882 if (code == ecode && AGGREGATE_TYPE_P (etype)
2883 && ! (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
2884 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
2886 expr = copy_node (expr);
2887 TREE_TYPE (expr) = type;
2889 if (TYPE_MODE (type) != TYPE_MODE (etype))
2890 TREE_CST_RTL (expr) = 0;
2892 return expr;
2894 break;
2896 case COMPONENT_REF:
2897 /* If we are converting between two aggregate types of the same
2898 kind, size, mode, and alignment, just make a new COMPONENT_REF.
2899 This avoids unneeded conversions, which would make reference computations
2900 more complex. */
2901 if (code == ecode && TYPE_MODE (type) == TYPE_MODE (etype)
2902 && AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
2903 && TYPE_ALIGN (type) == TYPE_ALIGN (etype)
2904 && operand_equal_p (TYPE_SIZE (type), TYPE_SIZE (etype), 0))
2905 return build (COMPONENT_REF, type, TREE_OPERAND (expr, 0),
2906 TREE_OPERAND (expr, 1));
2908 break;
2910 case UNCONSTRAINED_ARRAY_REF:
2911 /* Convert this to the type of the inner array by getting the address of
2912 the array from the template. */
2913 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
2914 build_component_ref (TREE_OPERAND (expr, 0),
2915 get_identifier ("P_ARRAY"),
2916 NULL_TREE));
2917 etype = TREE_TYPE (expr);
2918 ecode = TREE_CODE (etype);
2919 break;
2921 case VIEW_CONVERT_EXPR:
2922 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
2923 && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
2924 return convert (type, TREE_OPERAND (expr, 0));
2925 break;
2927 case INDIRECT_REF:
2928 /* If both types are record types, just convert the pointer and
2929 make a new INDIRECT_REF.
2931 ??? Disable this for now since it causes problems with the
2932 code in build_binary_op for MODIFY_EXPR which wants to
2933 strip off conversions. But that code really is a mess and
2934 we need to do this a much better way some time. */
2935 if (0
2936 && (TREE_CODE (type) == RECORD_TYPE
2937 || TREE_CODE (type) == UNION_TYPE)
2938 && (TREE_CODE (etype) == RECORD_TYPE
2939 || TREE_CODE (etype) == UNION_TYPE)
2940 && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
2941 return build_unary_op (INDIRECT_REF, NULL_TREE,
2942 convert (build_pointer_type (type),
2943 TREE_OPERAND (expr, 0)));
2944 break;
2946 default:
2947 break;
2950 /* Check for converting to a pointer to an unconstrained array. */
2951 if (TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
2952 return convert_to_fat_pointer (type, expr);
2954 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
2955 || (code == INTEGER_CST && ecode == INTEGER_CST
2956 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
2957 return fold (build1 (NOP_EXPR, type, expr));
2959 switch (code)
2961 case VOID_TYPE:
2962 return build1 (CONVERT_EXPR, type, expr);
2964 case INTEGER_TYPE:
2965 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
2966 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE))
2967 return unchecked_convert (type, expr);
2968 else if (TYPE_BIASED_REPRESENTATION_P (type))
2969 return fold (build1 (CONVERT_EXPR, type,
2970 fold (build (MINUS_EXPR, TREE_TYPE (type),
2971 convert (TREE_TYPE (type), expr),
2972 TYPE_MIN_VALUE (type)))));
2974 /* ... fall through ... */
2976 case ENUMERAL_TYPE:
2977 return fold (convert_to_integer (type, expr));
2979 case POINTER_TYPE:
2980 case REFERENCE_TYPE:
2981 /* If converting between two pointers to records denoting
2982 both a template and type, adjust if needed to account
2983 for any differing offsets, since one might be negative. */
2984 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
2986 tree bit_diff
2987 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
2988 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
2989 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
2990 sbitsize_int (BITS_PER_UNIT));
2992 expr = build1 (NOP_EXPR, type, expr);
2993 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
2994 if (integer_zerop (byte_diff))
2995 return expr;
2997 return build_binary_op (PLUS_EXPR, type, expr,
2998 fold (convert_to_pointer (type, byte_diff)));
3001 /* If converting to a thin pointer, handle specially. */
3002 if (TYPE_THIN_POINTER_P (type)
3003 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != 0)
3004 return convert_to_thin_pointer (type, expr);
3006 /* If converting fat pointer to normal pointer, get the pointer to the
3007 array and then convert it. */
3008 else if (TYPE_FAT_POINTER_P (etype))
3009 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
3010 NULL_TREE);
3012 return fold (convert_to_pointer (type, expr));
3014 case REAL_TYPE:
3015 return fold (convert_to_real (type, expr));
3017 case RECORD_TYPE:
3018 if (TYPE_LEFT_JUSTIFIED_MODULAR_P (type) && ! AGGREGATE_TYPE_P (etype))
3019 return
3020 build_constructor
3021 (type, tree_cons (TYPE_FIELDS (type),
3022 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3023 NULL_TREE));
3025 /* ... fall through ... */
3027 case ARRAY_TYPE:
3028 /* In these cases, assume the front-end has validated the conversion.
3029 If the conversion is valid, it will be a bit-wise conversion, so
3030 it can be viewed as an unchecked conversion. */
3031 return unchecked_convert (type, expr);
3033 case UNION_TYPE:
3034 /* Just validate that the type is indeed that of a field
3035 of the type. Then make the simple conversion. */
3036 for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
3037 if (TREE_TYPE (tem) == etype)
3038 return build1 (CONVERT_EXPR, type, expr);
3040 gigi_abort (413);
3042 case UNCONSTRAINED_ARRAY_TYPE:
3043 /* If EXPR is a constrained array, take its address, convert it to a
3044 fat pointer, and then dereference it. Likewise if EXPR is a
3045 record containing both a template and a constrained array.
3046 Note that a record representing a left-justified modular type
3047 always represents a packed constrained array. */
3048 if (ecode == ARRAY_TYPE
3049 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
3050 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
3051 || (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)))
3052 return
3053 build_unary_op
3054 (INDIRECT_REF, NULL_TREE,
3055 convert_to_fat_pointer (TREE_TYPE (type),
3056 build_unary_op (ADDR_EXPR,
3057 NULL_TREE, expr)));
3059 /* Do something very similar for converting one unconstrained
3060 array to another. */
3061 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
3062 return
3063 build_unary_op (INDIRECT_REF, NULL_TREE,
3064 convert (TREE_TYPE (type),
3065 build_unary_op (ADDR_EXPR,
3066 NULL_TREE, expr)));
3067 else
3068 gigi_abort (409);
3070 case COMPLEX_TYPE:
3071 return fold (convert_to_complex (type, expr));
3073 default:
3074 gigi_abort (410);
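/* Illustrative sketch, added for exposition; not part of the original
   source.  convert above implements all reasonable conversions, so a
   typical translation step simply converts an operand to the type
   expected by its context; GNU_EXPR is hypothetical.  */
#if 0
static tree
example_convert_to_unsigned_32 (gnu_expr)
     tree gnu_expr;
{
  /* For integral operands this ends up in convert_to_integer; padded
     records, fat pointers, etc. are handled by the cases above.  */
  return convert (gnat_type_for_size (32, 1), gnu_expr);
}
#endif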
3078 /* Remove all conversions that are done in EXP. This includes converting
3079 from a padded type or to a left-justified modular type. If TRUE_ADDRESS
3080 is nonzero, always return the address of the containing object even if
3081 the address is not bit-aligned. */
3083 tree
3084 remove_conversions (exp, true_address)
3085 tree exp;
3086 int true_address;
3088 switch (TREE_CODE (exp))
3090 case CONSTRUCTOR:
3091 if (true_address
3092 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3093 && TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
3094 return remove_conversions (TREE_VALUE (CONSTRUCTOR_ELTS (exp)), 1);
3095 break;
3097 case COMPONENT_REF:
3098 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3099 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3100 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3101 break;
3103 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
3104 case NOP_EXPR: case CONVERT_EXPR: case GNAT_NOP_EXPR:
3105 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3107 default:
3108 break;
3111 return exp;
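/* Illustrative sketch, added for exposition; not part of the original
   source.  remove_conversions above strips VIEW_CONVERT_EXPR,
   NON_LVALUE_EXPR, NOP_EXPR, CONVERT_EXPR and GNAT_NOP_EXPR wrappers,
   and always looks through COMPONENT_REFs out of padded records;
   GNU_EXPR is hypothetical.  */
#if 0
static tree
example_strip (gnu_expr)
     tree gnu_expr;
{
  /* Pass 1 to also look through a left-justified modular CONSTRUCTOR
     to the object it wraps, as when taking an address.  */
  return remove_conversions (gnu_expr, 1);
}
#endif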
3114 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
3115 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
3116 likewise return an expression pointing to the underlying array. */
3118 tree
3119 maybe_unconstrained_array (exp)
3120 tree exp;
3122 enum tree_code code = TREE_CODE (exp);
3123 tree new;
3125 switch (TREE_CODE (TREE_TYPE (exp)))
3127 case UNCONSTRAINED_ARRAY_TYPE:
3128 if (code == UNCONSTRAINED_ARRAY_REF)
3131 = build_unary_op (INDIRECT_REF, NULL_TREE,
3132 build_component_ref (TREE_OPERAND (exp, 0),
3133 get_identifier ("P_ARRAY"),
3134 NULL_TREE));
3135 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
3136 return new;
3139 else if (code == NULL_EXPR)
3140 return build1 (NULL_EXPR,
3141 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
3142 (TREE_TYPE (TREE_TYPE (exp))))),
3143 TREE_OPERAND (exp, 0));
3145 else if (code == WITH_RECORD_EXPR
3146 && (TREE_OPERAND (exp, 0)
3147 != (new = maybe_unconstrained_array
3148 (TREE_OPERAND (exp, 0)))))
3149 return build (WITH_RECORD_EXPR, TREE_TYPE (new), new,
3150 TREE_OPERAND (exp, 1));
3152 case RECORD_TYPE:
3153 if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
3156 = build_component_ref (exp, NULL_TREE,
3157 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))));
3158 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
3159 && TYPE_IS_PADDING_P (TREE_TYPE (new)))
3160 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (new))), new);
3162 return new;
3164 break;
3166 default:
3167 break;
3170 return exp;
3173 /* Return an expression that does an unchecked conversion of EXPR to TYPE. */
3175 tree
3176 unchecked_convert (type, expr)
3177 tree type;
3178 tree expr;
3180 tree etype = TREE_TYPE (expr);
3182 /* If the expression is already the right type, we are done. */
3183 if (etype == type)
3184 return expr;
3186 /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
3187 new one. */
3188 if (TREE_CODE (expr) == WITH_RECORD_EXPR)
3189 return build (WITH_RECORD_EXPR, type,
3190 unchecked_convert (type, TREE_OPERAND (expr, 0)),
3191 TREE_OPERAND (expr, 1));
3193 /* If both types are integral, just do a normal conversion.
3194 Likewise for a conversion to an unconstrained array. */
3195 if ((((INTEGRAL_TYPE_P (type)
3196 && ! (TREE_CODE (type) == INTEGER_TYPE
3197 && TYPE_VAX_FLOATING_POINT_P (type)))
3198 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
3199 || (TREE_CODE (type) == RECORD_TYPE
3200 && TYPE_LEFT_JUSTIFIED_MODULAR_P (type)))
3201 && ((INTEGRAL_TYPE_P (etype)
3202 && ! (TREE_CODE (etype) == INTEGER_TYPE
3203 && TYPE_VAX_FLOATING_POINT_P (etype)))
3204 || (POINTER_TYPE_P (etype) && ! TYPE_THIN_POINTER_P (etype))
3205 || (TREE_CODE (etype) == RECORD_TYPE
3206 && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype))))
3207 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3209 tree rtype = type;
3211 if (TREE_CODE (etype) == INTEGER_TYPE
3212 && TYPE_BIASED_REPRESENTATION_P (etype))
3214 tree ntype = copy_type (etype);
3216 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
3217 TYPE_MAIN_VARIANT (ntype) = ntype;
3218 expr = build1 (GNAT_NOP_EXPR, ntype, expr);
3221 if (TREE_CODE (type) == INTEGER_TYPE
3222 && TYPE_BIASED_REPRESENTATION_P (type))
3224 rtype = copy_type (type);
3225 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
3226 TYPE_MAIN_VARIANT (rtype) = rtype;
3229 expr = convert (rtype, expr);
3230 if (type != rtype)
3231 expr = build1 (GNAT_NOP_EXPR, type, expr);
3234 /* If we are converting TO an integral type whose precision is not the
3235 same as its size, first unchecked convert to a record that contains
3236 an object of the output type. Then extract the field. */
3237 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
3238 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3239 GET_MODE_BITSIZE (TYPE_MODE (type))))
3241 tree rec_type = make_node (RECORD_TYPE);
3242 tree field = create_field_decl (get_identifier ("OBJ"), type,
3243 rec_type, 1, 0, 0, 0);
3245 TYPE_FIELDS (rec_type) = field;
3246 layout_type (rec_type);
3248 expr = unchecked_convert (rec_type, expr);
3249 expr = build_component_ref (expr, NULL_TREE, field);
3252 /* Similarly for integral input type whose precision is not equal to its
3253 size. */
3254 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype) != 0
3255 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
3256 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3258 tree rec_type = make_node (RECORD_TYPE);
3259 tree field
3260 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
3261 1, 0, 0, 0);
3263 TYPE_FIELDS (rec_type) = field;
3264 layout_type (rec_type);
3266 expr = build_constructor (rec_type, build_tree_list (field, expr));
3267 expr = unchecked_convert (type, expr);
3270 /* We have a special case when we are converting between two
3271 unconstrained array types. In that case, take the address,
3272 convert the fat pointer types, and dereference. */
3273 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
3274 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3275 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3276 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
3277 build_unary_op (ADDR_EXPR, NULL_TREE,
3278 expr)));
3279 else
3281 expr = maybe_unconstrained_array (expr);
3282 etype = TREE_TYPE (expr);
3283 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
3286 /* If the result is an integral type whose size is not equal to
3287 the size of the underlying machine type, sign- or zero-extend
3288 the result. We need not do this in the case where the input is
3289 an integral type of the same precision and signedness or if the output
3290 is a biased type or if both the input and output are unsigned. */
3291 if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
3292 && ! (TREE_CODE (type) == INTEGER_TYPE
3293 && TYPE_BIASED_REPRESENTATION_P (type))
3294 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3295 GET_MODE_BITSIZE (TYPE_MODE (type)))
3296 && ! (INTEGRAL_TYPE_P (etype)
3297 && TREE_UNSIGNED (type) == TREE_UNSIGNED (etype)
3298 && operand_equal_p (TYPE_RM_SIZE (type),
3299 (TYPE_RM_SIZE (etype) != 0
3300 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
3302 && ! (TREE_UNSIGNED (type) && TREE_UNSIGNED (etype)))
3304 tree base_type = gnat_type_for_mode (TYPE_MODE (type),
3305 TREE_UNSIGNED (type));
3306 tree shift_expr
3307 = convert (base_type,
3308 size_binop (MINUS_EXPR,
3309 bitsize_int
3310 (GET_MODE_BITSIZE (TYPE_MODE (type))),
3311 TYPE_RM_SIZE (type)));
3312 expr
3313 = convert (type,
3314 build_binary_op (RSHIFT_EXPR, base_type,
3315 build_binary_op (LSHIFT_EXPR, base_type,
3316 convert (base_type, expr),
3317 shift_expr),
3318 shift_expr));
3321 /* An unchecked conversion should never raise Constraint_Error. The code
3322 below assumes that GCC's conversion routines overflow the same way that
3323 the underlying hardware does. This is probably true. In the rare case
3324 when it is false, we can rely on the fact that such conversions are
3325 erroneous anyway. */
3326 if (TREE_CODE (expr) == INTEGER_CST)
3327 TREE_OVERFLOW (expr) = TREE_CONSTANT_OVERFLOW (expr) = 0;
3329 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
3330 show no longer constant. */
3331 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3332 && ! operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype), 1))
3333 TREE_CONSTANT (expr) = 0;
3335 return expr;
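/* Illustrative sketch, added for exposition; not part of the original
   source.  For an unchecked conversion between two types of the same
   size, the routine above reduces to a plain conversion for integral
   types or a VIEW_CONVERT_EXPR otherwise; GNU_EXPR is hypothetical.  */
#if 0
static tree
example_unchecked_to_unsigned_32 (gnu_expr)
     tree gnu_expr;
{
  /* Reinterpret GNU_EXPR as a 32-bit unsigned value with no range
     check; sign- or zero-extension of the result is handled above
     when the target type's RM size differs from its mode size.  */
  return unchecked_convert (gnat_type_for_size (32, 1), gnu_expr);
}
#endif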
3338 #include "gt-ada-utils.h"
3339 #include "gtype-ada.h"