1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
10 * Copyright (C) 1992-2002, Free Software Foundation, Inc. *
12 * GNAT is free software; you can redistribute it and/or modify it under *
13 * terms of the GNU General Public License as published by the Free Soft- *
14 * ware Foundation; either version 2, or (at your option) any later ver- *
15 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
16 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
17 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
18 * for more details. You should have received a copy of the GNU General *
19 * Public License distributed with GNAT; see file COPYING. If not, write *
20 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
21 * MA 02111-1307, USA. *
23 * GNAT was originally developed by the GNAT team at New York University. *
24 * It is now maintained by Ada Core Technologies Inc (http://www.gnat.com). *
26 ****************************************************************************/
#ifndef MAX_FIXED_MODE_SIZE
#define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
#endif

#ifndef MAX_BITS_PER_WORD
#define MAX_BITS_PER_WORD  BITS_PER_WORD
#endif

/* If nonzero, pretend we are allocating at global level.  */
int force_global;
/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Associates a GNAT tree node to a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See documentation
   of `save_gnu_tree' for more info.  */
static tree *associate_gnat_to_gnu;

/* This listhead is used to record any global objects that need elaboration.
   TREE_PURPOSE is the variable to be elaborated and TREE_VALUE is the
   initial value to assign.  */

static tree pending_elaborations;

/* This stack allows us to momentarily switch to generating elaboration
   lists for an inner context.  */

static struct e_stack {struct e_stack *next; tree elab_list; } *elist_stack;
/* This variable keeps a table for types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes.  */
static tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static tree float_types[NUM_MACHINE_MODES];
/* For each binding contour we allocate a binding_level structure which records
   the entities defined or declared in that contour.  Contours include:

	one for each subprogram definition
	one for each compound statement (declare block)

   Binding contours are used to create GCC tree BLOCK nodes.  */

struct binding_level
{
  /* A chain of ..._DECL nodes for all variables, constants, functions,
     parameters and type declarations.  These ..._DECL nodes are chained
     through the TREE_CHAIN field.  Note that these ..._DECL nodes are stored
     in the reverse of the order supplied to be compatible with the
     back-end.  */
  tree names;

  /* For each level (except the global one), a chain of BLOCK nodes for all
     the levels that were entered and exited one level down from this one.  */
  tree blocks;

  /* The BLOCK node for this level, if one has been preallocated.
     If 0, the BLOCK is allocated (if needed) when the level is popped.  */
  tree this_block;

  /* The binding level containing this one (the enclosing binding level).  */
  struct binding_level *level_chain;
};
/* The binding level currently in effect.  */
static struct binding_level *current_binding_level = NULL;

/* A chain of binding_level structures awaiting reuse.  */
static struct binding_level *free_binding_level = NULL;

/* The outermost binding level.  This binding level is created when the
   compiler is started and it will exist through the entire compilation.  */
static struct binding_level *global_binding_level;

/* Binding level structures are initialized by copying this one.  */
static struct binding_level clear_binding_level
  = {NULL, NULL, NULL, NULL};
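
/* Illustrative note, not part of the original source: the variables above
   implement a simple stack of scopes.  A schematic use by the rest of gigi
   would be

	pushlevel (0);
	pushdecl (gnu_decl);
	poplevel (1, 0, 0);

   i.e. enter a scope, record declarations in it, then leave it, letting
   poplevel build the corresponding BLOCK node.  free_binding_level merely
   caches popped binding_level structures so that a later pushlevel can
   reuse them instead of calling xmalloc again.  */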
static tree merge_sizes			PARAMS ((tree, tree, tree, int, int));
static tree compute_related_constant	PARAMS ((tree, tree));
static tree split_plus			PARAMS ((tree, tree *));
static int value_zerop			PARAMS ((tree));
static tree float_type_for_size		PARAMS ((int, enum machine_mode));
static tree convert_to_fat_pointer	PARAMS ((tree, tree));
static tree convert_to_thin_pointer	PARAMS ((tree, tree));
static tree make_descriptor_field	PARAMS ((const char *,tree, tree,
						 tree));
static void mark_binding_level		PARAMS ((PTR));
static void mark_e_stack		PARAMS ((PTR));
/* Initialize the association of GNAT nodes to GCC trees.  */

  associate_gnat_to_gnu = (tree *) xmalloc (max_gnat_nodes * sizeof (tree));
  ggc_add_tree_root (associate_gnat_to_gnu, max_gnat_nodes);

  for (gnat_node = 0; gnat_node < max_gnat_nodes; gnat_node++)
    associate_gnat_to_gnu[gnat_node] = NULL_TREE;

  pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
  ggc_add_tree_root (&pending_elaborations, 1);
  ggc_add_root ((PTR) &elist_stack, 1, sizeof (struct e_stack), mark_e_stack);
  ggc_add_tree_root (&signed_and_unsigned_types[0][0],
		     (sizeof signed_and_unsigned_types
		      / sizeof signed_and_unsigned_types[0][0]));
  ggc_add_tree_root (float_types, ARRAY_SIZE (float_types));

  ggc_add_root (&current_binding_level, 1, sizeof current_binding_level,
		mark_binding_level);
/* GNAT_ENTITY is a GNAT tree node for an entity.  GNU_DECL is the GCC tree
   which is to be associated with GNAT_ENTITY.  Such GCC tree node is always
   a ..._DECL node.  If NO_CHECK is nonzero, the latter check is suppressed.

   If GNU_DECL is zero, a previous association is to be reset.  */

save_gnu_tree (gnat_entity, gnu_decl, no_check)
     Entity_Id gnat_entity;

      && (associate_gnat_to_gnu[gnat_entity - First_Node_Id]
	  || (! no_check && ! DECL_P (gnu_decl))))

  associate_gnat_to_gnu[gnat_entity - First_Node_Id] = gnu_decl;

/* GNAT_ENTITY is a GNAT tree node for a defining identifier.
   Return the ..._DECL node that was associated with it.  If there is no tree
   node associated with GNAT_ENTITY, abort.

   In some cases, such as delayed elaboration or expressions that need to
   be elaborated only once, GNAT_ENTITY is really not an entity.  */

get_gnu_tree (gnat_entity)
     Entity_Id gnat_entity;

  if (! associate_gnat_to_gnu[gnat_entity - First_Node_Id])

  return associate_gnat_to_gnu[gnat_entity - First_Node_Id];
/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY.  */

present_gnu_tree (gnat_entity)
     Entity_Id gnat_entity;

  return (associate_gnat_to_gnu[gnat_entity - First_Node_Id] != NULL_TREE);
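
/* Illustrative note, not part of the original source: the three routines
   above are normally used together.  Schematically, when an entity is first
   translated one does

	if (! present_gnu_tree (gnat_entity))
	  save_gnu_tree (gnat_entity, gnu_decl, 0);

   and later references simply call get_gnu_tree (gnat_entity) to retrieve
   the ..._DECL node that was recorded.  */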
/* Return non-zero if we are currently in the global binding level.  */

  return (force_global != 0 || current_binding_level == global_binding_level

/* Return the list of declarations in the current level.  Note that this list
   is in reverse order (it has to be so for back-end compatibility).  */

  return current_binding_level->names;

/* Nonzero if the current level needs to have a BLOCK made.  */

  return (current_binding_level->names != 0);
/* Enter a new binding level.  The input parameter is ignored, but has to be
   specified for back-end compatibility.  */

     int ignore ATTRIBUTE_UNUSED;

  struct binding_level *newlevel = NULL;

  /* Reuse a struct for this binding level, if there is one.  */
  if (free_binding_level)
    {
      newlevel = free_binding_level;
      free_binding_level = free_binding_level->level_chain;
    }

      = (struct binding_level *) xmalloc (sizeof (struct binding_level));

  *newlevel = clear_binding_level;

  /* Add this level to the front of the chain (stack) of levels that are
     active.  */
  newlevel->level_chain = current_binding_level;
  current_binding_level = newlevel;
/* Exit a binding level.
   Pop the level off, and restore the state of the identifier-decl mappings
   that were in effect when this level was entered.

   If KEEP is nonzero, this level had explicit declarations, so
   create a "block" (a BLOCK node) for the level
   to record its declarations and subblocks for symbol table output.

   If FUNCTIONBODY is nonzero, this level is the body of a function,
   so create a block as if KEEP were set and also clear out all

   If REVERSE is nonzero, reverse the order of decls before putting
   them into the BLOCK.  */

poplevel (keep, reverse, functionbody)
  /* Points to a GCC BLOCK tree node.  This is the BLOCK node constructed for
     the binding level that we are about to exit and which is returned by this
     routine.  */
  tree block = NULL_TREE;

  tree subblock_chain = current_binding_level->blocks;

  int block_previously_created;

  /* Reverse the list of XXXX_DECL nodes if desired.  Note that the ..._DECL
     nodes chained through the `names' field of current_binding_level are in
     reverse order except for PARM_DECL nodes, which are explicitly stored in
     the right order.  */
  current_binding_level->names
    = decl_chain = (reverse) ? nreverse (current_binding_level->names)
      : current_binding_level->names;

  /* Output any nested inline functions within this block which must be
     compiled because their address is needed.  */
  for (decl_node = decl_chain; decl_node; decl_node = TREE_CHAIN (decl_node))
    if (TREE_CODE (decl_node) == FUNCTION_DECL
	&& ! TREE_ASM_WRITTEN (decl_node) && TREE_ADDRESSABLE (decl_node)
	&& DECL_INITIAL (decl_node) != 0)
      {
	push_function_context ();
	output_inline_function (decl_node);
	pop_function_context ();
      }
  block_previously_created = (current_binding_level->this_block != 0);
  if (block_previously_created)
    block = current_binding_level->this_block;
  else if (keep || functionbody)
    block = make_node (BLOCK);

      BLOCK_VARS (block) = keep ? decl_chain : 0;
      BLOCK_SUBBLOCKS (block) = subblock_chain;

  /* Record the BLOCK node just built as the subblock of its enclosing
     scope.  */
  for (subblock_node = subblock_chain; subblock_node;
       subblock_node = TREE_CHAIN (subblock_node))
    BLOCK_SUPERCONTEXT (subblock_node) = block;
  /* Clear out the meanings of the local variables of this level.  */

  for (subblock_node = decl_chain; subblock_node;
       subblock_node = TREE_CHAIN (subblock_node))
    if (DECL_NAME (subblock_node) != 0)
      /* If the identifier was used or addressed via a local extern decl,
	 don't forget that fact.  */
      if (DECL_EXTERNAL (subblock_node))
	{
	  if (TREE_USED (subblock_node))
	    TREE_USED (DECL_NAME (subblock_node)) = 1;
	  if (TREE_ADDRESSABLE (subblock_node))
	    TREE_ADDRESSABLE (DECL_ASSEMBLER_NAME (subblock_node)) = 1;
	}

  /* Pop the current level, and free the structure for reuse.  */
  struct binding_level *level = current_binding_level;
  current_binding_level = current_binding_level->level_chain;
  level->level_chain = free_binding_level;
  free_binding_level = level;
      /* This is the top level block of a function.  The ..._DECL chain stored
	 in BLOCK_VARS are the function's parameters (PARM_DECL nodes).  Don't
	 leave them in the BLOCK because they are found in the FUNCTION_DECL
	 instead.  */
      DECL_INITIAL (current_function_decl) = block;
      BLOCK_VARS (block) = 0;

      if (!block_previously_created)
	current_binding_level->blocks
	  = chainon (current_binding_level->blocks, block);

  /* If we did not make a block for the level just exited, any blocks made for
     inner levels (since they cannot be recorded as subblocks in that level)
     must be carried forward so they will later become subblocks of something
     else.  */
  else if (subblock_chain)
    current_binding_level->blocks
      = chainon (current_binding_level->blocks, subblock_chain);

    TREE_USED (block) = 1;
/* Insert BLOCK at the end of the list of subblocks of the
   current binding level.  This is used when a BIND_EXPR is expanded,
   to handle the BLOCK node inside the BIND_EXPR.  */

  TREE_USED (block) = 1;
  current_binding_level->blocks
    = chainon (current_binding_level->blocks, block);

/* Set the BLOCK node for the innermost scope
   (the one we are currently in).  */

  current_binding_level->this_block = block;
  current_binding_level->names = chainon (current_binding_level->names,
					   BLOCK_VARS (block));
  current_binding_level->blocks = chainon (current_binding_level->blocks,
					    BLOCK_SUBBLOCKS (block));
/* Records a ..._DECL node DECL as belonging to the current lexical scope.
   Returns the ..._DECL node.  */

  struct binding_level *b;

  /* If at top level, there is no context.  But PARM_DECLs always go in the
     level of their function.  */
  if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
    {
      b = global_binding_level;
      DECL_CONTEXT (decl) = 0;
    }

      b = current_binding_level;
      DECL_CONTEXT (decl) = current_function_decl;

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later if necessary.  This needs to be
     this way for compatibility with the back-end.

     Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into the list.  They
     will cause trouble with the debugger and aren't needed anyway.  */
  if (TREE_CODE (decl) != TYPE_DECL
      || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)

      TREE_CHAIN (decl) = b->names;
  /* For the declaration of a type, set its name if it either is not already
     set, was set to an IDENTIFIER_NODE, indicating an internal name,
     or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same POINTER_TYPE node.  Code in this
     function in c-decl.c makes a copy of the type node here, but that may
     cause us trouble with incomplete types, so let's not try it (at least
     for now).  */

  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_NAME (decl) != 0
      && (TYPE_NAME (TREE_TYPE (decl)) == 0
	  || TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == IDENTIFIER_NODE
	  || (TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == TYPE_DECL
	      && DECL_ARTIFICIAL (TYPE_NAME (TREE_TYPE (decl)))
	      && ! DECL_ARTIFICIAL (decl))))
    TYPE_NAME (TREE_TYPE (decl)) = decl;
/* Do little here.  Set up the standard declarations later after the
   front end has been run.  */

gnat_init_decl_processing ()

  /* incomplete_decl_finalize_hook is defined in toplev.c.  It needs to be set
     by each front end to the appropriate routine that handles incomplete
     VAR_DECL nodes.  This routine will be invoked by compile_file when a
     VAR_DECL node of DECL_SIZE zero is encountered.  */
  incomplete_decl_finalize_hook = finish_incomplete_decl;

  /* Make the binding_level structure for global names.  */
  current_function_decl = 0;
  current_binding_level = 0;
  free_binding_level = 0;

  global_binding_level = current_binding_level;

  build_common_tree_nodes (0);

  /* In Ada, we use a signed type for SIZETYPE.  Use the signed type
     corresponding to the size of ptr_mode.  Make this here since we need
     this before we can expand the GNAT types.  */
  set_sizetype (type_for_size (GET_MODE_BITSIZE (ptr_mode), 0));
  build_common_tree_nodes_2 (0);

  pushdecl (build_decl (TYPE_DECL, get_identifier (SIZE_TYPE), sizetype));

  /* We need to make the integer type before doing anything else.
     We stitch this in to the appropriate GNAT type later.  */
  pushdecl (build_decl (TYPE_DECL, get_identifier ("integer"),

  pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned char"),

  ptr_void_type_node = build_pointer_type (void_type_node);
/* Create the predefined scalar types such as `integer_type_node' needed
   in the gcc back-end and initialize the global binding level.  */

init_gigi_decls (long_long_float_type, exception_type)
     tree long_long_float_type, exception_type;

  /* Set the types that GCC and Gigi use from the front end.  We would like
     to do this for char_type_node, but it needs to correspond to the C
     char type.  */
  if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
    {
      /* In this case, the builtin floating point types are VAX float,
	 so make up a type for use.  */
      longest_float_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
      layout_type (longest_float_type_node);
      pushdecl (build_decl (TYPE_DECL, get_identifier ("longest float type"),
			    longest_float_type_node));
    }

    longest_float_type_node = TREE_TYPE (long_long_float_type);

  except_type_node = TREE_TYPE (exception_type);

  unsigned_type_node = type_for_size (INT_TYPE_SIZE, 1);
  pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned int"),
			unsigned_type_node));

    = pushdecl (build_decl (TYPE_DECL, get_identifier ("void"),

  void_ftype = build_function_type (void_type_node, NULL_TREE);
  ptr_void_ftype = build_pointer_type (void_ftype);
  /* Now declare runtime functions.  */
  endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);

  /* malloc is a function declaration tree for a function to allocate
     memory.  */
  malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),

				     build_function_type (ptr_void_type_node,
							  tree_cons (NULL_TREE,

				     NULL_TREE, 0, 1, 1, 0);

  /* free is a function declaration tree for a function to free memory.  */

    = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
			   build_function_type (void_type_node,
						tree_cons (NULL_TREE,

			   NULL_TREE, 0, 1, 1, 0);

  /* Make the types and functions used for exception processing.  */

    = build_array_type (type_for_mode (Pmode, 0),
			build_index_type (build_int_2 (5, 0)));
  pushdecl (build_decl (TYPE_DECL, get_identifier ("JMPBUF_T"), jmpbuf_type));
  jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);
  /* Functions to get and set the jumpbuf pointer for the current thread.  */

    = create_subprog_decl
      (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
       NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
       NULL_TREE, 0, 1, 1, 0);

    = create_subprog_decl
      (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),

       build_function_type (void_type_node,
			    tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
       NULL_TREE, 0, 1, 1, 0);

  /* Function to get the current exception.  */

    = create_subprog_decl
      (get_identifier ("system__soft_links__get_gnat_exception"),

       build_function_type (build_pointer_type (except_type_node), NULL_TREE),
       NULL_TREE, 0, 1, 1, 0);
  /* Functions that raise exceptions.  */

    = create_subprog_decl
      (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
       build_function_type (void_type_node,
			    tree_cons (NULL_TREE,
				       build_pointer_type (except_type_node),

       NULL_TREE, 0, 1, 1, 0);

  /* If in no exception handlers mode, all raise statements are redirected to
     __gnat_last_chance_handler.  No need to redefine raise_nodefer_decl,
     since this procedure will never be called in this mode.  */
  if (No_Exception_Handlers_Set ())

	= create_subprog_decl
	  (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
	   build_function_type (void_type_node,
				tree_cons (NULL_TREE,
					   build_pointer_type (char_type_node),
					   tree_cons (NULL_TREE,

	   NULL_TREE, 0, 1, 1, 0);

      for (i = 0; i < sizeof gnat_raise_decls / sizeof gnat_raise_decls[0];
	   i++)
	gnat_raise_decls[i] = decl;
  /* Otherwise, make one decl for each exception reason.  */
  for (i = 0; i < sizeof gnat_raise_decls / sizeof gnat_raise_decls[0]; i++)

      sprintf (name, "__gnat_rcheck_%.2d", i);

	= create_subprog_decl
	  (get_identifier (name), NULL_TREE,
	   build_function_type (void_type_node,
				tree_cons (NULL_TREE,

					   tree_cons (NULL_TREE,

	   NULL_TREE, 0, 1, 1, 0);
  /* Indicate that these never return.  */
  TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
  TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
  TREE_TYPE (raise_nodefer_decl)
    = build_qualified_type (TREE_TYPE (raise_nodefer_decl),

  for (i = 0; i < sizeof gnat_raise_decls / sizeof gnat_raise_decls[0]; i++)

      TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
      TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
      TREE_TYPE (gnat_raise_decls[i])
	= build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
  /* setjmp returns an integer and has one operand, which is a pointer to
     a jmpbuf.  */

    = create_subprog_decl
      (get_identifier ("setjmp"), NULL_TREE,
       build_function_type (integer_type_node,
			    tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
       NULL_TREE, 0, 1, 1, 0);

  DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
  DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;

  main_identifier_node = get_identifier ("main");

  ggc_add_tree_root (gnat_std_decls, ARRAY_SIZE (gnat_std_decls));
  ggc_add_tree_root (gnat_raise_decls, ARRAY_SIZE (gnat_raise_decls));
/* This routine is called in tree.c to print an error message for invalid use
   of an incomplete type.  */

incomplete_type_error (dont_care_1, dont_care_2)
     tree dont_care_1 ATTRIBUTE_UNUSED;
     tree dont_care_2 ATTRIBUTE_UNUSED;

/* This function is called indirectly from toplev.c to handle incomplete
   declarations, i.e. VAR_DECL nodes whose DECL_SIZE is zero.  To be precise,
   compile_file in toplev.c makes an indirect call through the function
   pointer incomplete_decl_finalize_hook which is initialized to this routine
   in init_decl_processing.  */

finish_incomplete_decl (dont_care)
     tree dont_care ATTRIBUTE_UNUSED;
/* Given a record type (RECORD_TYPE) and a chain of FIELD_DECL
   nodes (FIELDLIST), finish constructing the record or union type.
   If HAS_REP is nonzero, this record has a rep clause; don't call
   layout_type but merely set the size and alignment ourselves.
   If DEFER_DEBUG is nonzero, do not call the debugging routines
   on this type; it will be done later.  */

finish_record_type (record_type, fieldlist, has_rep, defer_debug)

  enum tree_code code = TREE_CODE (record_type);
  tree ada_size = bitsize_zero_node;
  tree size = bitsize_zero_node;
  tree size_unit = size_zero_node;

  TYPE_FIELDS (record_type) = fieldlist;

  if (TYPE_NAME (record_type) != 0
      && TREE_CODE (TYPE_NAME (record_type)) == TYPE_DECL)
    TYPE_STUB_DECL (record_type) = TYPE_NAME (record_type);

    TYPE_STUB_DECL (record_type)
      = pushdecl (build_decl (TYPE_DECL, TYPE_NAME (record_type),

  /* We don't need both the typedef name and the record name output in
     the debugging information, since they are the same.  */
  DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
  /* Globally initialize the record first.  If this is a rep'ed record,
     that just means some initializations; otherwise, layout the record.  */

      TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
      TYPE_MODE (record_type) = BLKmode;
      if (TYPE_SIZE (record_type) == 0)
	{
	  TYPE_SIZE (record_type) = bitsize_zero_node;
	  TYPE_SIZE_UNIT (record_type) = size_zero_node;
	}

      /* Ensure there isn't a size already set.  There can be in an error
	 case where there is a rep clause but all fields have errors and
	 no longer have a position.  */
      TYPE_SIZE (record_type) = 0;
      layout_type (record_type);

  /* At this point, the position and size of each field is known.  It was
     either set before entry by a rep clause, or by laying out the type
     above.  We now make a pass through the fields (in reverse order for
     QUAL_UNION_TYPEs) to compute the Ada size; the GCC size and alignment
     (for rep'ed records that are not padding types); and the mode (for
     rep'ed records).  */

  if (code == QUAL_UNION_TYPE)
    fieldlist = nreverse (fieldlist);
  for (field = fieldlist; field; field = TREE_CHAIN (field))

      tree type = TREE_TYPE (field);
      tree this_size = DECL_SIZE (field);
      tree this_size_unit = DECL_SIZE_UNIT (field);
      tree this_ada_size = DECL_SIZE (field);

      /* We need to make an XVE/XVU record if any field has variable size,
	 whether or not the record does.  For example, if we have a union,
	 it may be that all fields, rounded up to the alignment, have the
	 same size, in which case we'll use that size.  But the debug
	 output routines (except Dwarf2) won't be able to output the fields,
	 so we need to make the special record.  */
      if (TREE_CODE (this_size) != INTEGER_CST)

      if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
	   || TREE_CODE (type) == QUAL_UNION_TYPE)
	  && ! TYPE_IS_FAT_POINTER_P (type)
	  && ! TYPE_CONTAINS_TEMPLATE_P (type)
	  && TYPE_ADA_SIZE (type) != 0)
	this_ada_size = TYPE_ADA_SIZE (type);

      if (has_rep && ! DECL_BIT_FIELD (field))
	TYPE_ALIGN (record_type)
	  = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
	  ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
	  size = size_binop (MAX_EXPR, size, this_size);
	  size_unit = size_binop (MAX_EXPR, size_unit, this_size_unit);

	case QUAL_UNION_TYPE:

	    = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			   this_ada_size, ada_size));
	  size = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),

	  size_unit = fold (build (COND_EXPR, sizetype, DECL_QUALIFIER (field),
				   this_size_unit, size_unit));
	  /* Since we know here that all fields are sorted in order of
	     increasing bit position, the size of the record is one
	     higher than the ending bit of the last field processed
	     unless we have a rep clause, since in that case we might
	     have a field outside a QUAL_UNION_TYPE that has a higher ending
	     position.  So use a MAX in that case.  Also, if this field is a
	     QUAL_UNION_TYPE, we need to take into account the previous size
	     in the case of empty variants.  */

	    = merge_sizes (ada_size, bit_position (field), this_ada_size,
			   TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
	  size = merge_sizes (size, bit_position (field), this_size,
			      TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);

	    = merge_sizes (size_unit, byte_position (field), this_size_unit,
			   TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);

  if (code == QUAL_UNION_TYPE)
    nreverse (fieldlist);
  /* If this is a padding record, we never want to make the size smaller than
     what was specified in it, if any.  */
  if (TREE_CODE (record_type) == RECORD_TYPE
      && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type) != 0)
    {
      size = TYPE_SIZE (record_type);
      size_unit = TYPE_SIZE_UNIT (record_type);
    }
  /* Now set any of the values we've just computed that apply.  */
  if (! TYPE_IS_FAT_POINTER_P (record_type)
      && ! TYPE_CONTAINS_TEMPLATE_P (record_type))
    TYPE_ADA_SIZE (record_type) = ada_size;

#ifdef ROUND_TYPE_SIZE
      size = ROUND_TYPE_SIZE (record_type, size, TYPE_ALIGN (record_type));
      size_unit = ROUND_TYPE_SIZE_UNIT (record_type, size_unit,
					TYPE_ALIGN (record_type)
					/ BITS_PER_UNIT);
#else
      size = round_up (size, TYPE_ALIGN (record_type));
      size_unit = round_up (size_unit,
			    TYPE_ALIGN (record_type) / BITS_PER_UNIT);
#endif
      && ! (TREE_CODE (record_type) == RECORD_TYPE
	    && TYPE_IS_PADDING_P (record_type)
	    && TREE_CODE (size) != INTEGER_CST
	    && contains_placeholder_p (size)))
    {
      TYPE_SIZE (record_type) = size;
      TYPE_SIZE_UNIT (record_type) = size_unit;
    }

  compute_record_mode (record_type);
      /* If this record is of variable size, rename it so that the
	 debugger knows it is and make a new, parallel, record
	 that tells the debugger how the record is laid out.  See
	 exp_dbug.ads.  */

	= make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
		     ? UNION_TYPE : TREE_CODE (record_type));
      tree orig_id = DECL_NAME (TYPE_STUB_DECL (record_type));

	= concat_id_with_name (orig_id,
			       TREE_CODE (record_type) == QUAL_UNION_TYPE

      tree last_pos = bitsize_zero_node;

      TYPE_NAME (new_record_type) = new_id;
      TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
      TYPE_STUB_DECL (new_record_type)
	= pushdecl (build_decl (TYPE_DECL, new_id, new_record_type));
      DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
      DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
	= DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
      TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
      /* Now scan all the fields, replacing each field with a new
	 field corresponding to the new encoding.  */
      for (old_field = TYPE_FIELDS (record_type); old_field != 0;
	   old_field = TREE_CHAIN (old_field))

	  tree field_type = TREE_TYPE (old_field);
	  tree field_name = DECL_NAME (old_field);

	  tree curpos = bit_position (old_field);

	  unsigned int align = 0;

	  /* See how the position was modified from the last position.

	     There are two basic cases we support: a value was added
	     to the last position or the last position was rounded to
	     a boundary and then something was added.  Check for the
	     first case first.  If not, see if there is any evidence
	     of rounding.  If so, round the last position and try
	     again.

	     If this is a union, the position can be taken as zero.  */

	  if (TREE_CODE (new_record_type) == UNION_TYPE)
	    pos = bitsize_zero_node, align = 0;
	    pos = compute_related_constant (curpos, last_pos);

	  if (pos == 0 && TREE_CODE (curpos) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST)
	    {
	      align = TREE_INT_CST_LOW (TREE_OPERAND (curpos, 1));
	      pos = compute_related_constant (curpos,
					      round_up (last_pos, align));
	    }
	  else if (pos == 0 && TREE_CODE (curpos) == PLUS_EXPR
		   && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
		   && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
		   && host_integerp (TREE_OPERAND
				     (TREE_OPERAND (curpos, 0), 1),

		(TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
	      pos = compute_related_constant (curpos,
					      round_up (last_pos, align));

	  /* If we can't compute a position, set it to zero.

	     ??? We really should abort here, but it's too much work
	     to get this correct for all cases.  */

	    pos = bitsize_zero_node;
	  /* See if this type is variable-size and make a new type
	     and indicate the indirection if so.  */
	  if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)

	      field_type = build_pointer_type (field_type);

	  /* Make a new field name, if necessary.  */
	  if (var || align != 0)

	      sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
		       align / BITS_PER_UNIT);

	      strcpy (suffix, "XVL");

	      field_name = concat_id_with_name (field_name, suffix);

	  new_field = create_field_decl (field_name, field_type,

					 DECL_SIZE (old_field), pos, 0);
	  TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
	  TYPE_FIELDS (new_record_type) = new_field;

	  /* If old_field is a QUAL_UNION_TYPE, take its size as being
	     zero.  The only time it's not the last field of the record
	     is when there are other components at fixed positions after
	     it (meaning there was a rep clause for every field) and we
	     want to be able to encode them.  */
	  last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
				 (TREE_CODE (TREE_TYPE (old_field))

				  : DECL_SIZE (old_field)));

      TYPE_FIELDS (new_record_type)
	= nreverse (TYPE_FIELDS (new_record_type));

      rest_of_type_compilation (new_record_type, global_bindings_p ());

    rest_of_type_compilation (record_type, global_bindings_p ());
/* Utility function of above to merge LAST_SIZE, the previous size of a record
   with FIRST_BIT and SIZE that describe a field.  SPECIAL is nonzero
   if this represents a QUAL_UNION_TYPE in which case we must look for
   COND_EXPRs and replace a value of zero with the old size.  If HAS_REP
   is nonzero, we must take the MAX of the end position of this field
   with LAST_SIZE.  In all other cases, we use FIRST_BIT plus SIZE.

   We return an expression for the size.  */

merge_sizes (last_size, first_bit, size, special, has_rep)
     tree first_bit, size;

  tree type = TREE_TYPE (last_size);

  if (! special || TREE_CODE (size) != COND_EXPR)

      tree new = size_binop (PLUS_EXPR, first_bit, size);

	new = size_binop (MAX_EXPR, last_size, new);

  return fold (build (COND_EXPR, type, TREE_OPERAND (size, 0),
		      integer_zerop (TREE_OPERAND (size, 1))
		      ? last_size : merge_sizes (last_size, first_bit,
						 TREE_OPERAND (size, 1),

		      integer_zerop (TREE_OPERAND (size, 2))
		      ? last_size : merge_sizes (last_size, first_bit,
						 TREE_OPERAND (size, 2),
/* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
   related by the addition of a constant.  Return that constant if so.  */

compute_related_constant (op0, op1)

  tree op0_var, op1_var;
  tree op0_con = split_plus (op0, &op0_var);
  tree op1_con = split_plus (op1, &op1_var);
  tree result = size_binop (MINUS_EXPR, op0_con, op1_con);

  if (operand_equal_p (op0_var, op1_var, 0))

  else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
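
/* Illustrative note, not part of the original source: as a concrete case, if
   OP0 is N + 12 and OP1 is N + 4 for the same variable expression N, the two
   split_plus calls above yield OP0_CON = 12 and OP1_CON = 4, the variable
   parts compare equal, and the constant 8 is the result.  */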
/* Utility function of above to split a tree OP which may be a sum, into a
   constant part, which is returned, and a variable part, which is stored
   in *PVAR.  *PVAR may be size_zero_node.  All operations must be of
   sizetype.  */

split_plus (in, pvar)

  tree result = bitsize_zero_node;

  while (TREE_CODE (in) == NON_LVALUE_EXPR)
    in = TREE_OPERAND (in, 0);

  if (TREE_CODE (in) == INTEGER_CST)

      *pvar = bitsize_zero_node;

  else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)

      tree lhs_var, rhs_var;
      tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
      tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);

      result = size_binop (PLUS_EXPR, result, lhs_con);
      result = size_binop (TREE_CODE (in), result, rhs_con);

      if (lhs_var == TREE_OPERAND (in, 0)
	  && rhs_var == TREE_OPERAND (in, 1))
	return bitsize_zero_node;

      *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);

    return bitsize_zero_node;
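
/* Illustrative note, not part of the original source: given IN = X + 8,
   where X is some non-constant expression, the routine above in effect
   stores X in *PVAR and returns the constant 8; given a plain constant it
   stores a zero-size node in *PVAR and returns the constant itself.  */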
/* Return a FUNCTION_TYPE node.  RETURN_TYPE is the type returned by the
   subprogram.  If it is void_type_node, then we are dealing with a procedure,
   otherwise we are dealing with a function.  PARAM_DECL_LIST is a list of
   PARM_DECL nodes that are the subprogram arguments.  CICO_LIST is the
   copy-in/copy-out list to be stored into TYPE_CICO_LIST.
   RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
   object.  RETURNS_BY_REF is nonzero if the function returns by reference.
   RETURNS_WITH_DSP is nonzero if the function is to return with a
   depressed stack pointer.  */

create_subprog_type (return_type, param_decl_list, cico_list,
		     returns_unconstrained, returns_by_ref, returns_with_dsp)
     tree param_decl_list;
     int returns_unconstrained, returns_by_ref, returns_with_dsp;

  /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
     the subprogram formal parameters.  This list is generated by traversing
     the input list of PARM_DECL nodes.  */
  tree param_type_list = NULL;

  for (param_decl = param_decl_list; param_decl;
       param_decl = TREE_CHAIN (param_decl))
    param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
				 param_type_list);

  /* The list of the function parameter types has to be terminated by the void
     type to signal to the back-end that we are not dealing with a variable
     parameter subprogram, but that the subprogram has a fixed number of
     parameters.  */
  param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);

  /* The list of argument types has been created in reverse
     order.  */
  param_type_list = nreverse (param_type_list);

  type = build_function_type (return_type, param_type_list);
  /* TYPE may have been shared since GCC hashes types.  If it has a CICO_LIST
     or the new type should, make a copy of TYPE.  Likewise for
     RETURNS_UNCONSTRAINED and RETURNS_BY_REF.  */
  if (TYPE_CI_CO_LIST (type) != 0 || cico_list != 0
      || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
      || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref)
    type = copy_type (type);

  TYPE_CI_CO_LIST (type) = cico_list;
  TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
  TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
  TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
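
/* Illustrative note, not part of the original source: a schematic call for a
   procedure profile with no copy-in/copy-out parameters would be

	gnu_type = create_subprog_type (void_type_node, gnu_param_list,
					NULL_TREE, 0, 0, 0);

   where gnu_param_list is assumed to be a chain of PARM_DECL nodes built
   elsewhere with create_param_decl.  */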
/* Return a copy of TYPE but safe to modify in any way.  */

  tree new = copy_node (type);

  /* copy_node clears this field instead of copying it, because it is
     aliased with TREE_CHAIN.  */
  TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);

  TYPE_POINTER_TO (new) = 0;
  TYPE_REFERENCE_TO (new) = 0;
  TYPE_MAIN_VARIANT (new) = new;
  TYPE_NEXT_VARIANT (new) = 0;
/* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
   TYPE_INDEX_TYPE is INDEX.  */

create_index_type (min, max, index)

  /* First build a type for the desired range.  */
  tree type = build_index_2_type (min, max);

  /* If this type has the TYPE_INDEX_TYPE we want, return it.  Otherwise, if
     it doesn't have TYPE_INDEX_TYPE set, set it to INDEX.  If TYPE_INDEX_TYPE
     is set, but not to INDEX, make a copy of this type with the requested
     index type.  Note that we have no way of sharing these types, but that's
     only a small hole.  */
  if (TYPE_INDEX_TYPE (type) == index)

  else if (TYPE_INDEX_TYPE (type) != 0)
    type = copy_type (type);

  TYPE_INDEX_TYPE (type) = index;
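
/* Illustrative note, not part of the original source: for an array indexed
   by 0 .. 9, a caller would typically build the GCC index type with
   something like

	gnu_index = create_index_type (size_zero_node, size_int (9),
				       gnu_base_index_type);

   where gnu_base_index_type is an assumed, previously-built type that ends
   up recorded in TYPE_INDEX_TYPE.  */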
/* Return a TYPE_DECL node.  TYPE_NAME gives the name of the type (a character
   string) and TYPE is a ..._TYPE node giving its data type.
   ARTIFICIAL_P is nonzero if this is a declaration that was generated
   by the compiler.  DEBUG_INFO_P is nonzero if we need to write debugging
   information about this type.  */

create_type_decl (type_name, type, attr_list, artificial_p, debug_info_p)

     struct attrib *attr_list;

  tree type_decl = build_decl (TYPE_DECL, type_name, type);
  enum tree_code code = TREE_CODE (type);

  DECL_ARTIFICIAL (type_decl) = artificial_p;
  pushdecl (type_decl);
  process_attributes (type_decl, attr_list);

  /* Pass type declaration information to the debugger unless this is an
     UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
     and ENUMERAL_TYPE or RECORD_TYPE which is handled separately,
     a dummy type, which will be completed later, or a type for which
     debugging information was not requested.  */
  if (code == UNCONSTRAINED_ARRAY_TYPE || TYPE_IS_DUMMY_P (type)

    DECL_IGNORED_P (type_decl) = 1;
  else if (code != ENUMERAL_TYPE && code != RECORD_TYPE
	   && ! ((code == POINTER_TYPE || code == REFERENCE_TYPE)
		 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
    rest_of_decl_compilation (type_decl, NULL, global_bindings_p (), 0);
/* Returns a GCC VAR_DECL node.  VAR_NAME gives the name of the variable.
   ASM_NAME is its assembler name (if provided).  TYPE is its data type
   (a GCC ..._TYPE node).  VAR_INIT is the GCC tree for an optional initial
   expression; NULL_TREE if none.

   CONST_FLAG is nonzero if this variable is constant.

   PUBLIC_FLAG is nonzero if this definition is to be made visible outside of
   the current compilation unit.  This flag should be set when processing the
   variable definitions in a package specification.  EXTERN_FLAG is nonzero
   when processing an external variable declaration (as opposed to a
   definition: no storage is to be allocated for the variable here).

   STATIC_FLAG is only relevant when not at top level.  In that case
   it indicates whether to always allocate storage to the variable.  */

create_var_decl (var_name, asm_name, type, var_init, const_flag, public_flag,
		 extern_flag, static_flag, attr_list)

     struct attrib *attr_list;

     : (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
	&& (global_bindings_p () || static_flag
	    ? 0 != initializer_constant_valid_p (var_init,
						 TREE_TYPE (var_init))
	    : TREE_CONSTANT (var_init))));

    = build_decl ((const_flag && init_const
		   /* Only make a CONST_DECL for sufficiently-small objects.
		      We consider complex double "sufficiently-small".  */
		   && TYPE_SIZE (type) != 0
		   && host_integerp (TYPE_SIZE_UNIT (type), 1)
		   && 0 >= compare_tree_int (TYPE_SIZE_UNIT (type),
					     GET_MODE_SIZE (DCmode)))
		  ? CONST_DECL : VAR_DECL, var_name, type);
  tree assign_init = 0;
  /* If this is external, throw away any initializations unless this is a
     CONST_DECL (meaning we have a constant); they will be done elsewhere.  If
     we are defining a global here, leave a constant initialization and save
     any variable elaborations for the elaboration routine.  Otherwise, if
     the initializing expression is not the same as TYPE, generate the
     initialization with an assignment statement, since it knows how
     to do the required adjustments.  If we are just annotating types,
     throw away the initialization if it isn't a constant.  */

  if ((extern_flag && TREE_CODE (var_decl) != CONST_DECL)
      || (type_annotate_only && var_init != 0 && ! TREE_CONSTANT (var_init)))

  if (global_bindings_p () && var_init != 0 && ! init_const)

      add_pending_elaborations (var_decl, var_init);

  else if (var_init != 0
	   && ((TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
		!= TYPE_MAIN_VARIANT (type))
	       || (static_flag && ! init_const)))
    assign_init = var_init, var_init = 0;

  DECL_COMMON (var_decl) = !flag_no_common;
  DECL_INITIAL (var_decl) = var_init;
  TREE_READONLY (var_decl) = const_flag;
  DECL_EXTERNAL (var_decl) = extern_flag;
  TREE_PUBLIC (var_decl) = public_flag || extern_flag;
  TREE_CONSTANT (var_decl) = TREE_CODE (var_decl) == CONST_DECL;
  TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
    = TYPE_VOLATILE (type);

  /* At the global binding level we need to allocate static storage for the
     variable if and only if it's not external.  If we are not at the top
     level we allocate automatic storage unless requested not to.  */
  TREE_STATIC (var_decl) = global_bindings_p () ? !extern_flag : static_flag;
    SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);

  process_attributes (var_decl, attr_list);

  /* Add this decl to the current binding level and generate any
     needed code and RTL.  */
  var_decl = pushdecl (var_decl);
  expand_decl (var_decl);

  if (DECL_CONTEXT (var_decl) != 0)
    expand_decl_init (var_decl);

  /* If this is volatile, force it into memory.  */
  if (TREE_SIDE_EFFECTS (var_decl))
    mark_addressable (var_decl);

  if (TREE_CODE (var_decl) != CONST_DECL)
    rest_of_decl_compilation (var_decl, 0, global_bindings_p (), 0);

  if (assign_init != 0)

      /* If VAR_DECL has a padded type, convert it to the unpadded
	 type so the assignment is done properly.  */
      tree lhs = var_decl;

      if (TREE_CODE (TREE_TYPE (lhs)) == RECORD_TYPE
	  && TYPE_IS_PADDING_P (TREE_TYPE (lhs)))
	lhs = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (lhs))), lhs);

      expand_expr_stmt (build_binary_op (MODIFY_EXPR, NULL_TREE, lhs,
/* Returns a FIELD_DECL node.  FIELD_NAME is the field name, FIELD_TYPE is its
   type, and RECORD_TYPE is the type of the parent.  PACKED is nonzero if
   this field is in a record type with a "pragma pack".  If SIZE is nonzero
   it is the specified size for this field.  If POS is nonzero, it is the bit
   position.  If ADDRESSABLE is nonzero, it means we are allowed to take
   the address of this field for aliasing purposes.  */

create_field_decl (field_name, field_type, record_type, packed, size, pos,

  tree field_decl = build_decl (FIELD_DECL, field_name, field_type);

  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TREE_READONLY (field_type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
     If it is a padding type where the inner field is of variable size, it
     must be at its natural alignment.  Just handle the packed case here; we
     will disallow non-aligned rep clauses elsewhere.  */
  if (packed && TYPE_MODE (field_type) == BLKmode)
    DECL_ALIGN (field_decl)
      = ((TREE_CODE (field_type) == RECORD_TYPE
	  && TYPE_IS_PADDING_P (field_type)
	  && ! TREE_CONSTANT (DECL_SIZE (TYPE_FIELDS (field_type))))
	 ? TYPE_ALIGN (field_type) : BITS_PER_UNIT);
  /* If a size is specified, use it.  Otherwise, see if we have a size
     to use that may differ from the natural size of the object.  */

    size = convert (bitsizetype, size);

  if (packed == 1 && ! operand_equal_p (rm_size (field_type),
					TYPE_SIZE (field_type), 0))
    size = rm_size (field_type);

  /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
     the nearest byte boundary.  */
  if (size != 0 && TREE_CODE (size) == INTEGER_CST
      && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
    size = round_up (size, BITS_PER_UNIT);
  /* Make a bitfield if a size is specified for two reasons: first if the size
     differs from the natural size.  Second, if the alignment is insufficient.
     There are a number of ways the latter can be true.  But never make a
     bitfield if the type of the field has a nonconstant size.  */

  if (size != 0 && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
      && (! operand_equal_p (TYPE_SIZE (field_type), size, 0)

	  && ! value_zerop (size_binop (TRUNC_MOD_EXPR, pos,
					bitsize_int (TYPE_ALIGN

	  || (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      if (! packed && pos == 0)
	DECL_ALIGN (field_decl)
	  = (TYPE_ALIGN (record_type) != 0
	     ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
	     : TYPE_ALIGN (field_type));
    }

  DECL_PACKED (field_decl) = pos != 0 ? DECL_BIT_FIELD (field_decl) : packed;
  DECL_ALIGN (field_decl)
    = MAX (DECL_ALIGN (field_decl),
	   DECL_BIT_FIELD (field_decl) ? 1
	   : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT
	   : TYPE_ALIGN (field_type));
      /* We need to pass in the alignment the DECL is known to have.
	 This is the lowest-order bit set in POS, but no more than
	 the alignment of the record, if one is specified.  Note
	 that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      if (host_integerp (pos, 1))
	known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);

	known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
	  && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
	known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
			     host_integerp (pos, 1) ? BIGGEST_ALIGNMENT

      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
		    &DECL_FIELD_BIT_OFFSET (field_decl),
		    DECL_OFFSET_ALIGN (field_decl), pos);

      DECL_HAS_REP_P (field_decl) = 1;

  /* If the field type is passed by reference, we will have pointers to the
     field, so it is addressable.  */
  if (must_pass_by_ref (field_type) || default_pass_by_ref (field_type))

  /* Mark the decl as nonaddressable if it either is indicated so semantically
     or if it is a bit field.  */
  DECL_NONADDRESSABLE_P (field_decl)
    = ! addressable || DECL_BIT_FIELD (field_decl);
/* Subroutine of previous function: return nonzero if EXP, ignoring any side
   effects, has the value of zero.  */

  if (TREE_CODE (exp) == COMPOUND_EXPR)
    return value_zerop (TREE_OPERAND (exp, 1));

  return integer_zerop (exp);
/* Returns a PARM_DECL node.  PARAM_NAME is the name of the parameter,
   PARAM_TYPE is its type.  READONLY is nonzero if the parameter is
   readonly (either an IN parameter or an address of a pass-by-ref
   parameter).  */

create_param_decl (param_name, param_type, readonly)

  tree param_decl = build_decl (PARM_DECL, param_name, param_type);

  DECL_ARG_TYPE (param_decl) = param_type;
  DECL_ARG_TYPE_AS_WRITTEN (param_decl) = param_type;
  TREE_READONLY (param_decl) = readonly;
/* Given a DECL and ATTR_LIST, process the listed attributes.  */

process_attributes (decl, attr_list)
     struct attrib *attr_list;

  for (; attr_list; attr_list = attr_list->next)
    switch (attr_list->type)

      case ATTR_MACHINE_ATTRIBUTE:
	decl_attributes (&decl, tree_cons (attr_list->name, attr_list->arg,

			 ATTR_FLAG_TYPE_IN_PLACE);

      case ATTR_LINK_ALIAS:
	TREE_STATIC (decl) = 1;
	assemble_alias (decl, attr_list->name);

      case ATTR_WEAK_EXTERNAL:

	declare_weak (decl);

	post_error ("?weak declarations not supported on this target",
		    attr_list->error_point);

      case ATTR_LINK_SECTION:
#ifdef ASM_OUTPUT_SECTION_NAME
	DECL_SECTION_NAME (decl)
	  = build_string (IDENTIFIER_LENGTH (attr_list->name),
			  IDENTIFIER_POINTER (attr_list->name));
	DECL_COMMON (decl) = 0;
#else
	post_error ("?section attributes are not supported for this target",
		    attr_list->error_point);
#endif
/* Add some pending elaborations on the list.  */

add_pending_elaborations (var_decl, var_init)

    Check_Elaboration_Code_Allowed (error_gnat_node);

  pending_elaborations
    = chainon (pending_elaborations, build_tree_list (var_decl, var_init));

/* Obtain any pending elaborations and clear the old list.  */

get_pending_elaborations ()

  /* Each thing added to the list went on the end; we want it on the
     beginning.  */
  tree result = TREE_CHAIN (pending_elaborations);

  TREE_CHAIN (pending_elaborations) = 0;
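
/* Illustrative note, not part of the original source: judging from the
   routines in this section, a caller that needs a separate elaboration list
   for an inner context brackets its work roughly as

	push_pending_elaborations ();
	... translate declarations, possibly add_pending_elaborations ...
	gnu_list = get_pending_elaborations ();
	pop_pending_elaborations ();

   so that elaboration assignments collected for the inner context do not
   leak into the enclosing one.  */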
/* Mark the binding level stack.  */

mark_binding_level (arg)

  struct binding_level *level = *(struct binding_level **) arg;

  for (; level != 0; level = level->level_chain)
    {
      ggc_mark_tree (level->names);
      ggc_mark_tree (level->blocks);
      ggc_mark_tree (level->this_block);
    }

/* Mark the pending elaboration list.  */

  struct e_stack *p = *((struct e_stack **) data);

      ggc_mark_tree (p->elab_list);
      mark_e_stack (&p->next);

/* Return nonzero if there are pending elaborations.  */

pending_elaborations_p ()

  return TREE_CHAIN (pending_elaborations) != 0;
/* Save a copy of the current pending elaboration list and make a new
   one.  */

push_pending_elaborations ()

  struct e_stack *p = (struct e_stack *) xmalloc (sizeof (struct e_stack));

  p->next = elist_stack;
  p->elab_list = pending_elaborations;

  pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);

/* Pop the stack of pending elaborations.  */

pop_pending_elaborations ()

  struct e_stack *p = elist_stack;

  pending_elaborations = p->elab_list;
  elist_stack = p->next;

/* Return the current position in pending_elaborations so we can insert
   elaborations after that point.  */

get_elaboration_location ()

  return tree_last (pending_elaborations);
/* Insert the current elaborations after ELAB, which is in some elaboration
   list.  */

insert_elaboration_list (elab)

  tree next = TREE_CHAIN (elab);

  if (TREE_CHAIN (pending_elaborations))
    {
      TREE_CHAIN (elab) = TREE_CHAIN (pending_elaborations);
      TREE_CHAIN (tree_last (pending_elaborations)) = next;
      TREE_CHAIN (pending_elaborations) = 0;
    }
/* Returns a LABEL_DECL node for LABEL_NAME.  */

create_label_decl (label_name)

  tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);

  DECL_CONTEXT (label_decl) = current_function_decl;
  DECL_MODE (label_decl) = VOIDmode;
  DECL_SOURCE_LINE (label_decl) = lineno;
  DECL_SOURCE_FILE (label_decl) = input_filename;
/* Returns a FUNCTION_DECL node.  SUBPROG_NAME is the name of the subprogram,
   ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
   node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
   PARM_DECL nodes chained through the TREE_CHAIN field).

   INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
   appropriate fields in the FUNCTION_DECL.  */

create_subprog_decl (subprog_name, asm_name, subprog_type, param_decl_list,
		     inline_flag, public_flag, extern_flag, attr_list)

     tree param_decl_list;

     struct attrib *attr_list;

  tree return_type = TREE_TYPE (subprog_type);
  tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);

  /* If this is a function nested inside an inlined external function, it
     means we aren't going to compile the outer function unless it is
     actually inlined, so do the same for us.  */
  if (current_function_decl != 0 && DECL_INLINE (current_function_decl)
      && DECL_EXTERNAL (current_function_decl))

  DECL_EXTERNAL (subprog_decl) = extern_flag;
  TREE_PUBLIC (subprog_decl) = public_flag;
  DECL_INLINE (subprog_decl) = inline_flag;
  TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
  TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
  TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
  DECL_ARGUMENTS (subprog_decl) = param_decl_list;
  DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);

    DECL_ASSEMBLER_NAME (subprog_decl) = asm_name;

  process_attributes (subprog_decl, attr_list);

  /* Add this decl to the current binding level.  */
  subprog_decl = pushdecl (subprog_decl);

  /* Output the assembler code and/or RTL for the declaration.  */
  rest_of_decl_compilation (subprog_decl, 0, global_bindings_p (), 0);

  return subprog_decl;
/* Count how deep we are into nested functions.  This is because
   we shouldn't call the backend function context routines unless we
   are in a nested function.  */

static int function_nesting_depth;

/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
   body.  This routine needs to be invoked before processing the declarations
   appearing in the subprogram.  */
begin_subprog_body (subprog_decl)

  tree param_decl_list;

  if (function_nesting_depth++ != 0)
    push_function_context ();

  announce_function (subprog_decl);

  /* Make this field nonzero so further routines know that this is not
     tentative.  error_mark_node is replaced below (in poplevel) with the
     actual BLOCK.  */
  DECL_INITIAL (subprog_decl) = error_mark_node;

  /* This function exists in static storage.  This does not mean `static' in
     the C sense!  */
  TREE_STATIC (subprog_decl) = 1;

  /* Enter a new binding level.  */
  current_function_decl = subprog_decl;

  /* Push all the PARM_DECL nodes onto the current scope (i.e. the scope of
     the subprogram body) so that they can be recognized as local variables
     in the subprogram.

     The list of PARM_DECL nodes is stored in the right order in
     DECL_ARGUMENTS.  Since ..._DECL nodes get stored in the reverse order in
     which they are transmitted to `pushdecl' we need to reverse the list of
     PARM_DECLs if we want it to be stored in the right order.  The reason why
     we want to make sure the PARM_DECLs are stored in the correct order is
     that this list will be retrieved in a few lines with a call to `getdecls'
     to store it back into the DECL_ARGUMENTS field.  */
  param_decl_list = nreverse (DECL_ARGUMENTS (subprog_decl));

  for (param_decl = param_decl_list; param_decl; param_decl = next_param)
    {
      next_param = TREE_CHAIN (param_decl);
      TREE_CHAIN (param_decl) = NULL;
      pushdecl (param_decl);
    }

  /* Store back the PARM_DECL nodes.  They appear in the right order.  */
  DECL_ARGUMENTS (subprog_decl) = getdecls ();

  init_function_start (subprog_decl, input_filename, lineno);
  expand_function_start (subprog_decl, 0);

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_ASSEMBLER_NAME (subprog_decl) != 0
      && MAIN_NAME_P (DECL_ASSEMBLER_NAME (subprog_decl))
      && DECL_CONTEXT (subprog_decl) == NULL_TREE)
    expand_main_function ();
/* Finish the definition of the current subprogram and compile it all the way
   to assembler language output.  */

void
end_subprog_body ()
{
  tree decl;
  tree cico_list;

  poplevel (1, 0, 1);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (current_function_decl))
    = current_function_decl;

  /* Mark the RESULT_DECL as being in this subprogram. */
  DECL_CONTEXT (DECL_RESULT (current_function_decl)) = current_function_decl;

  expand_function_end (input_filename, lineno, 0);

  /* If this is a nested function, push a new GC context.  That will keep
     local variables on the stack from being collected while we're doing
     the compilation of this function.  */
  if (function_nesting_depth > 1)
    ggc_push_context ();

  rest_of_compilation (current_function_decl);

  if (function_nesting_depth > 1)
    ggc_pop_context ();

  /* If we're sure this function is defined in this file then mark it
     as such.  */
  if (TREE_ASM_WRITTEN (current_function_decl))
    mark_fn_defined_in_this_file (current_function_decl);

  /* Throw away any VAR_DECLs we made for OUT parameters; they must
     not be seen when we call this function and will be in
     unallocated memory anyway.  */
  for (cico_list = TYPE_CI_CO_LIST (TREE_TYPE (current_function_decl));
       cico_list != 0; cico_list = TREE_CHAIN (cico_list))
    TREE_VALUE (cico_list) = 0;

  if (DECL_SAVED_INSNS (current_function_decl) == 0)
    {
      /* Throw away DECL_RTL in any PARM_DECLs unless this function
	 was saved for inline, in which case the DECL_RTLs are in
	 preserved memory.  */
      for (decl = DECL_ARGUMENTS (current_function_decl);
	   decl != 0; decl = TREE_CHAIN (decl))
	{
	  SET_DECL_RTL (decl, 0);
	  DECL_INCOMING_RTL (decl) = 0;
	}

      /* Similarly, discard DECL_RTL of the return value.  */
      SET_DECL_RTL (DECL_RESULT (current_function_decl), 0);

      /* But DECL_INITIAL must remain nonzero so we know this
	 was an actual function definition unless toplev.c decided not
	 to inline it.  */
      if (DECL_INITIAL (current_function_decl) != 0)
	DECL_INITIAL (current_function_decl) = error_mark_node;

      DECL_ARGUMENTS (current_function_decl) = 0;
    }

  /* If we are not at the bottom of the function nesting stack, pop up to
     the containing function.  Otherwise show we aren't in any function.  */
  if (--function_nesting_depth != 0)
    pop_function_context ();
  else
    current_function_decl = 0;
}
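
/* Illustrative sketch (not from the original sources): the two routines
   above are meant to bracket the translation of one subprogram body, e.g.

     begin_subprog_body (subprog_decl);
     ... expand the declarations and statements of the body ...
     end_subprog_body ();

   with nested bodies handled through function_nesting_depth and the
   push_function_context / pop_function_context calls.  */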
/* Return a definition for a builtin function named NAME and whose data type
   is TYPE.  TYPE should be a function type with argument types.
   FUNCTION_CODE tells later passes how to compile calls to this function.
   See tree.h for its possible values.

   If LIBRARY_NAME is nonzero, use that for DECL_ASSEMBLER_NAME,
   the name to be called if we can't opencode the function.  */

tree
builtin_function (name, type, function_code, class, library_name)
     const char *name;
     tree type;
     int function_code;
     enum built_in_class class;
     const char *library_name;
{
  tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);

  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  if (library_name)
    DECL_ASSEMBLER_NAME (decl) = get_identifier (library_name);

  pushdecl (decl);
  DECL_BUILT_IN_CLASS (decl) = class;
  DECL_FUNCTION_CODE (decl) = function_code;
  return decl;
}
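
/* Illustrative sketch (not from the original sources): a builtin taking no
   arguments could be registered as

     tree ftype = build_function_type (void_type_node, NULL_TREE);
     builtin_function ("__builtin_trap", ftype, BUILT_IN_TRAP,
		       BUILT_IN_NORMAL, NULL);

   BUILT_IN_TRAP and BUILT_IN_NORMAL are the function code and class values
   from tree.h; passing a null LIBRARY_NAME keeps the default assembler
   name.  */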
/* Return an integer type with the number of bits of precision given by
   PRECISION.  UNSIGNEDP is nonzero if the type is unsigned; otherwise
   it is a signed type.  */

tree
type_for_size (precision, unsignedp)
     unsigned precision;
     int unsignedp;
{
  tree t;
  char type_name[20];

  if (precision <= 2 * MAX_BITS_PER_WORD
      && signed_and_unsigned_types[precision][unsignedp] != 0)
    return signed_and_unsigned_types[precision][unsignedp];

  if (unsignedp)
    t = make_unsigned_type (precision);
  else
    t = make_signed_type (precision);

  if (precision <= 2 * MAX_BITS_PER_WORD)
    signed_and_unsigned_types[precision][unsignedp] = t;

  if (TYPE_NAME (t) == 0)
    {
      sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
      TYPE_NAME (t) = get_identifier (type_name);
    }

  return t;
}
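
/* Illustrative sketch (not from the original sources): asking twice for the
   same precision returns the cached node, e.g.

     tree u32 = type_for_size (32, 1);
     tree s8  = type_for_size (8, 0);

   both results are recorded in signed_and_unsigned_types and are named
   UNSIGNED_32 and SIGNED_8 respectively unless they already had names.  */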
/* Likewise for floating-point types.  */

tree
float_type_for_size (precision, mode)
     int precision;
     enum machine_mode mode;
{
  tree t;
  char type_name[20];

  if (float_types[(int) mode] != 0)
    return float_types[(int) mode];

  float_types[(int) mode] = t = make_node (REAL_TYPE);
  TYPE_PRECISION (t) = precision;
  layout_type (t);

  if (TYPE_MODE (t) != mode)
    gigi_abort (414);

  if (TYPE_NAME (t) == 0)
    {
      sprintf (type_name, "FLOAT_%d", precision);
      TYPE_NAME (t) = get_identifier (type_name);
    }

  return t;
}
/* Return a data type that has machine mode MODE.  UNSIGNEDP selects
   an unsigned type; otherwise a signed type is returned.  */

tree
type_for_mode (mode, unsignedp)
     enum machine_mode mode;
     int unsignedp;
{
  if (GET_MODE_CLASS (mode) == MODE_FLOAT)
    return float_type_for_size (GET_MODE_BITSIZE (mode), mode);
  else
    return type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
}
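
/* Illustrative sketch (not from the original sources): for a floating-point
   mode this routine defers to float_type_for_size, so

     tree t = type_for_mode (DFmode, 0);

   yields the cached REAL_TYPE for DFmode, while an integer mode such as
   SImode goes through type_for_size with its bit size.  */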
/* Return the unsigned version of a TYPE_NODE, a scalar type.  */

tree
unsigned_type (type_node)
     tree type_node;
{
  tree type = type_for_size (TYPE_PRECISION (type_node), 1);

  if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
    {
      type = copy_node (type);
      TREE_TYPE (type) = type_node;
    }
  else if (TREE_TYPE (type_node) != 0
	   && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
	   && TYPE_MODULAR_P (TREE_TYPE (type_node)))
    {
      type = copy_node (type);
      TREE_TYPE (type) = TREE_TYPE (type_node);
    }

  return type;
}
/* Return the signed version of a TYPE_NODE, a scalar type.  */

tree
signed_type (type_node)
     tree type_node;
{
  tree type = type_for_size (TYPE_PRECISION (type_node), 0);

  if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
    {
      type = copy_node (type);
      TREE_TYPE (type) = type_node;
    }
  else if (TREE_TYPE (type_node) != 0
	   && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
	   && TYPE_MODULAR_P (TREE_TYPE (type_node)))
    {
      type = copy_node (type);
      TREE_TYPE (type) = TREE_TYPE (type_node);
    }

  return type;
}
/* Return a type the same as TYPE except unsigned or signed according to
   UNSIGNEDP.  */

tree
signed_or_unsigned_type (unsignedp, type)
     int unsignedp;
     tree type;
{
  if (! INTEGRAL_TYPE_P (type) || TREE_UNSIGNED (type) == unsignedp)
    return type;
  else
    return type_for_size (TYPE_PRECISION (type), unsignedp);
}
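
/* Illustrative sketch (not from the original sources): these three routines
   are the hooks fold-const uses to flip signedness, e.g.

     tree u = unsigned_type (integer_type_node);
     tree s = signed_or_unsigned_type (0, u);

   for an Ada modular type the copy_node calls above keep the original
   type attached through TREE_TYPE so the modulus information is not
   lost across the conversion.  */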
/* EXP is an expression for the size of an object.  If this size contains
   discriminant references, replace them with the maximum (if MAX_P) or
   minimum (if ! MAX_P) possible value of the discriminant.  */

tree
max_size (exp, max_p)
     tree exp;
     int max_p;
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE_CLASS (code))
    {
    case 'd':
    case 'c':
      return exp;

    case 'x':
      if (code == TREE_LIST)
	return tree_cons (TREE_PURPOSE (exp),
			  max_size (TREE_VALUE (exp), max_p),
			  TREE_CHAIN (exp) != 0
			  ? max_size (TREE_CHAIN (exp), max_p) : 0);
      break;

    case 'r':
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
	 modify.  Otherwise, we abort since it is something we can't
	 handle.  */
      if (! contains_placeholder_p (exp))
	gigi_abort (406);

      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      return
	max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), 1);

    case '<':
      return max_p ? size_one_node : size_zero_node;

    case '1':
    case '2':
    case 'e':
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  if (code == NON_LVALUE_EXPR)
	    return max_size (TREE_OPERAND (exp, 0), max_p);
	  else
	    return
	      fold (build1 (code, type,
			    max_size (TREE_OPERAND (exp, 0),
				      code == NEGATE_EXPR ? ! max_p : max_p)));

	case 2:
	  if (code == RTL_EXPR)
	    return exp;
	  else if (code == COMPOUND_EXPR)
	    return max_size (TREE_OPERAND (exp, 1), max_p);
	  else if (code == WITH_RECORD_EXPR)
	    return exp;

	  {
	    tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
	    tree rhs = max_size (TREE_OPERAND (exp, 1),
				 code == MINUS_EXPR ? ! max_p : max_p);

	    /* Special-case wanting the maximum value of a MIN_EXPR.
	       In that case, if one side overflows, return the other.
	       sizetype is signed, but we know sizes are non-negative.
	       Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
	       overflowing or the maximum possible value and the RHS
	       a variable.  */
	    if (max_p && code == MIN_EXPR && TREE_OVERFLOW (rhs))
	      return lhs;
	    else if (max_p && code == MIN_EXPR && TREE_OVERFLOW (lhs))
	      return rhs;
	    else if ((code == MINUS_EXPR || code == PLUS_EXPR)
		     && (TREE_OVERFLOW (lhs)
			 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
		     && ! TREE_CONSTANT (rhs))
	      return lhs;
	    else
	      return fold (build (code, type, lhs, rhs));
	  }

	case 3:
	  if (code == SAVE_EXPR)
	    return exp;
	  else if (code == COND_EXPR)
	    return fold (build (MAX_EXPR, type,
				max_size (TREE_OPERAND (exp, 1), max_p),
				max_size (TREE_OPERAND (exp, 2), max_p)));
	  else if (code == CALL_EXPR && TREE_OPERAND (exp, 1) != 0)
	    return build (CALL_EXPR, type, TREE_OPERAND (exp, 0),
			  max_size (TREE_OPERAND (exp, 1), max_p));
	}
    }
}
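
/* Illustrative sketch (not from the original sources): given a size
   expression SIZE containing a discriminant reference, such as a MIN_EXPR
   of a constant and a discriminant-dependent value,

     tree largest  = max_size (size, 1);
     tree smallest = max_size (size, 0);

   replace the discriminant by the maximum (resp. minimum) value of its
   type; the MIN_EXPR overflow checks above keep the result usable even
   when one operand saturates sizetype.  */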
/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
   EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
   Return a constructor for the template.  */

tree
build_template (template_type, array_type, expr)
     tree template_type;
     tree array_type;
     tree expr;
{
  tree template_elts = NULL_TREE;
  tree bound_list = NULL_TREE;
  tree field;

  if (TREE_CODE (array_type) == RECORD_TYPE
      && (TYPE_IS_PADDING_P (array_type)
	  || TYPE_LEFT_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
	  && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.  Go down the
     field list of the template instead of the type chain because this
     array might be an Ada array of arrays and we can't tell where the
     nested arrays stop being the underlying object.  */

  for (field = TYPE_FIELDS (template_type); field;
       (bound_list != 0
	? (bound_list = TREE_CHAIN (bound_list))
	: (array_type = TREE_TYPE (array_type))),
       field = TREE_CHAIN (TREE_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
	 for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
	 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
	 This will give us a maximum range.  */
      if (bound_list != 0)
	bounds = TREE_VALUE (bound_list);
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
	bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
      else if (expr != 0 && TREE_CODE (expr) == PARM_DECL
	       && DECL_BY_COMPONENT_PTR_P (expr))
	bounds = TREE_TYPE (field);

      min = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (field), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
	 surround them with a WITH_RECORD_EXPR giving EXPR as the
	 OBJECT.  */
      if (! TREE_CONSTANT (min) && contains_placeholder_p (min))
	min = build (WITH_RECORD_EXPR, TREE_TYPE (min), min, expr);
      if (! TREE_CONSTANT (max) && contains_placeholder_p (max))
	max = build (WITH_RECORD_EXPR, TREE_TYPE (max), max, expr);

      template_elts = tree_cons (TREE_CHAIN (field), max,
				 tree_cons (field, min, template_elts));
    }

  return build_constructor (template_type, nreverse (template_elts));
}
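
/* Illustrative sketch (not from the original sources): for a template type
   built for an array, something like

     tree templ = build_template (template_type, array_type, NULL_TREE);

   returns a CONSTRUCTOR pairing each lower/upper bound field of
   TEMPLATE_TYPE with the corresponding array bound; a non-null EXPR is
   only needed when the bounds contain PLACEHOLDER_EXPRs.  Both
   template_type and array_type are assumed to have been built earlier.  */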
/* Build a VMS descriptor from a Mechanism_Type, which must specify
   a descriptor type, and the GCC type of an object.  Each FIELD_DECL
   in the type contains in its DECL_INITIAL the expression to use when
   a constructor is made for the type.  GNAT_ENTITY is a gnat node used
   to print out an error message if the mechanism cannot be applied to
   an object of that type and also for the name.  */

tree
build_vms_descriptor (type, mech, gnat_entity)
     tree type;
     Mechanism_Type mech;
     Entity_Id gnat_entity;
{
  tree record_type = make_node (RECORD_TYPE);
  tree field_list = 0;
  int class;
  int dtype = 0;
  int ndim;
  int i;
  tree inner_type;
  tree *idx_arr;
  tree tem;

  /* If TYPE is an unconstrained array, use the underlying array type.  */
  if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));

  /* If this is an array, compute the number of dimensions in the array,
     get the index types, and point to the inner type.  */
  if (TREE_CODE (type) != ARRAY_TYPE)
    ndim = 0;
  else
    for (ndim = 1, inner_type = type;
	 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
	 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
	 ndim++, inner_type = TREE_TYPE (inner_type))
      ;

  idx_arr = (tree *) alloca (ndim * sizeof (tree));

  if (mech != By_Descriptor_NCA
      && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
    for (i = ndim - 1, inner_type = type;
	 i >= 0;
	 i--, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);
  else
    for (i = 0, inner_type = type;
	 i < ndim;
	 i++, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);

  /* Now get the DTYPE value.  */
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
      if (TYPE_VAX_FLOATING_POINT_P (type))
	switch ((int) TYPE_DIGITS_VALUE (type))
	  {
	  case 6:
	    dtype = 10;
	    break;
	  case 9:
	    dtype = 11;
	    break;
	  case 15:
	    dtype = 27;
	    break;
	  }
      else
	switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
	  {
	  case 8:
	    dtype = TREE_UNSIGNED (type) ? 2 : 6;
	    break;
	  case 16:
	    dtype = TREE_UNSIGNED (type) ? 3 : 7;
	    break;
	  case 32:
	    dtype = TREE_UNSIGNED (type) ? 4 : 8;
	    break;
	  case 64:
	    dtype = TREE_UNSIGNED (type) ? 5 : 9;
	    break;
	  case 128:
	    dtype = TREE_UNSIGNED (type) ? 25 : 26;
	    break;
	  }
      break;

    case REAL_TYPE:
      dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
      break;

    case COMPLEX_TYPE:
      if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
	  && TYPE_VAX_FLOATING_POINT_P (type))
	switch ((int) TYPE_DIGITS_VALUE (type))
	  {
	  case 6:
	    dtype = 12;
	    break;
	  case 9:
	    dtype = 13;
	    break;
	  case 15:
	    dtype = 29;
	    break;
	  }
      else
	dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54 : 55;
      break;

    default:
      break;
    }

  /* Get the CLASS value.  */
  switch (mech)
    {
    case By_Descriptor_A:
      class = 4;
      break;
    case By_Descriptor_NCA:
      class = 10;
      break;
    case By_Descriptor_SB:
      class = 15;
      break;
    default:
      class = 1;
      break;
    }

  /* Make the type for a descriptor for VMS.  The first four fields
     are the same for all types.  */

  field_list
    = chainon (field_list,
	       make_descriptor_field
	       ("LENGTH", type_for_size (16, 1), record_type,
		size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));

  field_list = chainon (field_list,
			make_descriptor_field ("DTYPE", type_for_size (8, 1),
					       record_type, size_int (dtype)));
  field_list = chainon (field_list,
			make_descriptor_field ("CLASS", type_for_size (8, 1),
					       record_type, size_int (class)));

  field_list
    = chainon (field_list,
	       make_descriptor_field ("POINTER",
				      build_pointer_type (type),
				      record_type,
				      build1 (ADDR_EXPR,
					      build_pointer_type (type),
					      build (PLACEHOLDER_EXPR,
						     type))));

  switch (mech)
    {
    case By_Descriptor_S:
      break;

    case By_Descriptor_SB:
      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("SB_L1", type_for_size (32, 1), record_type,
		    TREE_CODE (type) == ARRAY_TYPE
		    ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("SB_L2", type_for_size (32, 1), record_type,
		    TREE_CODE (type) == ARRAY_TYPE
		    ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
      break;

    case By_Descriptor_A:
    case By_Descriptor_NCA:
      field_list = chainon (field_list,
			    make_descriptor_field ("SCALE",
						   type_for_size (8, 1),
						   record_type,
						   size_zero_node));

      field_list = chainon (field_list,
			    make_descriptor_field ("DIGITS",
						   type_for_size (8, 1),
						   record_type,
						   size_zero_node));

      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("AFLAGS", type_for_size (8, 1), record_type,
		    size_int (mech == By_Descriptor_NCA
			      ? 0
			      /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS.  */
			      : (TREE_CODE (type) == ARRAY_TYPE
				 && TYPE_CONVENTION_FORTRAN_P (type)
				 ? 224 : 192))));

      field_list = chainon (field_list,
			    make_descriptor_field ("DIMCT",
						   type_for_size (8, 1),
						   record_type,
						   size_int (ndim)));

      field_list = chainon (field_list,
			    make_descriptor_field ("ARSIZE",
						   type_for_size (32, 1),
						   record_type,
						   size_in_bytes (type)));

      /* Now build a pointer to the 0,0,0... element.  */
      tem = build (PLACEHOLDER_EXPR, type);
      for (i = 0, inner_type = type; i < ndim;
	   i++, inner_type = TREE_TYPE (inner_type))
	tem = build (ARRAY_REF, TREE_TYPE (inner_type), tem,
		     convert (TYPE_DOMAIN (inner_type), size_zero_node));

      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("A0", build_pointer_type (inner_type), record_type,
		    build1 (ADDR_EXPR, build_pointer_type (inner_type), tem)));

      /* Next come the addressing coefficients.  */
      tem = size_int (1);
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];
	  tree idx_length
	    = size_binop (MULT_EXPR, tem,
			  size_binop (PLUS_EXPR,
				      size_binop (MINUS_EXPR,
						  TYPE_MAX_VALUE (idx_arr[i]),
						  TYPE_MIN_VALUE (idx_arr[i])),
				      size_int (1)));

	  fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
	  fname[1] = '0' + i, fname[2] = 0;
	  field_list = chainon (field_list,
				make_descriptor_field (fname,
						       type_for_size (32, 1),
						       record_type,
						       idx_length));

	  if (mech == By_Descriptor_NCA)
	    tem = idx_length;
	}

      /* Finally here are the bounds.  */
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];

	  fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = chainon (field_list,
		       make_descriptor_field
		       (fname, type_for_size (32, 1), record_type,
			TYPE_MIN_VALUE (idx_arr[i])));

	  fname[0] = 'U';
	  field_list
	    = chainon (field_list,
		       make_descriptor_field
		       (fname, type_for_size (32, 1), record_type,
			TYPE_MAX_VALUE (idx_arr[i])));
	}
      break;

    default:
      post_error ("unsupported descriptor type for &", gnat_entity);
    }

  finish_record_type (record_type, field_list, 0, 1);
  pushdecl (build_decl (TYPE_DECL, create_concat_name (gnat_entity, "DESC"),
			record_type));

  return record_type;
}
/* Utility routine for above code to make a field.  */

static tree
make_descriptor_field (name, type, rec_type, initial)
     const char *name;
     tree type;
     tree rec_type;
     tree initial;
{
  tree field
    = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);

  DECL_INITIAL (field) = initial;
  return field;
}
/* Build a type to be used to represent an aliased object whose nominal
   type is an unconstrained array.  This consists of a RECORD_TYPE containing
   a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
   ARRAY_TYPE.  If ARRAY_TYPE is that of the unconstrained array, this
   is used to represent an arbitrary unconstrained object.  Use NAME
   as the name of the record.  */

tree
build_unc_object_type (template_type, object_type, name)
     tree template_type;
     tree object_type;
     tree name;
{
  tree type = make_node (RECORD_TYPE);
  tree template_field = create_field_decl (get_identifier ("BOUNDS"),
					   template_type, type, 0, 0, 0, 1);
  tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
					type, 0, 0, 0, 1);

  TYPE_NAME (type) = name;
  TYPE_CONTAINS_TEMPLATE_P (type) = 1;
  finish_record_type (type,
		      chainon (chainon (NULL_TREE, template_field),
			       array_field),
		      0, 0);

  return type;
}
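
/* Illustrative sketch (not from the original sources): a typical use is

     tree unc_type
       = build_unc_object_type (template_type, object_type,
				get_identifier ("SOMETYPE_UNC"));

   which yields a RECORD_TYPE with a BOUNDS field of TEMPLATE_TYPE followed
   by an ARRAY field of OBJECT_TYPE, marked TYPE_CONTAINS_TEMPLATE_P; the
   name "SOMETYPE_UNC" is only a placeholder here.  */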
/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.  In
   the normal case this is just two adjustments, but we have more to do
   if NEW is an UNCONSTRAINED_ARRAY_TYPE.  */

void
update_pointer_to (old_type, new_type)
     tree old_type;
     tree new_type;
{
  tree ptr = TYPE_POINTER_TO (old_type);
  tree ref = TYPE_REFERENCE_TO (old_type);
  tree type;

  /* If this is the main variant, process all the other variants first.  */
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
    for (type = TYPE_NEXT_VARIANT (old_type); type != 0;
	 type = TYPE_NEXT_VARIANT (type))
      update_pointer_to (type, new_type);

  /* If no pointer or reference, we are done.  Otherwise, get the new type with
     the same qualifiers as the old type and see if it is the same as the old
     type.  */
  if (ptr == 0 && ref == 0)
    return;

  new_type = build_qualified_type (new_type, TYPE_QUALS (old_type));
  if (old_type == new_type)
    return;

  /* First handle the simple case.  */
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
    {
      if (ptr != 0)
	TREE_TYPE (ptr) = new_type;
      TYPE_POINTER_TO (new_type) = ptr;

      if (ref != 0)
	TREE_TYPE (ref) = new_type;
      TYPE_REFERENCE_TO (new_type) = ref;

      if (ptr != 0 && TYPE_NAME (ptr) != 0
	  && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL
	  && TREE_CODE (new_type) != ENUMERAL_TYPE)
	rest_of_decl_compilation (TYPE_NAME (ptr), NULL,
				  global_bindings_p (), 0);
      if (ref != 0 && TYPE_NAME (ref) != 0
	  && TREE_CODE (TYPE_NAME (ref)) == TYPE_DECL
	  && TREE_CODE (new_type) != ENUMERAL_TYPE)
	rest_of_decl_compilation (TYPE_NAME (ref), NULL,
				  global_bindings_p (), 0);
    }

  /* Now deal with the unconstrained array case.  In this case the "pointer"
     is actually a RECORD_TYPE where the types of both fields are
     pointers to void.  In that case, copy the field list from the
     old type to the new one and update the fields' context.  */
  else if (TREE_CODE (ptr) != RECORD_TYPE || ! TYPE_IS_FAT_POINTER_P (ptr))
    gigi_abort (412);

  else
    {
      tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
      tree ptr_temp_type;
      tree new_ref;
      tree var;

      TYPE_FIELDS (ptr) = TYPE_FIELDS (TYPE_POINTER_TO (new_type));
      DECL_CONTEXT (TYPE_FIELDS (ptr)) = ptr;
      DECL_CONTEXT (TREE_CHAIN (TYPE_FIELDS (ptr))) = ptr;

      /* Rework the PLACEHOLDER_EXPR inside the reference to the
	 template.

	 ??? This is now the only use of gnat_substitute_in_type, which
	 is now a very "heavy" routine to do this, so it should be replaced
	 at some point.  */
      ptr_temp_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (ptr)));
      new_ref = build (COMPONENT_REF, ptr_temp_type,
		       build (PLACEHOLDER_EXPR, ptr),
		       TREE_CHAIN (TYPE_FIELDS (ptr)));

      update_pointer_to
	(TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
	 gnat_substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
				  TREE_CHAIN (TYPE_FIELDS (ptr)), new_ref));

      for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
	TYPE_UNCONSTRAINED_ARRAY (var) = new_type;

      TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
	= TREE_TYPE (new_type) = ptr;

      /* Now handle updating the allocation record, what the thin pointer
	 points to.  Update all pointers from the old record into the new
	 one, update the types of the fields, and recompute the size.  */

      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);

      TREE_TYPE (TYPE_FIELDS (new_obj_rec)) = TREE_TYPE (ptr_temp_type);
      TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr)));
      DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TYPE_SIZE (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
      DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));

      TYPE_SIZE (new_obj_rec)
	= size_binop (PLUS_EXPR,
		      DECL_SIZE (TYPE_FIELDS (new_obj_rec)),
		      DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
      TYPE_SIZE_UNIT (new_obj_rec)
	= size_binop (PLUS_EXPR,
		      DECL_SIZE_UNIT (TYPE_FIELDS (new_obj_rec)),
		      DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
      rest_of_type_compilation (ptr, global_bindings_p ());
    }
}
/* Convert a pointer to a constrained array into a pointer to a fat
   pointer.  This involves making or finding a template.  */

static tree
convert_to_fat_pointer (type, expr)
     tree type;
     tree expr;
{
  tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
  tree template, template_addr;
  tree etype = TREE_TYPE (expr);

  /* If EXPR is a constant of zero, we make a fat pointer that has a null
     pointer to the template and array.  */
  if (integer_zerop (expr))
    return
      build_constructor
	(type,
	 tree_cons (TYPE_FIELDS (type),
		    convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
		    tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
			       convert (build_pointer_type (template_type),
					expr),
			       NULL_TREE)));

  /* If EXPR is a thin pointer, make the template and data from the record.  */

  else if (TYPE_THIN_POINTER_P (etype))
    {
      tree fields = TYPE_FIELDS (TREE_TYPE (etype));

      expr = save_expr (expr);
      if (TREE_CODE (expr) == ADDR_EXPR)
	expr = TREE_OPERAND (expr, 0);
      else
	expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);

      template = build_component_ref (expr, NULL_TREE, fields);
      expr = build_unary_op (ADDR_EXPR, NULL_TREE,
			     build_component_ref (expr, NULL_TREE,
						  TREE_CHAIN (fields)));
    }
  else
    /* Otherwise, build the constructor for the template.  */
    template = build_template (template_type, TREE_TYPE (etype), expr);

  template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);

  /* The result is a CONSTRUCTOR for the fat pointer.  */
  return
    build_constructor (type,
		       tree_cons (TYPE_FIELDS (type), expr,
				  tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
					     template_addr, NULL_TREE)));
}
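
/* Illustrative sketch (not from the original sources): given a fat pointer
   type FAT_TYPE and an expression whose type is a pointer to a constrained
   array,

     tree fat = convert_to_fat_pointer (fat_type, expr);

   builds a CONSTRUCTOR whose first field is the data pointer and whose
   second field is the address of a template built (or extracted) as above;
   a literal zero yields a fat pointer with two null fields.  */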
/* Convert to a thin pointer type, TYPE.  The only thing we know how to
   convert is something that is a fat pointer, so convert to it first if
   EXPR is not already a fat pointer.  */

static tree
convert_to_thin_pointer (type, expr)
     tree type;
     tree expr;
{
  if (! TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
    expr
      = convert_to_fat_pointer
	(TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);

  /* We get the pointer to the data and use a NOP_EXPR to make it the
     proper GCC type.  */
  expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)));
  expr = build1 (NOP_EXPR, type, expr);

  return expr;
}
/* Create an expression whose value is that of EXPR,
   converted to type TYPE.  The TREE_TYPE of the value
   is always TYPE.  This function implements all reasonable
   conversions; callers should filter out those that are
   not permitted by the language being compiled.  */

tree
convert (type, expr)
     tree type;
     tree expr;
{
  enum tree_code code = TREE_CODE (type);
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);
  tree tem;

  /* If EXPR is already the right type, we are done.  */
  if (type == etype)
    return expr;

  /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
     new one.  */
  if (TREE_CODE (expr) == WITH_RECORD_EXPR)
    return build (WITH_RECORD_EXPR, type,
		  convert (type, TREE_OPERAND (expr, 0)),
		  TREE_OPERAND (expr, 1));

  /* If the input type has padding, remove it by doing a component reference
     to the field.  If the output type has padding, make a constructor
     to build the record.  If both input and output have padding and are
     of variable size, do this as an unchecked conversion.  */
  if (ecode == RECORD_TYPE && code == RECORD_TYPE
      && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
      && (! TREE_CONSTANT (TYPE_SIZE (type))
	  || ! TREE_CONSTANT (TYPE_SIZE (etype))))
    ;
  else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
    {
      /* If we have just converted to this padded type, just get
	 the inner expression.  */
      if (TREE_CODE (expr) == CONSTRUCTOR
	  && CONSTRUCTOR_ELTS (expr) != 0
	  && TREE_PURPOSE (CONSTRUCTOR_ELTS (expr)) == TYPE_FIELDS (etype))
	return TREE_VALUE (CONSTRUCTOR_ELTS (expr));
      else
	return convert (type, build_component_ref (expr, NULL_TREE,
						    TYPE_FIELDS (etype)));
    }
  else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
    {
      /* If we previously converted from another type and our type is
	 of variable size, remove the conversion to avoid the need for
	 variable-size temporaries.  */
      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
	  && ! TREE_CONSTANT (TYPE_SIZE (type)))
	expr = TREE_OPERAND (expr, 0);

      /* If we are just removing the padding from expr, convert the original
	 object if we have variable size.  That will avoid the need
	 for some variable-size temporaries.  */
      if (TREE_CODE (expr) == COMPONENT_REF
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
	  && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
	  && ! TREE_CONSTANT (TYPE_SIZE (type)))
	return convert (type, TREE_OPERAND (expr, 0));

      /* If the result type is a padded type with a self-referentially-sized
	 field and the expression type is a record, do this as an
	 unchecked conversion.  */
      else if (TREE_CODE (DECL_SIZE (TYPE_FIELDS (type))) != INTEGER_CST
	       && contains_placeholder_p (DECL_SIZE (TYPE_FIELDS (type)))
	       && TREE_CODE (etype) == RECORD_TYPE)
	return unchecked_convert (type, expr);

      else
	return
	  build_constructor (type,
			     tree_cons (TYPE_FIELDS (type),
					convert (TREE_TYPE
						 (TYPE_FIELDS (type)),
						 expr),
					NULL_TREE));
    }

  /* If the input is a biased type, adjust first.  */
  if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
    return convert (type, fold (build (PLUS_EXPR, TREE_TYPE (etype),
				       fold (build1 (GNAT_NOP_EXPR,
						     TREE_TYPE (etype), expr)),
				       TYPE_MIN_VALUE (etype))));

  /* If the input is a left-justified modular type, we need to extract
     the actual object before converting it to any other type with the
     exception of an unconstrained array.  */
  if (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)
      && code != UNCONSTRAINED_ARRAY_TYPE)
    return convert (type, build_component_ref (expr, NULL_TREE,
						TYPE_FIELDS (etype)));

  /* If converting a type that does not contain a template into one
     that does, convert to the data type and then build the template. */
  if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type)
      && ! (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype)))
    {
      tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));

      return
	build_constructor
	  (type,
	   tree_cons (TYPE_FIELDS (type),
		      build_template (TREE_TYPE (TYPE_FIELDS (type)),
				      obj_type, NULL_TREE),
		      tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
				 convert (obj_type, expr), NULL_TREE)));
    }

  /* There are some special cases of expressions that we process
     specially.  */
  switch (TREE_CODE (expr))
    {
    case TRANSFORM_EXPR:
    case NULL_EXPR:
      /* Just set its type here.  For TRANSFORM_EXPR, we will do the actual
	 conversion in gnat_expand_expr.  NULL_EXPR does not represent
	 an actual value, so no conversion is needed.  */
      TREE_TYPE (expr) = type;
      return expr;

    case STRING_CST:
    case CONSTRUCTOR:
      /* If we are converting a STRING_CST to another constrained array type,
	 just make a new one in the proper type.  Likewise for a
	 CONSTRUCTOR.  But if the mode of the type is different, we must
	 ensure a new RTL is made for the constant.  */
      if (code == ecode && AGGREGATE_TYPE_P (etype)
	  && ! (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
		&& TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;

	  if (TYPE_MODE (type) != TYPE_MODE (etype))
	    TREE_CST_RTL (expr) = 0;

	  return expr;
	}
      break;

    case COMPONENT_REF:
      /* If we are converting between two aggregate types of the same
	 kind, size, mode, and alignment, just make a new COMPONENT_REF.
	 This avoids unneeded conversions which makes reference computations
	 more complex.  */
      if (code == ecode && TYPE_MODE (type) == TYPE_MODE (etype)
	  && AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
	  && TYPE_ALIGN (type) == TYPE_ALIGN (etype)
	  && operand_equal_p (TYPE_SIZE (type), TYPE_SIZE (etype), 0))
	return build (COMPONENT_REF, type, TREE_OPERAND (expr, 0),
		      TREE_OPERAND (expr, 1));

      break;

    case UNCONSTRAINED_ARRAY_REF:
      /* Convert this to the type of the inner array by getting the address of
	 the array from the template.  */
      expr = build_unary_op (INDIRECT_REF, NULL_TREE,
			     build_component_ref (TREE_OPERAND (expr, 0),
						  get_identifier ("P_ARRAY"),
						  NULL_TREE));
      etype = TREE_TYPE (expr);
      ecode = TREE_CODE (etype);
      break;

    case VIEW_CONVERT_EXPR:
      if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
	  && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
	return convert (type, TREE_OPERAND (expr, 0));
      break;

    case INDIRECT_REF:
      /* If both types are record types, just convert the pointer and
	 make a new INDIRECT_REF.

	 ??? Disable this for now since it causes problems with the
	 code in build_binary_op for MODIFY_EXPR which wants to
	 strip off conversions.  But that code really is a mess and
	 we need to do this a much better way some time.  */
      if (0
	  && (TREE_CODE (type) == RECORD_TYPE
	      || TREE_CODE (type) == UNION_TYPE)
	  && (TREE_CODE (etype) == RECORD_TYPE
	      || TREE_CODE (etype) == UNION_TYPE)
	  && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
	return build_unary_op (INDIRECT_REF, NULL_TREE,
			       convert (build_pointer_type (type),
					TREE_OPERAND (expr, 0)));
      break;

    default:
      break;
    }

  /* Check for converting to a pointer to an unconstrained array.  */
  if (TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
    return convert_to_fat_pointer (type, expr);

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
      || (code == INTEGER_CST && ecode == INTEGER_CST
	  && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
    return fold (build1 (NOP_EXPR, type, expr));

  switch (code)
    {
    case VOID_TYPE:
      return build1 (CONVERT_EXPR, type, expr);

    case INTEGER_TYPE:
      if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
	  && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE))
	return unchecked_convert (type, expr);
      else if (TYPE_BIASED_REPRESENTATION_P (type))
	return fold (build1 (CONVERT_EXPR, type,
			     fold (build (MINUS_EXPR, TREE_TYPE (type),
					  convert (TREE_TYPE (type), expr),
					  TYPE_MIN_VALUE (type)))));

      /* ... fall through ... */

    case ENUMERAL_TYPE:
      return fold (convert_to_integer (type, expr));

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* If converting between two pointers to records denoting
	 both a template and type, adjust if needed to account
	 for any differing offsets, since one might be negative.  */
      if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
	{
	  tree bit_diff
	    = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
			   bit_position (TYPE_FIELDS (TREE_TYPE (type))));
	  tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
				       sbitsize_int (BITS_PER_UNIT));

	  expr = build1 (NOP_EXPR, type, expr);
	  TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
	  if (integer_zerop (byte_diff))
	    return expr;

	  return build_binary_op (PLUS_EXPR, type, expr,
				  fold (convert_to_pointer (type, byte_diff)));
	}

      /* If converting to a thin pointer, handle specially.  */
      if (TYPE_THIN_POINTER_P (type)
	  && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != 0)
	return convert_to_thin_pointer (type, expr);

      /* If converting fat pointer to normal pointer, get the pointer to the
	 array and then convert it.  */
      else if (TYPE_FAT_POINTER_P (etype))
	expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
				    NULL_TREE);

      return fold (convert_to_pointer (type, expr));

    case REAL_TYPE:
      return fold (convert_to_real (type, expr));

    case RECORD_TYPE:
      if (TYPE_LEFT_JUSTIFIED_MODULAR_P (type) && ! AGGREGATE_TYPE_P (etype))
	return
	  build_constructor
	    (type, tree_cons (TYPE_FIELDS (type),
			      convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
			      NULL_TREE));

      /* ... fall through ... */

    case ARRAY_TYPE:
      /* In these cases, assume the front-end has validated the conversion.
	 If the conversion is valid, it will be a bit-wise conversion, so
	 it can be viewed as an unchecked conversion.  */
      return unchecked_convert (type, expr);

    case UNION_TYPE:
      /* Just validate that the type is indeed that of a field
	 of the type.  Then make the simple conversion.  */
      for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
	if (TREE_TYPE (tem) == etype)
	  return build1 (CONVERT_EXPR, type, expr);

    case UNCONSTRAINED_ARRAY_TYPE:
      /* If EXPR is a constrained array, take its address, convert it to a
	 fat pointer, and then dereference it.  Likewise if EXPR is a
	 record containing both a template and a constrained array.
	 Note that a record representing a left justified modular type
	 always represents a packed constrained array.  */
      if (ecode == ARRAY_TYPE
	  || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
	  || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
	  || (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)))
	return
	  build_unary_op
	    (INDIRECT_REF, NULL_TREE,
	     convert_to_fat_pointer (TREE_TYPE (type),
				     build_unary_op (ADDR_EXPR,
						     NULL_TREE, expr)));

      /* Do something very similar for converting one unconstrained
	 array to another.  */
      else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
	return
	  build_unary_op (INDIRECT_REF, NULL_TREE,
			  convert (TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR,
						   NULL_TREE, expr)));

    case COMPLEX_TYPE:
      return fold (convert_to_complex (type, expr));
    }
}
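
/* Illustrative sketch (not from the original sources): the Ada-to-GCC
   translator calls this routine for ordinary type conversions, e.g.

     tree val = convert (gnu_target_type, gnu_expr);

   where gnu_target_type and gnu_expr are placeholder names here, and
   relies on the cases above to strip or add padding records, rebuild
   templates for unconstrained arrays, and fall back on the standard
   convert_to_* routines for scalars.  */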
/* Remove all conversions that are done in EXP.  This includes converting
   from a padded type or to a left-justified modular type.  If TRUE_ADDRESS
   is nonzero, always return the address of the containing object even if
   the address is not bit-aligned.  */

tree
remove_conversions (exp, true_address)
     tree exp;
     int true_address;
{
  switch (TREE_CODE (exp))
    {
    case CONSTRUCTOR:
      if (true_address
	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
	  && TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
	return remove_conversions (TREE_VALUE (CONSTRUCTOR_ELTS (exp)), 1);
      break;

    case COMPONENT_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
	  && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
	return remove_conversions (TREE_OPERAND (exp, 0), true_address);
      break;

    case VIEW_CONVERT_EXPR:  case NON_LVALUE_EXPR:
    case NOP_EXPR:  case CONVERT_EXPR:  case GNAT_NOP_EXPR:
      return remove_conversions (TREE_OPERAND (exp, 0), true_address);

    default:
      break;
    }

  return exp;
}
/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
   refers to the underlying array.  If its type has TYPE_CONTAINS_TEMPLATE_P,
   likewise return an expression pointing to the underlying array.  */

tree
maybe_unconstrained_array (exp)
     tree exp;
{
  enum tree_code code = TREE_CODE (exp);
  tree new;

  switch (TREE_CODE (TREE_TYPE (exp)))
    {
    case UNCONSTRAINED_ARRAY_TYPE:
      if (code == UNCONSTRAINED_ARRAY_REF)
	{
	  new
	    = build_unary_op (INDIRECT_REF, NULL_TREE,
			      build_component_ref (TREE_OPERAND (exp, 0),
						   get_identifier ("P_ARRAY"),
						   NULL_TREE));
	  TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
	  return new;
	}

      else if (code == NULL_EXPR)
	return build1 (NULL_EXPR,
		       TREE_TYPE (TREE_TYPE (TYPE_FIELDS
					     (TREE_TYPE (TREE_TYPE (exp))))),
		       TREE_OPERAND (exp, 0));

      else if (code == WITH_RECORD_EXPR
	       && (TREE_OPERAND (exp, 0)
		   != (new = maybe_unconstrained_array
		       (TREE_OPERAND (exp, 0)))))
	return build (WITH_RECORD_EXPR, TREE_TYPE (new), new,
		      TREE_OPERAND (exp, 1));
      break;

    case RECORD_TYPE:
      if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
	{
	  new
	    = build_component_ref (exp, NULL_TREE,
				   TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))));

	  if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
	      && TYPE_IS_PADDING_P (TREE_TYPE (new)))
	    new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (new))), new);

	  return new;
	}
      break;

    default:
      break;
    }

  return exp;
}
/* Return an expression that does an unchecked conversion of EXPR to TYPE.  */

tree
unchecked_convert (type, expr)
     tree type;
     tree expr;
{
  tree etype = TREE_TYPE (expr);

  /* If the expression is already the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
     new one.  */
  if (TREE_CODE (expr) == WITH_RECORD_EXPR)
    return build (WITH_RECORD_EXPR, type,
		  unchecked_convert (type, TREE_OPERAND (expr, 0)),
		  TREE_OPERAND (expr, 1));

  /* If both types are integral just do a normal conversion.
     Likewise for a conversion to an unconstrained array.  */
  if ((((INTEGRAL_TYPE_P (type)
	 && ! (TREE_CODE (type) == INTEGER_TYPE
	       && TYPE_VAX_FLOATING_POINT_P (type)))
	|| (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
	|| (TREE_CODE (type) == RECORD_TYPE
	    && TYPE_LEFT_JUSTIFIED_MODULAR_P (type)))
       && ((INTEGRAL_TYPE_P (etype)
	    && ! (TREE_CODE (etype) == INTEGER_TYPE
		  && TYPE_VAX_FLOATING_POINT_P (etype)))
	   || (POINTER_TYPE_P (etype) && ! TYPE_THIN_POINTER_P (etype))
	   || (TREE_CODE (etype) == RECORD_TYPE
	       && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype))))
      || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    {
      tree rtype = type;

      if (TREE_CODE (etype) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (etype))
	{
	  tree ntype = copy_type (etype);

	  TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
	  TYPE_MAIN_VARIANT (ntype) = ntype;
	  expr = build1 (GNAT_NOP_EXPR, ntype, expr);
	}

      if (TREE_CODE (type) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (type))
	{
	  rtype = copy_type (type);
	  TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
	  TYPE_MAIN_VARIANT (rtype) = rtype;
	}

      expr = convert (rtype, expr);
      if (type != rtype)
	expr = build1 (GNAT_NOP_EXPR, type, expr);
    }

  /* If we are converting TO an integral type whose precision is not the
     same as its size, first unchecked convert to a record that contains
     an object of the output type.  Then extract the field. */
  else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
	   && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				     GET_MODE_BITSIZE (TYPE_MODE (type))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field = create_field_decl (get_identifier ("OBJ"), type,
				      rec_type, 1, 0, 0, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = unchecked_convert (rec_type, expr);
      expr = build_component_ref (expr, NULL_TREE, field);
    }

  /* Similarly for integral input type whose precision is not equal to its
     size.  */
  else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype) != 0
	   && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
				     GET_MODE_BITSIZE (TYPE_MODE (etype))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field
	= create_field_decl (get_identifier ("OBJ"), etype, rec_type,
			     1, 0, 0, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = build_constructor (rec_type, build_tree_list (field, expr));
      expr = unchecked_convert (type, expr);
    }

  /* We have a special case when we are converting between two
     unconstrained array types.  In that case, take the address,
     convert the fat pointer types, and dereference.  */
  else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
	   && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    expr = build_unary_op (INDIRECT_REF, NULL_TREE,
			   build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR, NULL_TREE,
						   expr)));
  else
    {
      expr = maybe_unconstrained_array (expr);
      etype = TREE_TYPE (expr);
      expr = build1 (VIEW_CONVERT_EXPR, type, expr);
    }

  /* If the result is an integral type whose size is not equal to
     the size of the underlying machine type, sign- or zero-extend
     the result.  We need not do this in the case where the input is
     an integral type of the same precision and signedness or if the output
     is a biased type or if both the input and output are unsigned.  */
  if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
      && ! (TREE_CODE (type) == INTEGER_TYPE
	    && TYPE_BIASED_REPRESENTATION_P (type))
      && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				GET_MODE_BITSIZE (TYPE_MODE (type)))
      && ! (INTEGRAL_TYPE_P (etype)
	    && TREE_UNSIGNED (type) == TREE_UNSIGNED (etype)
	    && operand_equal_p (TYPE_RM_SIZE (type),
				(TYPE_RM_SIZE (etype) != 0
				 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
				0))
      && ! (TREE_UNSIGNED (type) && TREE_UNSIGNED (etype)))
    {
      tree base_type = type_for_mode (TYPE_MODE (type), TREE_UNSIGNED (type));
      tree shift_expr
	= convert (base_type,
		   size_binop (MINUS_EXPR,
			       bitsize_int
			       (GET_MODE_BITSIZE (TYPE_MODE (type))),
			       TYPE_RM_SIZE (type)));
      expr
	= convert (type,
		   build_binary_op (RSHIFT_EXPR, base_type,
				    build_binary_op (LSHIFT_EXPR, base_type,
						     convert (base_type, expr),
						     shift_expr),
				    shift_expr));
    }

  /* An unchecked conversion should never raise Constraint_Error.  The code
     below assumes that GCC's conversion routines overflow the same way that
     the underlying hardware does.  This is probably true.  In the rare case
     when it is false, we can rely on the fact that such conversions are
     erroneous anyway.  */
  if (TREE_CODE (expr) == INTEGER_CST)
    TREE_OVERFLOW (expr) = TREE_CONSTANT_OVERFLOW (expr) = 0;

  /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
     show no longer constant.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
      && ! operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype), 1))
    TREE_CONSTANT (expr) = 0;