1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S 2 *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2005, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 2, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License distributed with GNAT; see file COPYING. If not, write *
19 * to the Free Software Foundation, 51 Franklin Street, Fifth Floor, *
20 * Boston, MA 02110-1301, USA. *
21 * *
22 * GNAT was originally developed by the GNAT team at New York University. *
23 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 * *
25 ****************************************************************************/
27 #include "config.h"
28 #include "system.h"
29 #include "coretypes.h"
30 #include "tm.h"
31 #include "tree.h"
32 #include "rtl.h"
33 #include "ggc.h"
34 #include "flags.h"
35 #include "output.h"
36 #include "ada.h"
37 #include "types.h"
38 #include "atree.h"
39 #include "stringt.h"
40 #include "uintp.h"
41 #include "fe.h"
42 #include "elists.h"
43 #include "nlists.h"
44 #include "sinfo.h"
45 #include "einfo.h"
46 #include "ada-tree.h"
47 #include "gigi.h"
49 static tree find_common_type (tree, tree);
50 static bool contains_save_expr_p (tree);
51 static tree contains_null_expr (tree);
52 static tree compare_arrays (tree, tree, tree);
53 static tree nonbinary_modular_operation (enum tree_code, tree, tree, tree);
54 static tree build_simple_component_ref (tree, tree, tree, bool);
56 /* Prepare expr to be an argument of a TRUTH_NOT_EXPR or other logical
57 operation.
59 This preparation consists of taking the ordinary representation of
60 an expression expr and producing a valid tree boolean expression
61 describing whether expr is nonzero. We could simply always do
63 build_binary_op (NE_EXPR, expr, integer_zero_node, 1),
65 but we optimize comparisons, &&, ||, and !.
67 The resulting type should always be the same as the input type.
68 This function is simpler than the corresponding C version since
69 the only possible operands will be things of Boolean type. */
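/* Illustration (not part of the original sources): if EXPR is already a
   comparison such as A < B, it is returned unchanged by the first case
   below; if EXPR is a plain Boolean object X, the default case builds the
   equivalent of X != 0 via build_binary_op (NE_EXPR, ...). */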
71 tree
72 gnat_truthvalue_conversion (tree expr)
74 tree type = TREE_TYPE (expr);
76 switch (TREE_CODE (expr))
78 case EQ_EXPR: case NE_EXPR: case LE_EXPR: case GE_EXPR:
79 case LT_EXPR: case GT_EXPR:
80 case TRUTH_ANDIF_EXPR:
81 case TRUTH_ORIF_EXPR:
82 case TRUTH_AND_EXPR:
83 case TRUTH_OR_EXPR:
84 case TRUTH_XOR_EXPR:
85 case ERROR_MARK:
86 return expr;
88 case INTEGER_CST:
89 return (integer_zerop (expr) ? convert (type, integer_zero_node)
90 : convert (type, integer_one_node));
92 case REAL_CST:
93 return (real_zerop (expr) ? convert (type, integer_zero_node)
94 : convert (type, integer_one_node));
96 case COND_EXPR:
97 /* Distribute the conversion into the arms of a COND_EXPR. */
98 return fold
99 (build3 (COND_EXPR, type, TREE_OPERAND (expr, 0),
100 gnat_truthvalue_conversion (TREE_OPERAND (expr, 1)),
101 gnat_truthvalue_conversion (TREE_OPERAND (expr, 2))));
103 default:
104 return build_binary_op (NE_EXPR, type, expr,
105 convert (type, integer_zero_node));
109 /* Return the base type of TYPE. */
111 tree
112 get_base_type (tree type)
114 if (TREE_CODE (type) == RECORD_TYPE
115 && TYPE_JUSTIFIED_MODULAR_P (type))
116 type = TREE_TYPE (TYPE_FIELDS (type));
118 while (TREE_TYPE (type)
119 && (TREE_CODE (type) == INTEGER_TYPE
120 || TREE_CODE (type) == REAL_TYPE))
121 type = TREE_TYPE (type);
123 return type;
126 /* Likewise, but only return types known to the Ada source. */
127 tree
128 get_ada_base_type (tree type)
130 while (TREE_TYPE (type)
131 && (TREE_CODE (type) == INTEGER_TYPE
132 || TREE_CODE (type) == REAL_TYPE)
133 && !TYPE_EXTRA_SUBTYPE_P (type))
134 type = TREE_TYPE (type);
136 return type;
139 /* EXP is a GCC tree representing an address. See if we can find how
140 strictly the object at that address is aligned. Return that alignment
141 in bits. If we don't know anything about the alignment, return 0. */
143 unsigned int
144 known_alignment (tree exp)
146 unsigned int this_alignment;
147 unsigned int lhs, rhs;
148 unsigned int type_alignment;
150 /* For pointer expressions, we know that the designated object is always at
151 least as strictly aligned as the designated subtype, so we account for
152 both type and expression information in this case.
154 Beware that we can still get a dummy designated subtype here (e.g. Taft
155 Amendment types), in which case the alignment information is meaningless
156 and should be ignored.
158 We always compute a type_alignment value and return the MAX of it
159 and what we get from the expression tree. Just set the
160 type_alignment value to 0 when the type information is to be ignored. */
161 type_alignment
162 = ((POINTER_TYPE_P (TREE_TYPE (exp))
163 && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp))))
164 ? TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))) : 0);
166 switch (TREE_CODE (exp))
168 case CONVERT_EXPR:
169 case NOP_EXPR:
170 case NON_LVALUE_EXPR:
171 /* Conversions between pointers and integers don't change the alignment
172 of the underlying object. */
173 this_alignment = known_alignment (TREE_OPERAND (exp, 0));
174 break;
176 case PLUS_EXPR:
177 case MINUS_EXPR:
178 /* If two addresses are added, the alignment of the result is the
179 minimum of the two alignments. */
180 lhs = known_alignment (TREE_OPERAND (exp, 0));
181 rhs = known_alignment (TREE_OPERAND (exp, 1));
182 this_alignment = MIN (lhs, rhs);
183 break;
185 case INTEGER_CST:
186 /* The first part of this represents the lowest set bit in the constant,
187 but it is in bytes, not bits. */
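/* Worked example (illustrative figures, assuming BITS_PER_UNIT == 8): for
   the constant 24, 24 & -24 isolates the lowest set bit, 8, so the known
   alignment is 8 * BITS_PER_UNIT = 64 bits, capped at BIGGEST_ALIGNMENT. */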
188 this_alignment
189 = MIN (BITS_PER_UNIT
190 * (TREE_INT_CST_LOW (exp) & - TREE_INT_CST_LOW (exp)),
191 BIGGEST_ALIGNMENT);
192 break;
194 case MULT_EXPR:
195 /* If we know the alignment of just one side, use it. Otherwise,
196 use the product of the alignments. */
197 lhs = known_alignment (TREE_OPERAND (exp, 0));
198 rhs = known_alignment (TREE_OPERAND (exp, 1));
200 if (lhs == 0 || rhs == 0)
201 this_alignment = MIN (BIGGEST_ALIGNMENT, MAX (lhs, rhs));
202 else
203 this_alignment = MIN (BIGGEST_ALIGNMENT, lhs * rhs);
204 break;
206 case ADDR_EXPR:
207 this_alignment = expr_align (TREE_OPERAND (exp, 0));
208 break;
210 default:
211 this_alignment = 0;
212 break;
215 return MAX (type_alignment, this_alignment);
218 /* We have a comparison or assignment operation on two types, T1 and T2,
219 which are either both array types or both record types.
220 Return the type that both operands should be converted to, if any.
221 Otherwise return zero. */
223 static tree
224 find_common_type (tree t1, tree t2)
226 /* If either type is non-BLKmode, use it. Note that we know that we will
227 not have any alignment problems since, if we did, the non-BLKmode
228 type could not have been used. */
229 if (TYPE_MODE (t1) != BLKmode)
230 return t1;
231 else if (TYPE_MODE (t2) != BLKmode)
232 return t2;
234 /* Otherwise, return the type that has a constant size. */
235 if (TREE_CONSTANT (TYPE_SIZE (t1)))
236 return t1;
237 else if (TREE_CONSTANT (TYPE_SIZE (t2)))
238 return t2;
240 /* In this case, both types have variable size. It's probably
241 best to leave the "type mismatch" because changing it could
242 cause a bad self-referential reference. */
243 return 0;
246 /* See if EXP contains a SAVE_EXPR in a position where we would
247 normally put it.
249 ??? This is a real kludge, but is probably the best approach short
250 of some very general solution. */
252 static bool
253 contains_save_expr_p (tree exp)
255 switch (TREE_CODE (exp))
257 case SAVE_EXPR:
258 return true;
260 case ADDR_EXPR: case INDIRECT_REF:
261 case COMPONENT_REF:
262 case NOP_EXPR: case CONVERT_EXPR: case VIEW_CONVERT_EXPR:
263 return contains_save_expr_p (TREE_OPERAND (exp, 0));
265 case CONSTRUCTOR:
267 tree value;
268 unsigned HOST_WIDE_INT ix;
270 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), ix, value)
271 if (contains_save_expr_p (value))
272 return true;
273 return false;
276 default:
277 return false;
281 /* See if EXP contains a NULL_EXPR in an expression we use for sizes. Return
282 it if so. This is used to detect types whose sizes involve computations
283 that are known to raise Constraint_Error. */
285 static tree
286 contains_null_expr (tree exp)
288 tree tem;
290 if (TREE_CODE (exp) == NULL_EXPR)
291 return exp;
293 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
295 case tcc_unary:
296 return contains_null_expr (TREE_OPERAND (exp, 0));
298 case tcc_comparison:
299 case tcc_binary:
300 tem = contains_null_expr (TREE_OPERAND (exp, 0));
301 if (tem)
302 return tem;
304 return contains_null_expr (TREE_OPERAND (exp, 1));
306 case tcc_expression:
307 switch (TREE_CODE (exp))
309 case SAVE_EXPR:
310 return contains_null_expr (TREE_OPERAND (exp, 0));
312 case COND_EXPR:
313 tem = contains_null_expr (TREE_OPERAND (exp, 0));
314 if (tem)
315 return tem;
317 tem = contains_null_expr (TREE_OPERAND (exp, 1));
318 if (tem)
319 return tem;
321 return contains_null_expr (TREE_OPERAND (exp, 2));
323 default:
324 return 0;
327 default:
328 return 0;
332 /* Return an expression tree representing an equality comparison of
333 A1 and A2, two objects of ARRAY_TYPE. The returned expression should
334 be of type RESULT_TYPE.
336 Two arrays are equal in one of two ways: (1) if both have zero length
337 in some dimension (not necessarily the same dimension) or (2) if the
338 lengths in each dimension are equal and the data is equal. We perform the
339 length tests in as efficient a manner as possible. */
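/* Illustration (not from the original sources): for Ada slices S1 (1 .. 0)
   and S2 (5 .. 2), every dimension has zero length, so the arrays compare
   equal without looking at the data; for S1 (1 .. 3) and S2 (7 .. 9), the
   lengths match and the element data must then compare equal as well. */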
341 static tree
342 compare_arrays (tree result_type, tree a1, tree a2)
344 tree t1 = TREE_TYPE (a1);
345 tree t2 = TREE_TYPE (a2);
346 tree result = convert (result_type, integer_one_node);
347 tree a1_is_null = convert (result_type, integer_zero_node);
348 tree a2_is_null = convert (result_type, integer_zero_node);
349 bool length_zero_p = false;
351 /* Process each dimension separately and compare the lengths. If any
352 dimension has a length known to be zero, set LENGTH_ZERO_P to true to
353 suppress the comparison of the data. */
354 while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
356 tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
357 tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
358 tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
359 tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
360 tree bt = get_base_type (TREE_TYPE (lb1));
361 tree length1 = fold (build2 (MINUS_EXPR, bt, ub1, lb1));
362 tree length2 = fold (build2 (MINUS_EXPR, bt, ub2, lb2));
363 tree nbt;
364 tree tem;
365 tree comparison, this_a1_is_null, this_a2_is_null;
367 /* If the length of the first array is a constant, swap our operands
368 unless the length of the second array is the constant zero.
369 Note that we have set the `length' values to the length - 1. */
370 if (TREE_CODE (length1) == INTEGER_CST
371 && !integer_zerop (fold (build2 (PLUS_EXPR, bt, length2,
372 convert (bt, integer_one_node)))))
374 tem = a1, a1 = a2, a2 = tem;
375 tem = t1, t1 = t2, t2 = tem;
376 tem = lb1, lb1 = lb2, lb2 = tem;
377 tem = ub1, ub1 = ub2, ub2 = tem;
378 tem = length1, length1 = length2, length2 = tem;
379 tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
382 /* If the length of this dimension in the second array is the constant
383 zero, we can just go inside the original bounds for the first
384 array and see if last < first. */
385 if (integer_zerop (fold (build2 (PLUS_EXPR, bt, length2,
386 convert (bt, integer_one_node)))))
388 tree ub = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
389 tree lb = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
391 comparison = build_binary_op (LT_EXPR, result_type, ub, lb);
392 comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
393 length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
395 length_zero_p = true;
396 this_a1_is_null = comparison;
397 this_a2_is_null = convert (result_type, integer_one_node);
400 /* If the length is some other constant value, we know that
401 this dimension in the first array cannot be superflat, so we
402 can just use its length from the actual stored bounds. */
403 else if (TREE_CODE (length2) == INTEGER_CST)
405 ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
406 lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
407 ub2 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
408 lb2 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
409 nbt = get_base_type (TREE_TYPE (ub1));
411 comparison
412 = build_binary_op (EQ_EXPR, result_type,
413 build_binary_op (MINUS_EXPR, nbt, ub1, lb1),
414 build_binary_op (MINUS_EXPR, nbt, ub2, lb2));
416 /* Note that we know that UB2 and LB2 are constant and hence
417 cannot contain a PLACEHOLDER_EXPR. */
419 comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
420 length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
422 this_a1_is_null = build_binary_op (LT_EXPR, result_type, ub1, lb1);
423 this_a2_is_null = convert (result_type, integer_zero_node);
426 /* Otherwise compare the computed lengths. */
427 else
429 length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
430 length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);
432 comparison
433 = build_binary_op (EQ_EXPR, result_type, length1, length2);
435 this_a1_is_null
436 = build_binary_op (LT_EXPR, result_type, length1,
437 convert (bt, integer_zero_node));
438 this_a2_is_null
439 = build_binary_op (LT_EXPR, result_type, length2,
440 convert (bt, integer_zero_node));
443 result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
444 result, comparison);
446 a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
447 this_a1_is_null, a1_is_null);
448 a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
449 this_a2_is_null, a2_is_null);
451 t1 = TREE_TYPE (t1);
452 t2 = TREE_TYPE (t2);
455 /* Unless the length in some dimension is known to be zero, compare the
456 data in the arrays. */
457 if (!length_zero_p)
459 tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));
461 if (type)
462 a1 = convert (type, a1), a2 = convert (type, a2);
464 result = build_binary_op (TRUTH_ANDIF_EXPR, result_type, result,
465 fold (build2 (EQ_EXPR, result_type, a1, a2)));
469 /* The result is also true if both sizes are zero. */
470 result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
471 build_binary_op (TRUTH_ANDIF_EXPR, result_type,
472 a1_is_null, a2_is_null),
473 result);
475 /* If either operand contains SAVE_EXPRs, they have to be evaluated before
476 starting the comparison above since the place they would otherwise
477 be evaluated would be wrong. */
479 if (contains_save_expr_p (a1))
480 result = build2 (COMPOUND_EXPR, result_type, a1, result);
482 if (contains_save_expr_p (a2))
483 result = build2 (COMPOUND_EXPR, result_type, a2, result);
485 return result;
488 /* Compute the result of applying OP_CODE to LHS and RHS, where both are of
489 type TYPE. We know that TYPE is a modular type with a nonbinary
490 modulus. */
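/* Sketch of the fixups generated below, written as plain C arithmetic
   rather than GCC trees; the modulus 7 and the operand values are arbitrary
   illustrative choices, not taken from the original sources.

     int modulus = 7, lhs = 5, rhs = 4;
     int sum  = lhs - (modulus - rhs);    addition of a constant is first
     if (sum < 0) sum += modulus;         rewritten as a subtraction; here 2,
                                          which is (5 + 4) mod 7
     int diff = lhs - 6;                  -1, so the modulus is added back,
     if (diff < 0) diff += modulus;       giving 6 == (5 - 6) mod 7
     int prod = (lhs * rhs) % modulus;    multiplication needs a real MOD; 6

   A negative intermediate result of the rewritten addition is fixed up by
   the same "add the modulus back" test used for subtraction. */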
492 static tree
493 nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
494 tree rhs)
496 tree modulus = TYPE_MODULUS (type);
497 unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
498 unsigned int precision;
499 bool unsignedp = true;
500 tree op_type = type;
501 tree result;
503 /* If this is an addition of a constant, convert it to a subtraction
504 of a constant since we can do that faster. */
505 if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
506 rhs = fold (build2 (MINUS_EXPR, type, modulus, rhs)), op_code = MINUS_EXPR;
508 /* For the logical operations, we only need PRECISION bits. For
509 addition and subtraction, we need one more and for multiplication we
510 need twice as many. But we never want to make a size smaller than
511 our size. */
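/* For instance (illustrative figures), with modulus 7 the operands fit in
   needed_precision = 3 bits; an addition can reach 6 + 6 = 12 and needs one
   more bit, while a multiplication can reach 6 * 6 = 36 and needs 6 bits. */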
512 if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
513 needed_precision += 1;
514 else if (op_code == MULT_EXPR)
515 needed_precision *= 2;
517 precision = MAX (needed_precision, TYPE_PRECISION (op_type));
519 /* Unsigned will do for everything but subtraction. */
520 if (op_code == MINUS_EXPR)
521 unsignedp = false;
523 /* If our type is the wrong signedness or isn't wide enough, make a new
524 type and convert both our operands to it. */
525 if (TYPE_PRECISION (op_type) < precision
526 || TYPE_UNSIGNED (op_type) != unsignedp)
528 /* Copy the node so we ensure it can be modified to make it modular. */
529 op_type = copy_node (gnat_type_for_size (precision, unsignedp));
530 modulus = convert (op_type, modulus);
531 SET_TYPE_MODULUS (op_type, modulus);
532 TYPE_MODULAR_P (op_type) = 1;
533 lhs = convert (op_type, lhs);
534 rhs = convert (op_type, rhs);
537 /* Do the operation, then we'll fix it up. */
538 result = fold (build2 (op_code, op_type, lhs, rhs));
540 /* For multiplication, we have no choice but to do a full modulus
541 operation. However, we want to do this in the narrowest
542 possible size. */
543 if (op_code == MULT_EXPR)
545 tree div_type = copy_node (gnat_type_for_size (needed_precision, 1));
546 modulus = convert (div_type, modulus);
547 SET_TYPE_MODULUS (div_type, modulus);
548 TYPE_MODULAR_P (div_type) = 1;
549 result = convert (op_type,
550 fold (build2 (TRUNC_MOD_EXPR, div_type,
551 convert (div_type, result), modulus)));
554 /* For subtraction, add the modulus back if we are negative. */
555 else if (op_code == MINUS_EXPR)
557 result = save_expr (result);
558 result = fold (build3 (COND_EXPR, op_type,
559 build2 (LT_EXPR, integer_type_node, result,
560 convert (op_type, integer_zero_node)),
561 fold (build2 (PLUS_EXPR, op_type,
562 result, modulus)),
563 result));
566 /* For the other operations, subtract the modulus if we are >= it. */
567 else
569 result = save_expr (result);
570 result = fold (build3 (COND_EXPR, op_type,
571 build2 (GE_EXPR, integer_type_node,
572 result, modulus),
573 fold (build2 (MINUS_EXPR, op_type,
574 result, modulus)),
575 result));
578 return convert (type, result);
581 /* Make a binary operation of kind OP_CODE. RESULT_TYPE is the type
582 desired for the result. Usually the operation is to be performed
583 in that type. For MODIFY_EXPR and ARRAY_REF, RESULT_TYPE may be 0
584 in which case the type to be used will be derived from the operands.
586 This function is very much unlike the ones for C and C++ since we
587 have already done any type conversion and matching required. All we
588 have to do here is validate the work done by SEM and handle subtypes. */
590 tree
591 build_binary_op (enum tree_code op_code, tree result_type,
592 tree left_operand, tree right_operand)
594 tree left_type = TREE_TYPE (left_operand);
595 tree right_type = TREE_TYPE (right_operand);
596 tree left_base_type = get_base_type (left_type);
597 tree right_base_type = get_base_type (right_type);
598 tree operation_type = result_type;
599 tree best_type = NULL_TREE;
600 tree modulus;
601 tree result;
602 bool has_side_effects = false;
604 if (operation_type
605 && TREE_CODE (operation_type) == RECORD_TYPE
606 && TYPE_JUSTIFIED_MODULAR_P (operation_type))
607 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
609 if (operation_type
610 && !AGGREGATE_TYPE_P (operation_type)
611 && TYPE_EXTRA_SUBTYPE_P (operation_type))
612 operation_type = get_base_type (operation_type);
614 modulus = (operation_type && TREE_CODE (operation_type) == INTEGER_TYPE
615 && TYPE_MODULAR_P (operation_type)
616 ? TYPE_MODULUS (operation_type) : 0);
618 switch (op_code)
620 case MODIFY_EXPR:
621 /* If there were any integral or pointer conversions on LHS, remove
622 them; we'll be putting them back below if needed. Likewise for
623 conversions between array and record types. But don't do this if
624 the right operand is not BLKmode (for packed arrays)
625 unless we are not changing the mode. */
626 while ((TREE_CODE (left_operand) == CONVERT_EXPR
627 || TREE_CODE (left_operand) == NOP_EXPR
628 || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
629 && (((INTEGRAL_TYPE_P (left_type)
630 || POINTER_TYPE_P (left_type))
631 && (INTEGRAL_TYPE_P (TREE_TYPE
632 (TREE_OPERAND (left_operand, 0)))
633 || POINTER_TYPE_P (TREE_TYPE
634 (TREE_OPERAND (left_operand, 0)))))
635 || (((TREE_CODE (left_type) == RECORD_TYPE
636 /* Don't remove conversions to justified modular
637 types. */
638 && !TYPE_JUSTIFIED_MODULAR_P (left_type))
639 || TREE_CODE (left_type) == ARRAY_TYPE)
640 && ((TREE_CODE (TREE_TYPE
641 (TREE_OPERAND (left_operand, 0)))
642 == RECORD_TYPE)
643 || (TREE_CODE (TREE_TYPE
644 (TREE_OPERAND (left_operand, 0)))
645 == ARRAY_TYPE))
646 && (TYPE_MODE (right_type) == BLKmode
647 || (TYPE_MODE (left_type)
648 == TYPE_MODE (TREE_TYPE
649 (TREE_OPERAND
650 (left_operand, 0))))))))
652 left_operand = TREE_OPERAND (left_operand, 0);
653 left_type = TREE_TYPE (left_operand);
656 if (!operation_type)
657 operation_type = left_type;
659 /* If the RHS has a conversion between record and array types and
660 an inner type is no worse, use it. Note we cannot do this for
661 modular types or types with TYPE_ALIGN_OK, since the latter
662 might indicate a conversion between a root type and a class-wide
663 type, which we must not remove. */
664 while (TREE_CODE (right_operand) == VIEW_CONVERT_EXPR
665 && (((TREE_CODE (right_type) == RECORD_TYPE
666 || TREE_CODE (right_type) == UNION_TYPE)
667 && !TYPE_JUSTIFIED_MODULAR_P (right_type)
668 && !TYPE_ALIGN_OK (right_type)
669 && !TYPE_IS_FAT_POINTER_P (right_type))
670 || TREE_CODE (right_type) == ARRAY_TYPE)
671 && ((((TREE_CODE (TREE_TYPE (TREE_OPERAND (right_operand, 0)))
672 == RECORD_TYPE)
673 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (right_operand, 0)))
674 == UNION_TYPE))
675 && !(TYPE_JUSTIFIED_MODULAR_P
676 (TREE_TYPE (TREE_OPERAND (right_operand, 0))))
677 && !(TYPE_ALIGN_OK
678 (TREE_TYPE (TREE_OPERAND (right_operand, 0))))
679 && !(TYPE_IS_FAT_POINTER_P
680 (TREE_TYPE (TREE_OPERAND (right_operand, 0)))))
681 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (right_operand, 0)))
682 == ARRAY_TYPE))
683 && (0 == (best_type
684 = find_common_type (right_type,
685 TREE_TYPE (TREE_OPERAND
686 (right_operand, 0))))
687 || right_type != best_type))
689 right_operand = TREE_OPERAND (right_operand, 0);
690 right_type = TREE_TYPE (right_operand);
693 /* If we are copying one array or record to another, find the best type
694 to use. */
695 if (((TREE_CODE (left_type) == ARRAY_TYPE
696 && TREE_CODE (right_type) == ARRAY_TYPE)
697 || (TREE_CODE (left_type) == RECORD_TYPE
698 && TREE_CODE (right_type) == RECORD_TYPE))
699 && (best_type = find_common_type (left_type, right_type)))
700 operation_type = best_type;
702 /* If a class-wide type may be involved, force use of the RHS type. */
703 if ((TREE_CODE (right_type) == RECORD_TYPE
704 || TREE_CODE (right_type) == UNION_TYPE)
705 && TYPE_ALIGN_OK (right_type))
706 operation_type = right_type;
708 /* Ensure everything on the LHS is valid. If we have a field reference,
709 strip anything that get_inner_reference can handle. Then remove any
710 conversions between types having the same code and mode. Mark
711 VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE. When done, we must have
712 either an INDIRECT_REF, a NULL_EXPR, or a decl. */
713 result = left_operand;
714 while (1)
716 tree restype = TREE_TYPE (result);
718 if (TREE_CODE (result) == COMPONENT_REF
719 || TREE_CODE (result) == ARRAY_REF
720 || TREE_CODE (result) == ARRAY_RANGE_REF)
721 while (handled_component_p (result))
722 result = TREE_OPERAND (result, 0);
723 else if (TREE_CODE (result) == REALPART_EXPR
724 || TREE_CODE (result) == IMAGPART_EXPR
725 || ((TREE_CODE (result) == NOP_EXPR
726 || TREE_CODE (result) == CONVERT_EXPR)
727 && (((TREE_CODE (restype)
728 == TREE_CODE (TREE_TYPE
729 (TREE_OPERAND (result, 0))))
730 && (TYPE_MODE (TREE_TYPE
731 (TREE_OPERAND (result, 0)))
732 == TYPE_MODE (restype)))
733 || TYPE_ALIGN_OK (restype))))
734 result = TREE_OPERAND (result, 0);
735 else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
737 TREE_ADDRESSABLE (result) = 1;
738 result = TREE_OPERAND (result, 0);
740 else
741 break;
744 gcc_assert (TREE_CODE (result) == INDIRECT_REF
745 || TREE_CODE (result) == NULL_EXPR || DECL_P (result));
747 /* Convert the right operand to the operation type unless
748 it is either already of the correct type or if the type
749 involves a placeholder, since the RHS may not have the same
750 record type. */
751 if (operation_type != right_type
752 && (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type))))
754 right_operand = convert (operation_type, right_operand);
755 right_type = operation_type;
758 /* If the left operand is not the same type as the operation type,
759 surround it in a VIEW_CONVERT_EXPR. */
760 if (left_type != operation_type)
761 left_operand = unchecked_convert (operation_type, left_operand, false);
763 has_side_effects = true;
764 modulus = NULL_TREE;
765 break;
767 case ARRAY_REF:
768 if (!operation_type)
769 operation_type = TREE_TYPE (left_type);
771 /* ... fall through ... */
773 case ARRAY_RANGE_REF:
775 /* First convert the right operand to its base type. This will
776 prevent unneeded signedness conversions when sizetype is wider than
777 integer. */
778 right_operand = convert (right_base_type, right_operand);
779 right_operand = convert (TYPE_DOMAIN (left_type), right_operand);
781 if (!TREE_CONSTANT (right_operand)
782 || !TREE_CONSTANT (TYPE_MIN_VALUE (right_type)))
783 gnat_mark_addressable (left_operand);
785 modulus = NULL_TREE;
786 break;
788 case GE_EXPR:
789 case LE_EXPR:
790 case GT_EXPR:
791 case LT_EXPR:
792 gcc_assert (!POINTER_TYPE_P (left_type));
794 /* ... fall through ... */
796 case EQ_EXPR:
797 case NE_EXPR:
798 /* If either operand is a NULL_EXPR, just return a new one. */
799 if (TREE_CODE (left_operand) == NULL_EXPR)
800 return build2 (op_code, result_type,
801 build1 (NULL_EXPR, integer_type_node,
802 TREE_OPERAND (left_operand, 0)),
803 integer_zero_node);
805 else if (TREE_CODE (right_operand) == NULL_EXPR)
806 return build2 (op_code, result_type,
807 build1 (NULL_EXPR, integer_type_node,
808 TREE_OPERAND (right_operand, 0)),
809 integer_zero_node);
811 /* If either object is a justified modular type, get the
812 fields from within. */
813 if (TREE_CODE (left_type) == RECORD_TYPE
814 && TYPE_JUSTIFIED_MODULAR_P (left_type))
816 left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
817 left_operand);
818 left_type = TREE_TYPE (left_operand);
819 left_base_type = get_base_type (left_type);
822 if (TREE_CODE (right_type) == RECORD_TYPE
823 && TYPE_JUSTIFIED_MODULAR_P (right_type))
825 right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
826 right_operand);
827 right_type = TREE_TYPE (right_operand);
828 right_base_type = get_base_type (right_type);
831 /* If both objects are arrays, compare them specially. */
832 if ((TREE_CODE (left_type) == ARRAY_TYPE
833 || (TREE_CODE (left_type) == INTEGER_TYPE
834 && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
835 && (TREE_CODE (right_type) == ARRAY_TYPE
836 || (TREE_CODE (right_type) == INTEGER_TYPE
837 && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
839 result = compare_arrays (result_type, left_operand, right_operand);
841 if (op_code == NE_EXPR)
842 result = invert_truthvalue (result);
843 else
844 gcc_assert (op_code == EQ_EXPR);
846 return result;
849 /* Otherwise, the base types must be the same unless the objects are
850 records. If we have records, use the best type and convert both
851 operands to that type. */
852 if (left_base_type != right_base_type)
854 if (TREE_CODE (left_base_type) == RECORD_TYPE
855 && TREE_CODE (right_base_type) == RECORD_TYPE)
857 /* The only way this is permitted is if both
858 types have the same name. In that case, one of them must
859 not be self-referential. Use that one as the best type.
860 Even better is if one is of fixed size. */
861 best_type = NULL_TREE;
863 gcc_assert (TYPE_NAME (left_base_type)
864 && (TYPE_NAME (left_base_type)
865 == TYPE_NAME (right_base_type)));
867 if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
868 best_type = left_base_type;
869 else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
870 best_type = right_base_type;
871 else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
872 best_type = left_base_type;
873 else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
874 best_type = right_base_type;
875 else
876 gcc_unreachable ();
878 left_operand = convert (best_type, left_operand);
879 right_operand = convert (best_type, right_operand);
881 else
882 gcc_unreachable ();
885 /* If we are comparing a fat pointer against zero, we need to
886 just compare the data pointer. */
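/* In other words (illustrative description), a test such as P = null on an
   access to an unconstrained array is reduced to comparing the first field
   of the fat pointer, i.e. its data pointer, against a null pointer of the
   same type. */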
887 else if (TYPE_FAT_POINTER_P (left_base_type)
888 && TREE_CODE (right_operand) == CONSTRUCTOR
889 && integer_zerop (VEC_index (constructor_elt,
890 CONSTRUCTOR_ELTS (right_operand),
891 0)->value))
893 right_operand = build_component_ref (left_operand, NULL_TREE,
894 TYPE_FIELDS (left_base_type),
895 false);
896 left_operand = convert (TREE_TYPE (right_operand),
897 integer_zero_node);
899 else
901 left_operand = convert (left_base_type, left_operand);
902 right_operand = convert (right_base_type, right_operand);
905 modulus = NULL_TREE;
906 break;
908 case PREINCREMENT_EXPR:
909 case PREDECREMENT_EXPR:
910 case POSTINCREMENT_EXPR:
911 case POSTDECREMENT_EXPR:
912 /* In these, the result type and the left operand type should be the
913 same. Do the operation in the base type of those and convert the
914 right operand (which is an integer) to that type.
916 Note that these operations are only used in loop control where
917 we guarantee that no overflow can occur. So nothing special need
918 be done for modular types. */
920 gcc_assert (left_type == result_type);
921 operation_type = get_base_type (result_type);
922 left_operand = convert (operation_type, left_operand);
923 right_operand = convert (operation_type, right_operand);
924 has_side_effects = true;
925 modulus = NULL_TREE;
926 break;
928 case LSHIFT_EXPR:
929 case RSHIFT_EXPR:
930 case LROTATE_EXPR:
931 case RROTATE_EXPR:
932 /* The RHS of a shift can be any type. Also, ignore any modulus
933 (we used to abort, but this is needed for unchecked conversion
934 to modular types). Otherwise, processing is the same as normal. */
935 gcc_assert (operation_type == left_base_type);
936 modulus = NULL_TREE;
937 left_operand = convert (operation_type, left_operand);
938 break;
940 case TRUTH_ANDIF_EXPR:
941 case TRUTH_ORIF_EXPR:
942 case TRUTH_AND_EXPR:
943 case TRUTH_OR_EXPR:
944 case TRUTH_XOR_EXPR:
945 left_operand = gnat_truthvalue_conversion (left_operand);
946 right_operand = gnat_truthvalue_conversion (right_operand);
947 goto common;
949 case BIT_AND_EXPR:
950 case BIT_IOR_EXPR:
951 case BIT_XOR_EXPR:
952 /* For binary modulus, if the inputs are in range, so are the
953 outputs. */
954 if (modulus && integer_pow2p (modulus))
955 modulus = NULL_TREE;
957 goto common;
959 case COMPLEX_EXPR:
960 gcc_assert (TREE_TYPE (result_type) == left_base_type
961 && TREE_TYPE (result_type) == right_base_type);
962 left_operand = convert (left_base_type, left_operand);
963 right_operand = convert (right_base_type, right_operand);
964 break;
966 case TRUNC_DIV_EXPR: case TRUNC_MOD_EXPR:
967 case CEIL_DIV_EXPR: case CEIL_MOD_EXPR:
968 case FLOOR_DIV_EXPR: case FLOOR_MOD_EXPR:
969 case ROUND_DIV_EXPR: case ROUND_MOD_EXPR:
970 /* These always produce results lower than either operand. */
971 modulus = NULL_TREE;
972 goto common;
974 default:
975 common:
976 /* The result type should be the same as the base types of
977 both operands (and they should be the same). Convert
978 everything to the result type. */
980 gcc_assert (operation_type == left_base_type
981 && left_base_type == right_base_type);
982 left_operand = convert (operation_type, left_operand);
983 right_operand = convert (operation_type, right_operand);
986 if (modulus && !integer_pow2p (modulus))
988 result = nonbinary_modular_operation (op_code, operation_type,
989 left_operand, right_operand);
990 modulus = NULL_TREE;
992 /* If either operand is a NULL_EXPR, just return a new one. */
993 else if (TREE_CODE (left_operand) == NULL_EXPR)
994 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
995 else if (TREE_CODE (right_operand) == NULL_EXPR)
996 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
997 else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
998 result = fold (build4 (op_code, operation_type, left_operand,
999 right_operand, NULL_TREE, NULL_TREE));
1000 else
1001 result
1002 = fold (build2 (op_code, operation_type, left_operand, right_operand));
1004 TREE_SIDE_EFFECTS (result) |= has_side_effects;
1005 TREE_CONSTANT (result)
1006 |= (TREE_CONSTANT (left_operand) & TREE_CONSTANT (right_operand)
1007 && op_code != ARRAY_REF && op_code != ARRAY_RANGE_REF);
1009 if ((op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
1010 && TYPE_VOLATILE (operation_type))
1011 TREE_THIS_VOLATILE (result) = 1;
1013 /* If we are working with modular types, perform the MOD operation
1014 if something above hasn't eliminated the need for it. */
1015 if (modulus)
1016 result = fold (build2 (FLOOR_MOD_EXPR, operation_type, result,
1017 convert (operation_type, modulus)));
1019 if (result_type && result_type != operation_type)
1020 result = convert (result_type, result);
1022 return result;
1025 /* Similar, but for unary operations. */
1027 tree
1028 build_unary_op (enum tree_code op_code, tree result_type, tree operand)
1030 tree type = TREE_TYPE (operand);
1031 tree base_type = get_base_type (type);
1032 tree operation_type = result_type;
1033 tree result;
1034 bool side_effects = false;
1036 if (operation_type
1037 && TREE_CODE (operation_type) == RECORD_TYPE
1038 && TYPE_JUSTIFIED_MODULAR_P (operation_type))
1039 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
1041 if (operation_type
1042 && !AGGREGATE_TYPE_P (operation_type)
1043 && TYPE_EXTRA_SUBTYPE_P (operation_type))
1044 operation_type = get_base_type (operation_type);
1046 switch (op_code)
1048 case REALPART_EXPR:
1049 case IMAGPART_EXPR:
1050 if (!operation_type)
1051 result_type = operation_type = TREE_TYPE (type);
1052 else
1053 gcc_assert (result_type == TREE_TYPE (type));
1055 result = fold (build1 (op_code, operation_type, operand));
1056 break;
1058 case TRUTH_NOT_EXPR:
1059 gcc_assert (result_type == base_type);
1060 result = invert_truthvalue (gnat_truthvalue_conversion (operand));
1061 break;
1063 case ATTR_ADDR_EXPR:
1064 case ADDR_EXPR:
1065 switch (TREE_CODE (operand))
1067 case INDIRECT_REF:
1068 case UNCONSTRAINED_ARRAY_REF:
1069 result = TREE_OPERAND (operand, 0);
1071 /* Make sure the type here is a pointer, not a reference.
1072 GCC wants pointer types for function addresses. */
1073 if (!result_type)
1074 result_type = build_pointer_type (type);
1075 break;
1077 case NULL_EXPR:
1078 result = operand;
1079 TREE_TYPE (result) = type = build_pointer_type (type);
1080 break;
1082 case ARRAY_REF:
1083 case ARRAY_RANGE_REF:
1084 case COMPONENT_REF:
1085 case BIT_FIELD_REF:
1086 /* If this is for 'Address, find the address of the prefix and
1087 add the offset to the field. Otherwise, do this the normal
1088 way. */
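/* Illustration with assumed figures: for a component found at bit position
   24 with no variable offset, get_inner_reference returns bitpos = 24, so
   the byte offset added below is 24 / BITS_PER_UNIT = 3; a bit position not
   on a storage-unit boundary only triggers the post_error warning below. */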
1089 if (op_code == ATTR_ADDR_EXPR)
1091 HOST_WIDE_INT bitsize;
1092 HOST_WIDE_INT bitpos;
1093 tree offset, inner;
1094 enum machine_mode mode;
1095 int unsignedp, volatilep;
1097 inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
1098 &mode, &unsignedp, &volatilep,
1099 false);
1101 /* If INNER is a padding type whose field has a self-referential
1102 size, convert to that inner type. We know the offset is zero
1103 and we need to have that type visible. */
1104 if (TREE_CODE (TREE_TYPE (inner)) == RECORD_TYPE
1105 && TYPE_IS_PADDING_P (TREE_TYPE (inner))
1106 && (CONTAINS_PLACEHOLDER_P
1107 (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS
1108 (TREE_TYPE (inner)))))))
1109 inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
1110 inner);
1112 /* Compute the offset as a byte offset from INNER. */
1113 if (!offset)
1114 offset = size_zero_node;
1116 if (bitpos % BITS_PER_UNIT != 0)
1117 post_error
1118 ("taking address of object not aligned on storage unit?",
1119 error_gnat_node);
1121 offset = size_binop (PLUS_EXPR, offset,
1122 size_int (bitpos / BITS_PER_UNIT));
1124 /* Take the address of INNER, convert the offset to void *, and
1125 add them. It will later be converted to the desired result
1126 type, if any. */
1127 inner = build_unary_op (ADDR_EXPR, NULL_TREE, inner);
1128 inner = convert (ptr_void_type_node, inner);
1129 offset = convert (ptr_void_type_node, offset);
1130 result = build_binary_op (PLUS_EXPR, ptr_void_type_node,
1131 inner, offset);
1132 result = convert (build_pointer_type (TREE_TYPE (operand)),
1133 result);
1134 break;
1136 goto common;
1138 case CONSTRUCTOR:
1139 /* If this is just a constructor for a padded record, we can
1140 just take the address of the single field and convert it to
1141 a pointer to our type. */
1142 if (TREE_CODE (type) == RECORD_TYPE && TYPE_IS_PADDING_P (type))
1144 result = VEC_index (constructor_elt,
1145 CONSTRUCTOR_ELTS (operand),
1146 0)->value;
1147 result
1148 = build_unary_op (ADDR_EXPR, NULL_TREE, result);
1149 result = convert (build_pointer_type (TREE_TYPE (operand)),
1150 result);
1151 break;
1154 goto common;
1156 case NOP_EXPR:
1157 if (AGGREGATE_TYPE_P (type)
1158 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
1159 return build_unary_op (ADDR_EXPR, result_type,
1160 TREE_OPERAND (operand, 0));
1162 /* If this NOP_EXPR doesn't change the mode, get the result type
1163 from this type and go down. We need to do this in case
1164 this is a conversion of a CONST_DECL. */
1165 if (TYPE_MODE (type) != BLKmode
1166 && (TYPE_MODE (type)
1167 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0)))))
1168 return build_unary_op (ADDR_EXPR,
1169 (result_type ? result_type
1170 : build_pointer_type (type)),
1171 TREE_OPERAND (operand, 0));
1172 goto common;
1174 case CONST_DECL:
1175 operand = DECL_CONST_CORRESPONDING_VAR (operand);
1177 /* ... fall through ... */
1179 default:
1180 common:
1182 /* If we are taking the address of a padded record whose field
1183 contains a template, take the address of the template. */
1184 if (TREE_CODE (type) == RECORD_TYPE
1185 && TYPE_IS_PADDING_P (type)
1186 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
1187 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
1189 type = TREE_TYPE (TYPE_FIELDS (type));
1190 operand = convert (type, operand);
1193 if (type != error_mark_node)
1194 operation_type = build_pointer_type (type);
1196 gnat_mark_addressable (operand);
1197 result = fold (build1 (ADDR_EXPR, operation_type, operand));
1200 TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
1201 break;
1203 case INDIRECT_REF:
1204 /* If we want to refer to an entire unconstrained array,
1205 make up an expression to do so. This will never survive to
1206 the backend. If TYPE is a thin pointer, first convert the
1207 operand to a fat pointer. */
1208 if (TYPE_THIN_POINTER_P (type)
1209 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
1211 operand
1212 = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))),
1213 operand);
1214 type = TREE_TYPE (operand);
1217 if (TYPE_FAT_POINTER_P (type))
1219 result = build1 (UNCONSTRAINED_ARRAY_REF,
1220 TYPE_UNCONSTRAINED_ARRAY (type), operand);
1221 TREE_READONLY (result) = TREE_STATIC (result)
1222 = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
1224 else if (TREE_CODE (operand) == ADDR_EXPR)
1225 result = TREE_OPERAND (operand, 0);
1227 else
1229 result = fold (build1 (op_code, TREE_TYPE (type), operand));
1230 TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
1233 side_effects
1234 = (!TYPE_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)));
1235 break;
1237 case NEGATE_EXPR:
1238 case BIT_NOT_EXPR:
1240 tree modulus = ((operation_type
1241 && TREE_CODE (operation_type) == INTEGER_TYPE
1242 && TYPE_MODULAR_P (operation_type))
1243 ? TYPE_MODULUS (operation_type) : 0);
1244 int mod_pow2 = modulus && integer_pow2p (modulus);
1246 /* If this is a modular type, there are various possibilities
1247 depending on the operation and whether the modulus is a
1248 power of two or not. */
1250 if (modulus)
1252 gcc_assert (operation_type == base_type);
1253 operand = convert (operation_type, operand);
1255 /* The fastest in the negate case for binary modulus is
1256 the straightforward code; the TRUNC_MOD_EXPR below
1257 is an AND operation. */
1258 if (op_code == NEGATE_EXPR && mod_pow2)
1259 result = fold (build2 (TRUNC_MOD_EXPR, operation_type,
1260 fold (build1 (NEGATE_EXPR, operation_type,
1261 operand)),
1262 modulus));
1264 /* For the nonbinary negate case, return zero for a zero operand,
1265 else return the modulus minus the operand. If the modulus
1266 is a power of two minus one, we can do the subtraction
1267 as an XOR since it is equivalent and faster on most machines. */
1268 else if (op_code == NEGATE_EXPR && !mod_pow2)
1270 if (integer_pow2p (fold (build2 (PLUS_EXPR, operation_type,
1271 modulus,
1272 convert (operation_type,
1273 integer_one_node)))))
1274 result = fold (build2 (BIT_XOR_EXPR, operation_type,
1275 operand, modulus));
1276 else
1277 result = fold (build2 (MINUS_EXPR, operation_type,
1278 modulus, operand));
1280 result = fold (build3 (COND_EXPR, operation_type,
1281 fold (build2 (NE_EXPR,
1282 integer_type_node,
1283 operand,
1284 convert
1285 (operation_type,
1286 integer_zero_node))),
1287 result, operand));
1289 else
1291 /* For the NOT cases, we need a constant equal to
1292 the modulus minus one. For a binary modulus, we
1293 XOR against the constant; for a nonbinary modulus, we
1294 subtract the operand from that constant. */
1296 tree cnst = fold (build2 (MINUS_EXPR, operation_type, modulus,
1297 convert (operation_type,
1298 integer_one_node)));
1300 if (mod_pow2)
1301 result = fold (build2 (BIT_XOR_EXPR, operation_type,
1302 operand, cnst));
1303 else
1304 result = fold (build2 (MINUS_EXPR, operation_type,
1305 cnst, operand));
1308 break;
1312 /* ... fall through ... */
1314 default:
1315 gcc_assert (operation_type == base_type);
1316 result = fold (build1 (op_code, operation_type, convert (operation_type,
1317 operand)));
1320 if (side_effects)
1322 TREE_SIDE_EFFECTS (result) = 1;
1323 if (TREE_CODE (result) == INDIRECT_REF)
1324 TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
1327 if (result_type && TREE_TYPE (result) != result_type)
1328 result = convert (result_type, result);
1330 return result;
1333 /* Similar, but for COND_EXPR. */
1335 tree
1336 build_cond_expr (tree result_type, tree condition_operand,
1337 tree true_operand, tree false_operand)
1339 tree result;
1340 bool addr_p = false;
1342 /* The front-end verifies that result, true and false operands have the same
1343 base type. Convert everything to the result type. */
1345 true_operand = convert (result_type, true_operand);
1346 false_operand = convert (result_type, false_operand);
1348 /* If the result type is unconstrained, take the address of
1349 the operands and then dereference our result. */
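/* That is (illustrative description), instead of building (c ? t : f) of an
   unconstrained or self-referential type directly, we build *(c ? &t : &f),
   with the INDIRECT_REF added at the end of this function. */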
1350 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1351 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1353 addr_p = true;
1354 result_type = build_pointer_type (result_type);
1355 true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
1356 false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
1359 result = fold (build3 (COND_EXPR, result_type, condition_operand,
1360 true_operand, false_operand));
1362 /* If either operand is a SAVE_EXPR (possibly surrounded by
1363 arithmetic), make sure it gets done. */
1364 true_operand = skip_simple_arithmetic (true_operand);
1365 false_operand = skip_simple_arithmetic (false_operand);
1367 if (TREE_CODE (true_operand) == SAVE_EXPR)
1368 result = build2 (COMPOUND_EXPR, result_type, true_operand, result);
1370 if (TREE_CODE (false_operand) == SAVE_EXPR)
1371 result = build2 (COMPOUND_EXPR, result_type, false_operand, result);
1373 /* ??? Seems the code above is wrong, as it may move ahead of the COND
1374 SAVE_EXPRs with side effects and not shared by both arms. */
1376 if (addr_p)
1377 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1379 return result;
1383 /* Build a CALL_EXPR to call FUNDECL with one argument, ARG. Return
1384 the CALL_EXPR. */
1386 tree
1387 build_call_1_expr (tree fundecl, tree arg)
1389 tree call = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fundecl)),
1390 build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
1391 chainon (NULL_TREE, build_tree_list (NULL_TREE, arg)),
1392 NULL_TREE);
1394 TREE_SIDE_EFFECTS (call) = 1;
1396 return call;
1399 /* Build a CALL_EXPR to call FUNDECL with two arguments, ARG1 & ARG2. Return
1400 the CALL_EXPR. */
1402 tree
1403 build_call_2_expr (tree fundecl, tree arg1, tree arg2)
1405 tree call = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fundecl)),
1406 build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
1407 chainon (chainon (NULL_TREE,
1408 build_tree_list (NULL_TREE, arg1)),
1409 build_tree_list (NULL_TREE, arg2)),
1410 NULL_TREE);
1412 TREE_SIDE_EFFECTS (call) = 1;
1414 return call;
1417 /* Likewise to call FUNDECL with no arguments. */
1419 tree
1420 build_call_0_expr (tree fundecl)
1422 tree call = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fundecl)),
1423 build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
1424 NULL_TREE, NULL_TREE);
1426 TREE_SIDE_EFFECTS (call) = 1;
1428 return call;
1431 /* Call a function that raises an exception and pass the line number and file
1432 name, if requested. MSG says which exception function to call. */
1434 tree
1435 build_call_raise (int msg)
1437 tree fndecl = gnat_raise_decls[msg];
1438 const char *str
1439 = (Debug_Flag_NN || Exception_Locations_Suppressed) ? "" : ref_filename;
1440 int len = strlen (str) + 1;
1441 tree filename = build_string (len, str);
1443 TREE_TYPE (filename)
1444 = build_array_type (char_type_node,
1445 build_index_type (build_int_cst (NULL_TREE, len)));
1447 return
1448 build_call_2_expr (fndecl,
1449 build1 (ADDR_EXPR, build_pointer_type (char_type_node),
1450 filename),
1451 build_int_cst (NULL_TREE, input_line));
1454 /* Return a CONSTRUCTOR of TYPE whose list is LIST. */
1456 tree
1457 gnat_build_constructor (tree type, tree list)
1459 tree elmt;
1460 bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
1461 bool side_effects = false;
1462 tree result;
1464 for (elmt = list; elmt; elmt = TREE_CHAIN (elmt))
1466 if (!TREE_CONSTANT (TREE_VALUE (elmt))
1467 || (TREE_CODE (type) == RECORD_TYPE
1468 && DECL_BIT_FIELD (TREE_PURPOSE (elmt))
1469 && TREE_CODE (TREE_VALUE (elmt)) != INTEGER_CST)
1470 || !initializer_constant_valid_p (TREE_VALUE (elmt),
1471 TREE_TYPE (TREE_VALUE (elmt))))
1472 allconstant = false;
1474 if (TREE_SIDE_EFFECTS (TREE_VALUE (elmt)))
1475 side_effects = true;
1477 /* Propagate a NULL_EXPR from the size of the type. We won't ever
1478 be executing the code we generate here in that case, but handle it
1479 specially to avoid the compiler blowing up. */
1480 if (TREE_CODE (type) == RECORD_TYPE
1481 && (0 != (result
1482 = contains_null_expr (DECL_SIZE (TREE_PURPOSE (elmt))))))
1483 return build1 (NULL_EXPR, type, TREE_OPERAND (result, 0));
1486 /* If TYPE is a RECORD_TYPE and the fields are not in the
1487 same order as their bit position, don't treat this as constant
1488 since varasm.c can't handle it. */
1489 if (allconstant && TREE_CODE (type) == RECORD_TYPE)
1491 tree last_pos = bitsize_zero_node;
1492 tree field;
1494 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1496 tree this_pos = bit_position (field);
1498 if (TREE_CODE (this_pos) != INTEGER_CST
1499 || tree_int_cst_lt (this_pos, last_pos))
1501 allconstant = false;
1502 break;
1505 last_pos = this_pos;
1509 result = build_constructor_from_list (type, list);
1510 TREE_CONSTANT (result) = TREE_INVARIANT (result)
1511 = TREE_STATIC (result) = allconstant;
1512 TREE_SIDE_EFFECTS (result) = side_effects;
1513 TREE_READONLY (result) = TYPE_READONLY (type) || allconstant;
1514 return result;
1517 /* Return a COMPONENT_REF to access a field that is given by COMPONENT,
1518 an IDENTIFIER_NODE giving the name of the field, or FIELD, a FIELD_DECL,
1519 for the field. Don't fold the result if NO_FOLD_P is true.
1521 We also handle the fact that we might have been passed a pointer to the
1522 actual record and know how to look for fields in variant parts. */
1524 static tree
1525 build_simple_component_ref (tree record_variable, tree component,
1526 tree field, bool no_fold_p)
1528 tree record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_variable));
1529 tree ref;
1531 gcc_assert ((TREE_CODE (record_type) == RECORD_TYPE
1532 || TREE_CODE (record_type) == UNION_TYPE
1533 || TREE_CODE (record_type) == QUAL_UNION_TYPE)
1534 && TYPE_SIZE (record_type)
1535 && (component != 0) != (field != 0));
1537 /* If no field was specified, look for a field with the specified name
1538 in the current record only. */
1539 if (!field)
1540 for (field = TYPE_FIELDS (record_type); field;
1541 field = TREE_CHAIN (field))
1542 if (DECL_NAME (field) == component)
1543 break;
1545 if (!field)
1546 return NULL_TREE;
1548 /* If this field is not in the specified record, see if we can find
1549 something in the record whose original field is the same as this one. */
1550 if (DECL_CONTEXT (field) != record_type)
1551 /* Check if there is a field with name COMPONENT in the record. */
1553 tree new_field;
1555 /* First loop thru normal components. */
1557 for (new_field = TYPE_FIELDS (record_type); new_field;
1558 new_field = TREE_CHAIN (new_field))
1559 if (DECL_ORIGINAL_FIELD (new_field) == field
1560 || new_field == DECL_ORIGINAL_FIELD (field)
1561 || (DECL_ORIGINAL_FIELD (field)
1562 && (DECL_ORIGINAL_FIELD (field)
1563 == DECL_ORIGINAL_FIELD (new_field))))
1564 break;
1566 /* Next, loop thru DECL_INTERNAL_P components if we haven't found
1567 the component in the first search. Doing this search in 2 steps
1568 is required to avoid hidden homonymous fields in the
1569 _Parent field. */
1571 if (!new_field)
1572 for (new_field = TYPE_FIELDS (record_type); new_field;
1573 new_field = TREE_CHAIN (new_field))
1574 if (DECL_INTERNAL_P (new_field))
1576 tree field_ref
1577 = build_simple_component_ref (record_variable,
1578 NULL_TREE, new_field, no_fold_p);
1579 ref = build_simple_component_ref (field_ref, NULL_TREE, field,
1580 no_fold_p);
1582 if (ref)
1583 return ref;
1586 field = new_field;
1589 if (!field)
1590 return NULL_TREE;
1592 /* It would be nice to call "fold" here, but that can lose a type
1593 we need to tag a PLACEHOLDER_EXPR with, so we can't do it. */
1594 ref = build3 (COMPONENT_REF, TREE_TYPE (field), record_variable, field,
1595 NULL_TREE);
1597 if (TREE_READONLY (record_variable) || TREE_READONLY (field))
1598 TREE_READONLY (ref) = 1;
1599 if (TREE_THIS_VOLATILE (record_variable) || TREE_THIS_VOLATILE (field)
1600 || TYPE_VOLATILE (record_type))
1601 TREE_THIS_VOLATILE (ref) = 1;
1603 return no_fold_p ? ref : fold (ref);
1606 /* Like build_simple_component_ref, except that we give an error if the
1607 reference could not be found. */
1609 tree
1610 build_component_ref (tree record_variable, tree component,
1611 tree field, bool no_fold_p)
1613 tree ref = build_simple_component_ref (record_variable, component, field,
1614 no_fold_p);
1616 if (ref)
1617 return ref;
1619 /* If FIELD was specified, assume this is an invalid user field so
1620 raise constraint error. Otherwise, we can't find the type to return, so
1621 abort. */
1622 gcc_assert (field);
1623 return build1 (NULL_EXPR, TREE_TYPE (field),
1624 build_call_raise (CE_Discriminant_Check_Failed));
1627 /* Build a GCC tree to call an allocation or deallocation function.
1628 If GNU_OBJ is nonzero, it is an object to deallocate. Otherwise,
1629 generate an allocator.
1631 GNU_SIZE is the size of the object in bytes and ALIGN is the alignment in
1632 bits. GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the
1633 storage pool to use. If not present, malloc and free will be used except
1634 if GNAT_PROC is the "fake" value of -1, in which case we allocate the
1635 object dynamically on the stack frame. */
1637 tree
1638 build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, unsigned align,
1639 Entity_Id gnat_proc, Entity_Id gnat_pool,
1640 Node_Id gnat_node)
1642 tree gnu_align = size_int (align / BITS_PER_UNIT);
1644 gnu_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (gnu_size, gnu_obj);
1646 if (Present (gnat_proc))
1648 /* The storage pools are obviously always tagged types, but the
1649 secondary stack uses the same mechanism and is not tagged */
1650 if (Is_Tagged_Type (Etype (gnat_pool)))
1652 /* The size is the third parameter; the alignment
1653 has the same type. */
1654 Entity_Id gnat_size_type
1655 = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
1656 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
1657 tree gnu_proc = gnat_to_gnu (gnat_proc);
1658 tree gnu_proc_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_proc);
1659 tree gnu_pool = gnat_to_gnu (gnat_pool);
1660 tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
1661 tree gnu_args = NULL_TREE;
1662 tree gnu_call;
1664 /* The first arg is always the address of the storage pool; next
1665 comes the address of the object, for a deallocator, then the
1666 size and alignment. */
1667 gnu_args
1668 = chainon (gnu_args, build_tree_list (NULL_TREE, gnu_pool_addr));
1670 if (gnu_obj)
1671 gnu_args
1672 = chainon (gnu_args, build_tree_list (NULL_TREE, gnu_obj));
1674 gnu_args
1675 = chainon (gnu_args,
1676 build_tree_list (NULL_TREE,
1677 convert (gnu_size_type, gnu_size)));
1678 gnu_args
1679 = chainon (gnu_args,
1680 build_tree_list (NULL_TREE,
1681 convert (gnu_size_type, gnu_align)));
1683 gnu_call = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (gnu_proc)),
1684 gnu_proc_addr, gnu_args, NULL_TREE);
1685 TREE_SIDE_EFFECTS (gnu_call) = 1;
1686 return gnu_call;
1689 /* Secondary stack case. */
1690 else
1692 /* The size is the second parameter */
1693 Entity_Id gnat_size_type
1694 = Etype (Next_Formal (First_Formal (gnat_proc)));
1695 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
1696 tree gnu_proc = gnat_to_gnu (gnat_proc);
1697 tree gnu_proc_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_proc);
1698 tree gnu_args = NULL_TREE;
1699 tree gnu_call;
1701 /* The first arg is the address of the object, for a
1702 deallocator, then the size */
1703 if (gnu_obj)
1704 gnu_args
1705 = chainon (gnu_args, build_tree_list (NULL_TREE, gnu_obj));
1707 gnu_args
1708 = chainon (gnu_args,
1709 build_tree_list (NULL_TREE,
1710 convert (gnu_size_type, gnu_size)));
1712 gnu_call = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (gnu_proc)),
1713 gnu_proc_addr, gnu_args, NULL_TREE);
1714 TREE_SIDE_EFFECTS (gnu_call) = 1;
1715 return gnu_call;
1719 else if (gnu_obj)
1720 return build_call_1_expr (free_decl, gnu_obj);
1722 /* ??? For now, disable variable-sized allocators in the stack since
1723 we can't yet gimplify an ALLOCATE_EXPR. */
1724 else if (gnat_pool == -1
1725 && TREE_CODE (gnu_size) == INTEGER_CST && !flag_stack_check)
1727 /* If the size is a constant, we can put it in the fixed portion of
1728 the stack frame to avoid the need to adjust the stack pointer. */
1729 if (TREE_CODE (gnu_size) == INTEGER_CST && !flag_stack_check)
1731 tree gnu_range
1732 = build_range_type (NULL_TREE, size_one_node, gnu_size);
1733 tree gnu_array_type = build_array_type (char_type_node, gnu_range);
1734 tree gnu_decl
1735 = create_var_decl (get_identifier ("RETVAL"), NULL_TREE,
1736 gnu_array_type, NULL_TREE, false, false, false,
1737 false, NULL, gnat_node);
1739 return convert (ptr_void_type_node,
1740 build_unary_op (ADDR_EXPR, NULL_TREE, gnu_decl));
1742 else
1743 gcc_unreachable ();
1744 #if 0
1745 return build2 (ALLOCATE_EXPR, ptr_void_type_node, gnu_size, gnu_align);
1746 #endif
1748 else
1750 if (Nkind (gnat_node) != N_Allocator || !Comes_From_Source (gnat_node))
1751 Check_No_Implicit_Heap_Alloc (gnat_node);
1752 return build_call_1_expr (malloc_decl, gnu_size);
1756 /* Build a GCC tree to correspond to allocating an object of TYPE whose
1757 initial value is INIT, if INIT is nonzero. Convert the expression to
1758 RESULT_TYPE, which must be some type of pointer. Return the tree.
1759 GNAT_PROC and GNAT_POOL optionally give the procedure to call and
1760 the storage pool to use. GNAT_NODE is used to provide an error
1761 location for restriction violation messages. If IGNORE_INIT_TYPE is
1762 true, ignore the type of INIT for the purpose of determining the size;
1763 this will cause the maximum size to be allocated if TYPE is of
1764 self-referential size. */
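/* For illustration only, with hypothetical operands that are not part of
this file: to expand a simple source allocator such as "new Integer'(0)"
with the default pool, the caller would invoke something along the lines of

build_allocator (gnu_integer_type, gnu_zero, gnu_access_type,
Empty, Empty, gnat_allocator_node, false);

where Empty for GNAT_PROC and GNAT_POOL selects the default allocation
strategy implemented by build_call_alloc_dealloc above.  */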
1766 tree
1767 build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
1768 Entity_Id gnat_pool, Node_Id gnat_node, bool ignore_init_type)
1770 tree size = TYPE_SIZE_UNIT (type);
1771 tree result;
1773 /* If an initializer is present and is a NULL_EXPR, just return a new NULL_EXPR of RESULT_TYPE. */
1774 if (init && TREE_CODE (init) == NULL_EXPR)
1775 return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));
1777 /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
1778 sizes of the object and its template. Allocate the whole thing and
1779 fill in the parts that are known. */
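/* As a sketch (field names are illustrative, not the real ones): the
"ALLOC" record built by build_unc_object_type below pairs the bounds
template with the data, roughly

struct ALLOC { template_type bounds; type data; };

so a single allocation of SIZE bytes covers both parts, and the
constructor assembled further down fills in whatever is known.  */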
1780 else if (TYPE_FAT_OR_THIN_POINTER_P (result_type))
1782 tree template_type
1783 = (TYPE_FAT_POINTER_P (result_type)
1784 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (result_type))))
1785 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (result_type))));
1786 tree storage_type
1787 = build_unc_object_type (template_type, type,
1788 get_identifier ("ALLOC"));
1789 tree storage_ptr_type = build_pointer_type (storage_type);
1790 tree storage;
1791 tree template_cons = NULL_TREE;
1793 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
1794 init);
1796 /* If the size overflows, pass -1 so the allocator will raise
1797 Storage_Error.  */
1798 if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
1799 size = ssize_int (-1);
1801 storage = build_call_alloc_dealloc (NULL_TREE, size,
1802 TYPE_ALIGN (storage_type),
1803 gnat_proc, gnat_pool, gnat_node);
1804 storage = convert (storage_ptr_type, protect_multiple_eval (storage));
1806 if (TREE_CODE (type) == RECORD_TYPE && TYPE_IS_PADDING_P (type))
1808 type = TREE_TYPE (TYPE_FIELDS (type));
1810 if (init)
1811 init = convert (type, init);
1814 /* If there is an initializing expression, make a constructor for
1815 the entire object including the bounds and copy it into the
1816 object. If there is no initializing expression, just set the
1817 bounds. */
1818 if (init)
1820 template_cons = tree_cons (TREE_CHAIN (TYPE_FIELDS (storage_type)),
1821 init, NULL_TREE);
1822 template_cons = tree_cons (TYPE_FIELDS (storage_type),
1823 build_template (template_type, type,
1824 init),
1825 template_cons);
1827 return convert
1828 (result_type,
1829 build2 (COMPOUND_EXPR, storage_ptr_type,
1830 build_binary_op
1831 (MODIFY_EXPR, storage_type,
1832 build_unary_op (INDIRECT_REF, NULL_TREE,
1833 convert (storage_ptr_type, storage)),
1834 gnat_build_constructor (storage_type, template_cons)),
1835 convert (storage_ptr_type, storage)));
1837 else
1838 return build2
1839 (COMPOUND_EXPR, result_type,
1840 build_binary_op
1841 (MODIFY_EXPR, template_type,
1842 build_component_ref
1843 (build_unary_op (INDIRECT_REF, NULL_TREE,
1844 convert (storage_ptr_type, storage)),
1845 NULL_TREE, TYPE_FIELDS (storage_type), 0),
1846 build_template (template_type, type, NULL_TREE)),
1847 convert (result_type, convert (storage_ptr_type, storage)));
1850 /* If we have an initializing expression, see if its size is simpler
1851 than the size from the type. */
1852 if (!ignore_init_type && init && TYPE_SIZE_UNIT (TREE_TYPE (init))
1853 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
1854 || CONTAINS_PLACEHOLDER_P (size)))
1855 size = TYPE_SIZE_UNIT (TREE_TYPE (init));
1857 /* If the size is still self-referential, reference the initializing
1858 expression, if it is present. If not, this must have been a
1859 call to allocate a library-level object, in which case we use
1860 the maximum size. */
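/* Descriptive note: a self-referential SIZE still contains a
PLACEHOLDER_EXPR at this point (e.g. for a size that depends on a
discriminant); substituting INIT yields the actual size, while max_size
gives the worst-case size for the library-level case.  */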
1861 if (CONTAINS_PLACEHOLDER_P (size))
1863 if (!ignore_init_type && init)
1864 size = substitute_placeholder_in_expr (size, init);
1865 else
1866 size = max_size (size, true);
1869 /* If the size overflows, pass -1 so the allocator will raise
1870 Storage_Error.  */
1871 if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
1872 size = ssize_int (-1);
1874 /* If this type's alignment is larger than the biggest alignment
1875 we can honor directly (BIGGEST_ALIGNMENT) and the allocation uses
1876 the default storage pool, make an "aligning type", allocate one,
1877 point to the field we need, and return that field's address. */
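/* Sketch of the over-aligned path below: the whole aligning record is
allocated with just BIGGEST_ALIGNMENT, and the address of the field
holding the object (which make_aligning_type is responsible for placing
at the requested alignment) is what gets returned, converted to
RESULT_TYPE.  */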
1878 if (TYPE_ALIGN (type) > BIGGEST_ALIGNMENT
1879 && No (gnat_proc))
1881 tree new_type = make_aligning_type (type, TYPE_ALIGN (type), size);
1883 result = build_call_alloc_dealloc (NULL_TREE, TYPE_SIZE_UNIT (new_type),
1884 BIGGEST_ALIGNMENT, Empty,
1885 Empty, gnat_node);
1886 result = save_expr (result);
1887 result = convert (build_pointer_type (new_type), result);
1888 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1889 result = build_component_ref (result, NULL_TREE,
1890 TYPE_FIELDS (new_type), 0);
1891 result = convert (result_type,
1892 build_unary_op (ADDR_EXPR, NULL_TREE, result));
1894 else
1895 result = convert (result_type,
1896 build_call_alloc_dealloc (NULL_TREE, size,
1897 TYPE_ALIGN (type),
1898 gnat_proc,
1899 gnat_pool,
1900 gnat_node));
1902 /* If we have an initial value, put the new address into a SAVE_EXPR, assign
1903 the value, and return the address. Do this with a COMPOUND_EXPR. */
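/* Roughly, the tree built below is (*<saved address> = INIT, <saved
address>), i.e. a COMPOUND_EXPR whose value is the saved allocation
address after the store, so the allocation is evaluated only once.  */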
1905 if (init)
1907 result = save_expr (result);
1908 result
1909 = build2 (COMPOUND_EXPR, TREE_TYPE (result),
1910 build_binary_op
1911 (MODIFY_EXPR, NULL_TREE,
1912 build_unary_op (INDIRECT_REF,
1913 TREE_TYPE (TREE_TYPE (result)), result),
1914 init),
1915 result);
1918 return convert (result_type, result);
1921 /* Fill in a VMS descriptor for EXPR and return a constructor for it.
1922 GNAT_FORMAL is how we find the descriptor record. */
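/* Descriptive note: each FIELD_DECL of the descriptor record carries in
DECL_INITIAL an expression written in terms of a placeholder; the loop
below substitutes EXPR for that placeholder to obtain the field's value
and collects the results into a CONSTRUCTOR for the record.  */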
1924 tree
1925 fill_vms_descriptor (tree expr, Entity_Id gnat_formal)
1927 tree record_type = TREE_TYPE (TREE_TYPE (get_gnu_tree (gnat_formal)));
1928 tree field;
1929 tree const_list = NULL_TREE;
1931 expr = maybe_unconstrained_array (expr);
1932 gnat_mark_addressable (expr);
1934 for (field = TYPE_FIELDS (record_type); field; field = TREE_CHAIN (field))
1935 const_list
1936 = tree_cons (field,
1937 convert (TREE_TYPE (field),
1938 SUBSTITUTE_PLACEHOLDER_IN_EXPR
1939 (DECL_INITIAL (field), expr)),
1940 const_list);
1942 return gnat_build_constructor (record_type, nreverse (const_list));
1945 /* Indicate that we need to take the address of EXPR_NODE and that it
1946 therefore should not be allocated in a register. Return true if successful. */
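/* For example, given a reference such as Obj.Comp (I), the loop below
peels off the COMPONENT_REF and ARRAY_REF (and any intervening
conversions) and sets TREE_ADDRESSABLE on the underlying VAR_DECL for
Obj.  */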
1948 bool
1949 gnat_mark_addressable (tree expr_node)
1951 while (1)
1952 switch (TREE_CODE (expr_node))
1954 case ADDR_EXPR:
1955 case COMPONENT_REF:
1956 case ARRAY_REF:
1957 case ARRAY_RANGE_REF:
1958 case REALPART_EXPR:
1959 case IMAGPART_EXPR:
1960 case VIEW_CONVERT_EXPR:
1961 case CONVERT_EXPR:
1962 case NON_LVALUE_EXPR:
1963 case NOP_EXPR:
1964 expr_node = TREE_OPERAND (expr_node, 0);
1965 break;
1967 case CONSTRUCTOR:
1968 TREE_ADDRESSABLE (expr_node) = 1;
1969 return true;
1971 case VAR_DECL:
1972 case PARM_DECL:
1973 case RESULT_DECL:
1974 TREE_ADDRESSABLE (expr_node) = 1;
1975 return true;
1977 case FUNCTION_DECL:
1978 TREE_ADDRESSABLE (expr_node) = 1;
1979 return true;
1981 case CONST_DECL:
1982 return (DECL_CONST_CORRESPONDING_VAR (expr_node)
1983 && (gnat_mark_addressable
1984 (DECL_CONST_CORRESPONDING_VAR (expr_node))));
1985 default:
1986 return true;