Related PR: target/16201
Source: official-gcc.git — gcc/ada/utils2.c
Blob: 04ab0cb4ad0fcac0ae18b6df359babef3955a8eb
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S 2 *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2004, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 2, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License distributed with GNAT; see file COPYING. If not, write *
19 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
20 * MA 02111-1307, USA. *
21 * *
22 * GNAT was originally developed by the GNAT team at New York University. *
23 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 * *
25 ****************************************************************************/
27 #include "config.h"
28 #include "system.h"
29 #include "coretypes.h"
30 #include "tm.h"
31 #include "tree.h"
32 #include "rtl.h"
33 #include "ggc.h"
34 #include "flags.h"
35 #include "output.h"
36 #include "ada.h"
37 #include "types.h"
38 #include "atree.h"
39 #include "stringt.h"
40 #include "uintp.h"
41 #include "fe.h"
42 #include "elists.h"
43 #include "nlists.h"
44 #include "sinfo.h"
45 #include "einfo.h"
46 #include "ada-tree.h"
47 #include "gigi.h"
49 static tree find_common_type (tree, tree);
50 static bool contains_save_expr_p (tree);
51 static tree contains_null_expr (tree);
52 static tree compare_arrays (tree, tree, tree);
53 static tree nonbinary_modular_operation (enum tree_code, tree, tree, tree);
54 static tree build_simple_component_ref (tree, tree, tree, bool);
56 /* Prepare expr to be an argument of a TRUTH_NOT_EXPR or other logical
57 operation.
59 This preparation consists of taking the ordinary representation of
60 an expression expr and producing a valid tree boolean expression
61 describing whether expr is nonzero. We could simply always do
63 build_binary_op (NE_EXPR, expr, integer_zero_node, 1),
65 but we optimize comparisons, &&, ||, and !.
67 The resulting type should always be the same as the input type.
68 This function is simpler than the corresponding C version since
69 the only possible operands will be things of Boolean type. */
71 tree
72 gnat_truthvalue_conversion (tree expr)
74 tree type = TREE_TYPE (expr);
76 switch (TREE_CODE (expr))
78 case EQ_EXPR: case NE_EXPR: case LE_EXPR: case GE_EXPR:
79 case LT_EXPR: case GT_EXPR:
80 case TRUTH_ANDIF_EXPR:
81 case TRUTH_ORIF_EXPR:
82 case TRUTH_AND_EXPR:
83 case TRUTH_OR_EXPR:
84 case TRUTH_XOR_EXPR:
85 case ERROR_MARK:
86 return expr;
88 case INTEGER_CST:
89 return (integer_zerop (expr) ? convert (type, integer_zero_node)
90 : convert (type, integer_one_node));
92 case REAL_CST:
93 return (real_zerop (expr) ? convert (type, integer_zero_node)
94 : convert (type, integer_one_node));
96 case COND_EXPR:
97 /* Distribute the conversion into the arms of a COND_EXPR. */
98 return fold
99 (build3 (COND_EXPR, type, TREE_OPERAND (expr, 0),
100 gnat_truthvalue_conversion (TREE_OPERAND (expr, 1)),
101 gnat_truthvalue_conversion (TREE_OPERAND (expr, 2))));
103 default:
104 return build_binary_op (NE_EXPR, type, expr,
105 convert (type, integer_zero_node));
109 /* Return the base type of TYPE. */
111 tree
112 get_base_type (tree type)
114 if (TREE_CODE (type) == RECORD_TYPE
115 && TYPE_JUSTIFIED_MODULAR_P (type))
116 type = TREE_TYPE (TYPE_FIELDS (type));
118 while (TREE_TYPE (type)
119 && (TREE_CODE (type) == INTEGER_TYPE
120 || TREE_CODE (type) == REAL_TYPE))
121 type = TREE_TYPE (type);
123 return type;
126 /* Likewise, but only return types known to the Ada source. */
127 tree
128 get_ada_base_type (tree type)
130 while (TREE_TYPE (type)
131 && (TREE_CODE (type) == INTEGER_TYPE
132 || TREE_CODE (type) == REAL_TYPE)
133 && !TYPE_EXTRA_SUBTYPE_P (type))
134 type = TREE_TYPE (type);
136 return type;
139 /* EXP is a GCC tree representing an address. See if we can find how
140 strictly the object at that address is aligned. Return that alignment
141 in bits. If we don't know anything about the alignment, return 0. */
143 unsigned int
144 known_alignment (tree exp)
146 unsigned int this_alignment;
147 unsigned int lhs, rhs;
148 unsigned int type_alignment;
150 /* For pointer expressions, we know that the designated object is always at
151 least as strictly aligned as the designated subtype, so we account for
152 both type and expression information in this case.
154 Beware that we can still get a dummy designated subtype here (e.g. Taft
155 Amendement types), in which the alignment information is meaningless and
156 should be ignored.
158 We always compute a type_alignment value and return the MAX of it
159 compared with what we get from the expression tree. Just set the
160 type_alignment value to 0 when the type information is to be ignored. */
161 type_alignment
162 = ((POINTER_TYPE_P (TREE_TYPE (exp))
163 && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp))))
164 ? TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))) : 0);
166 switch (TREE_CODE (exp))
168 case CONVERT_EXPR:
169 case NOP_EXPR:
170 case NON_LVALUE_EXPR:
171 /* Conversions between pointers and integers don't change the alignment
172 of the underlying object. */
173 this_alignment = known_alignment (TREE_OPERAND (exp, 0));
174 break;
176 case PLUS_EXPR:
177 case MINUS_EXPR:
178 /* If two address are added, the alignment of the result is the
179 minimum of the two aligments. */
180 lhs = known_alignment (TREE_OPERAND (exp, 0));
181 rhs = known_alignment (TREE_OPERAND (exp, 1));
182 this_alignment = MIN (lhs, rhs);
183 break;
185 case INTEGER_CST:
186 /* The first part of this represents the lowest bit in the constant,
187 but is it in bytes, not bits. */
188 this_alignment
189 = MIN (BITS_PER_UNIT
190 * (TREE_INT_CST_LOW (exp) & - TREE_INT_CST_LOW (exp)),
191 BIGGEST_ALIGNMENT);
192 break;
194 case MULT_EXPR:
195 /* If we know the alignment of just one side, use it. Otherwise,
196 use the product of the alignments. */
197 lhs = known_alignment (TREE_OPERAND (exp, 0));
198 rhs = known_alignment (TREE_OPERAND (exp, 1));
200 if (lhs == 0 || rhs == 0)
201 this_alignment = MIN (BIGGEST_ALIGNMENT, MAX (lhs, rhs));
202 else
203 this_alignment = MIN (BIGGEST_ALIGNMENT, lhs * rhs);
204 break;
206 case ADDR_EXPR:
207 this_alignment = expr_align (TREE_OPERAND (exp, 0));
208 break;
210 default:
211 this_alignment = 0;
212 break;
215 return MAX (type_alignment, this_alignment);
218 /* We have a comparison or assignment operation on two types, T1 and T2,
219 which are both either array types or both record types.
220 Return the type that both operands should be converted to, if any.
221 Otherwise return zero. */
223 static tree
224 find_common_type (tree t1, tree t2)
226 /* If either type is non-BLKmode, use it. Note that we know that we will
227 not have any alignment problems since if we did the non-BLKmode
228 type could not have been used. */
229 if (TYPE_MODE (t1) != BLKmode)
230 return t1;
231 else if (TYPE_MODE (t2) != BLKmode)
232 return t2;
234 /* Otherwise, return the type that has a constant size. */
235 if (TREE_CONSTANT (TYPE_SIZE (t1)))
236 return t1;
237 else if (TREE_CONSTANT (TYPE_SIZE (t2)))
238 return t2;
240 /* In this case, both types have variable size. It's probably
241 best to leave the "type mismatch" because changing it could
242 case a bad self-referential reference. */
243 return 0;
246 /* See if EXP contains a SAVE_EXPR in a position where we would
247 normally put it.
249 ??? This is a real kludge, but is probably the best approach short
250 of some very general solution. */
252 static bool
253 contains_save_expr_p (tree exp)
255 switch (TREE_CODE (exp))
257 case SAVE_EXPR:
258 return true;
260 case ADDR_EXPR: case INDIRECT_REF:
261 case COMPONENT_REF:
262 case NOP_EXPR: case CONVERT_EXPR: case VIEW_CONVERT_EXPR:
263 return contains_save_expr_p (TREE_OPERAND (exp, 0));
265 case CONSTRUCTOR:
266 return (CONSTRUCTOR_ELTS (exp)
267 && contains_save_expr_p (CONSTRUCTOR_ELTS (exp)));
269 case TREE_LIST:
270 return (contains_save_expr_p (TREE_VALUE (exp))
271 || (TREE_CHAIN (exp)
272 && contains_save_expr_p (TREE_CHAIN (exp))));
274 default:
275 return false;
279 /* See if EXP contains a NULL_EXPR in an expression we use for sizes. Return
280 it if so. This is used to detect types whose sizes involve computations
281 that are known to raise Constraint_Error. */
283 static tree
284 contains_null_expr (tree exp)
286 tree tem;
288 if (TREE_CODE (exp) == NULL_EXPR)
289 return exp;
291 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
293 case tcc_unary:
294 return contains_null_expr (TREE_OPERAND (exp, 0));
296 case tcc_comparison:
297 case tcc_binary:
298 tem = contains_null_expr (TREE_OPERAND (exp, 0));
299 if (tem)
300 return tem;
302 return contains_null_expr (TREE_OPERAND (exp, 1));
304 case tcc_expression:
305 switch (TREE_CODE (exp))
307 case SAVE_EXPR:
308 return contains_null_expr (TREE_OPERAND (exp, 0));
310 case COND_EXPR:
311 tem = contains_null_expr (TREE_OPERAND (exp, 0));
312 if (tem)
313 return tem;
315 tem = contains_null_expr (TREE_OPERAND (exp, 1));
316 if (tem)
317 return tem;
319 return contains_null_expr (TREE_OPERAND (exp, 2));
321 default:
322 return 0;
325 default:
326 return 0;
/* NOTE(review): this listing comes from a web blob view that dropped
   blank and brace-only lines and kept the original file's line number
   as a prefix on each remaining line; the code tokens below are left
   untouched.  Consult the pristine gcc/ada/utils2.c for the exact
   block structure.  */
330 /* Return an expression tree representing an equality comparison of
331 A1 and A2, two objects of ARRAY_TYPE. The returned expression should
332 be of type RESULT_TYPE.
334 Two arrays are equal in one of two ways: (1) if both have zero length
335 in some dimension (not necessarily the same dimension) or (2) if the
336 lengths in each dimension are equal and the data is equal. We perform the
337 length tests in as efficient a manner as possible. */
339 static tree
340 compare_arrays (tree result_type, tree a1, tree a2)
342 tree t1 = TREE_TYPE (a1);
343 tree t2 = TREE_TYPE (a2);
/* RESULT accumulates the dimension-by-dimension equality; start true.  */
344 tree result = convert (result_type, integer_one_node);
/* These accumulate "this array is empty in some dimension"; start false.  */
345 tree a1_is_null = convert (result_type, integer_zero_node);
346 tree a2_is_null = convert (result_type, integer_zero_node);
347 bool length_zero_p = false;
349 /* Process each dimension separately and compare the lengths. If any
350 dimension has a size known to be zero, set LENGTH_ZERO_P to true to
351 suppress the comparison of the data. */
352 while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
354 tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
355 tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
356 tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
357 tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
358 tree bt = get_base_type (TREE_TYPE (lb1));
/* Note: LENGTH1/LENGTH2 are really length - 1 (ub - lb).  */
359 tree length1 = fold (build2 (MINUS_EXPR, bt, ub1, lb1));
360 tree length2 = fold (build2 (MINUS_EXPR, bt, ub2, lb2));
361 tree nbt;
362 tree tem;
363 tree comparison, this_a1_is_null, this_a2_is_null;
365 /* If the length of the first array is a constant, swap our operands
366 unless the length of the second array is the constant zero.
367 Note that we have set the `length' values to the length - 1. */
368 if (TREE_CODE (length1) == INTEGER_CST
369 && !integer_zerop (fold (build2 (PLUS_EXPR, bt, length2,
370 convert (bt, integer_one_node)))))
372 tem = a1, a1 = a2, a2 = tem;
373 tem = t1, t1 = t2, t2 = tem;
374 tem = lb1, lb1 = lb2, lb2 = tem;
375 tem = ub1, ub1 = ub2, ub2 = tem;
376 tem = length1, length1 = length2, length2 = tem;
377 tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
380 /* If the length of this dimension in the second array is the constant
381 zero, we can just go inside the original bounds for the first
382 array and see if last < first. */
383 if (integer_zerop (fold (build2 (PLUS_EXPR, bt, length2,
384 convert (bt, integer_one_node)))))
386 tree ub = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
387 tree lb = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
389 comparison = build_binary_op (LT_EXPR, result_type, ub, lb);
390 comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
391 length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
393 length_zero_p = true;
394 this_a1_is_null = comparison;
395 this_a2_is_null = convert (result_type, integer_one_node);
398 /* If the length is some other constant value, we know that this
399 dimension in the first array cannot be superflat, so we
400 can just use its length from the actual stored bounds. */
401 else if (TREE_CODE (length2) == INTEGER_CST)
403 ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
404 lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
405 ub2 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
406 lb2 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
407 nbt = get_base_type (TREE_TYPE (ub1));
409 comparison
410 = build_binary_op (EQ_EXPR, result_type,
411 build_binary_op (MINUS_EXPR, nbt, ub1, lb1),
412 build_binary_op (MINUS_EXPR, nbt, ub2, lb2));
414 /* Note that we know that UB2 and LB2 are constant and hence
415 cannot contain a PLACEHOLDER_EXPR. */
417 comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
418 length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
420 this_a1_is_null = build_binary_op (LT_EXPR, result_type, ub1, lb1);
421 this_a2_is_null = convert (result_type, integer_zero_node);
424 /* Otherwise compare the computed lengths. */
425 else
427 length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
428 length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);
430 comparison
431 = build_binary_op (EQ_EXPR, result_type, length1, length2);
/* Remember: lengths are length - 1, so "< 0" means empty.  */
433 this_a1_is_null
434 = build_binary_op (LT_EXPR, result_type, length1,
435 convert (bt, integer_zero_node));
436 this_a2_is_null
437 = build_binary_op (LT_EXPR, result_type, length2,
438 convert (bt, integer_zero_node));
/* Fold this dimension's outcome into the running accumulators.  */
441 result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
442 result, comparison);
444 a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
445 this_a1_is_null, a1_is_null);
446 a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
447 this_a2_is_null, a2_is_null);
/* Advance to the element types for the next dimension.  */
449 t1 = TREE_TYPE (t1);
450 t2 = TREE_TYPE (t2);
453 /* Unless the size of some bound is known to be zero, compare the
454 data in the array. */
455 if (!length_zero_p)
457 tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));
459 if (type)
460 a1 = convert (type, a1), a2 = convert (type, a2);
462 result = build_binary_op (TRUTH_ANDIF_EXPR, result_type, result,
463 fold (build2 (EQ_EXPR, result_type, a1, a2)));
467 /* The result is also true if both sizes are zero. */
468 result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
469 build_binary_op (TRUTH_ANDIF_EXPR, result_type,
470 a1_is_null, a2_is_null),
471 result);
473 /* If either operand contains SAVE_EXPRs, they have to be evaluated before
474 starting the comparison above since the place it would be otherwise
475 evaluated would be wrong. */
477 if (contains_save_expr_p (a1))
478 result = build2 (COMPOUND_EXPR, result_type, a1, result);
480 if (contains_save_expr_p (a2))
481 result = build2 (COMPOUND_EXPR, result_type, a2, result);
483 return result;
/* NOTE(review): blob-view listing — blank and brace-only lines were
   dropped and each line keeps its original line-number prefix; code
   tokens are unchanged.  */
486 /* Compute the result of applying OP_CODE to LHS and RHS, where both are of
487 type TYPE. We know that TYPE is a modular type with a nonbinary
488 modulus. */
490 static tree
491 nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
492 tree rhs)
494 tree modulus = TYPE_MODULUS (type);
/* Smallest bit width able to represent values in [0, modulus).  */
495 unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
496 unsigned int precision;
497 bool unsignedp = true;
498 tree op_type = type;
499 tree result;
501 /* If this is an addition of a constant, convert it to a subtraction
502 of a constant since we can do that faster. */
503 if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
504 rhs = fold (build2 (MINUS_EXPR, type, modulus, rhs)), op_code = MINUS_EXPR;
506 /* For the logical operations, we only need PRECISION bits. For
507 addition and subtraction, we need one more and for multiplication we
508 need twice as many. But we never want to make a size smaller than
509 our size. */
510 if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
511 needed_precision += 1;
512 else if (op_code == MULT_EXPR)
513 needed_precision *= 2;
515 precision = MAX (needed_precision, TYPE_PRECISION (op_type));
517 /* Unsigned will do for everything but subtraction. */
518 if (op_code == MINUS_EXPR)
519 unsignedp = false;
521 /* If our type is the wrong signedness or isn't wide enough, make a new
522 type and convert both our operands to it. */
523 if (TYPE_PRECISION (op_type) < precision
524 || TYPE_UNSIGNED (op_type) != unsignedp)
526 /* Copy the node so we ensure it can be modified to make it modular. */
527 op_type = copy_node (gnat_type_for_size (precision, unsignedp));
528 modulus = convert (op_type, modulus);
529 SET_TYPE_MODULUS (op_type, modulus);
530 TYPE_MODULAR_P (op_type) = 1;
531 lhs = convert (op_type, lhs);
532 rhs = convert (op_type, rhs);
535 /* Do the operation, then we'll fix it up. */
536 result = fold (build2 (op_code, op_type, lhs, rhs));
538 /* For multiplication, we have no choice but to do a full modulus
539 operation. However, we want to do this in the narrowest
540 possible size. */
541 if (op_code == MULT_EXPR)
543 tree div_type = copy_node (gnat_type_for_size (needed_precision, 1));
544 modulus = convert (div_type, modulus);
545 SET_TYPE_MODULUS (div_type, modulus);
546 TYPE_MODULAR_P (div_type) = 1;
547 result = convert (op_type,
548 fold (build2 (TRUNC_MOD_EXPR, div_type,
549 convert (div_type, result), modulus)));
552 /* For subtraction, add the modulus back if we are negative. */
553 else if (op_code == MINUS_EXPR)
/* SAVE_EXPR so RESULT is evaluated once although referenced twice
   in the COND_EXPR below.  */
555 result = save_expr (result);
556 result = fold (build3 (COND_EXPR, op_type,
557 build2 (LT_EXPR, integer_type_node, result,
558 convert (op_type, integer_zero_node)),
559 fold (build2 (PLUS_EXPR, op_type,
560 result, modulus)),
561 result));
564 /* For the other operations, subtract the modulus if we are >= it. */
565 else
567 result = save_expr (result);
568 result = fold (build3 (COND_EXPR, op_type,
569 build2 (GE_EXPR, integer_type_node,
570 result, modulus),
571 fold (build2 (MINUS_EXPR, op_type,
572 result, modulus)),
573 result));
/* Convert back from the (possibly widened) OP_TYPE to the caller's
   modular TYPE.  */
576 return convert (type, result);
/* NOTE(review): blob-view listing — blank and brace-only lines were
   dropped and each line keeps its original line-number prefix; code
   tokens are unchanged.  This function is a large per-op_code switch;
   consult the pristine gcc/ada/utils2.c for the exact block
   structure.  */
579 /* Make a binary operation of kind OP_CODE. RESULT_TYPE is the type
580 desired for the result. Usually the operation is to be performed
581 in that type. For MODIFY_EXPR and ARRAY_REF, RESULT_TYPE may be 0
582 in which case the type to be used will be derived from the operands.
584 This function is very much unlike the ones for C and C++ since we
585 have already done any type conversion and matching required. All we
586 have to do here is validate the work done by SEM and handle subtypes. */
588 tree
589 build_binary_op (enum tree_code op_code, tree result_type,
590 tree left_operand, tree right_operand)
592 tree left_type = TREE_TYPE (left_operand);
593 tree right_type = TREE_TYPE (right_operand);
594 tree left_base_type = get_base_type (left_type);
595 tree right_base_type = get_base_type (right_type);
/* Type in which the operation itself is carried out; may differ from
   RESULT_TYPE, in which case we convert at the very end.  */
596 tree operation_type = result_type;
597 tree best_type = NULL_TREE;
/* Nonzero modulus means a final FLOOR_MOD may be required, unless a
   case below proves it unnecessary and clears it.  */
598 tree modulus;
599 tree result;
600 bool has_side_effects = false;
602 if (operation_type
603 && TREE_CODE (operation_type) == RECORD_TYPE
604 && TYPE_JUSTIFIED_MODULAR_P (operation_type))
605 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
607 if (operation_type
608 && !AGGREGATE_TYPE_P (operation_type)
609 && TYPE_EXTRA_SUBTYPE_P (operation_type))
610 operation_type = get_base_type (operation_type);
612 modulus = (operation_type && TREE_CODE (operation_type) == INTEGER_TYPE
613 && TYPE_MODULAR_P (operation_type)
614 ? TYPE_MODULUS (operation_type) : 0);
616 switch (op_code)
618 case MODIFY_EXPR:
619 /* If there were any integral or pointer conversions on LHS, remove
620 them; we'll be putting them back below if needed. Likewise for
621 conversions between array and record types. But don't do this if
622 the right operand is not BLKmode (for packed arrays)
623 unless we are not changing the mode. */
624 while ((TREE_CODE (left_operand) == CONVERT_EXPR
625 || TREE_CODE (left_operand) == NOP_EXPR
626 || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
627 && (((INTEGRAL_TYPE_P (left_type)
628 || POINTER_TYPE_P (left_type))
629 && (INTEGRAL_TYPE_P (TREE_TYPE
630 (TREE_OPERAND (left_operand, 0)))
631 || POINTER_TYPE_P (TREE_TYPE
632 (TREE_OPERAND (left_operand, 0)))))
633 || (((TREE_CODE (left_type) == RECORD_TYPE
634 /* Don't remove conversions to justified modular
635 types. */
636 && !TYPE_JUSTIFIED_MODULAR_P (left_type))
637 || TREE_CODE (left_type) == ARRAY_TYPE)
638 && ((TREE_CODE (TREE_TYPE
639 (TREE_OPERAND (left_operand, 0)))
640 == RECORD_TYPE)
641 || (TREE_CODE (TREE_TYPE
642 (TREE_OPERAND (left_operand, 0)))
643 == ARRAY_TYPE))
644 && (TYPE_MODE (right_type) == BLKmode
645 || (TYPE_MODE (left_type)
646 == TYPE_MODE (TREE_TYPE
647 (TREE_OPERAND
648 (left_operand, 0))))))))
650 left_operand = TREE_OPERAND (left_operand, 0);
651 left_type = TREE_TYPE (left_operand);
654 if (!operation_type)
655 operation_type = left_type;
657 /* If the RHS has a conversion between record and array types and
658 an inner type is no worse, use it. Note we cannot do this for
659 modular types or types with TYPE_ALIGN_OK, since the latter
660 might indicate a conversion between a root type and a class-wide
661 type, which we must not remove. */
662 while (TREE_CODE (right_operand) == VIEW_CONVERT_EXPR
663 && ((TREE_CODE (right_type) == RECORD_TYPE
664 && !TYPE_JUSTIFIED_MODULAR_P (right_type)
665 && !TYPE_ALIGN_OK (right_type)
666 && !TYPE_IS_FAT_POINTER_P (right_type))
667 || TREE_CODE (right_type) == ARRAY_TYPE)
668 && (((TREE_CODE (TREE_TYPE (TREE_OPERAND (right_operand, 0)))
669 == RECORD_TYPE)
670 && !(TYPE_JUSTIFIED_MODULAR_P
671 (TREE_TYPE (TREE_OPERAND (right_operand, 0))))
672 && !(TYPE_ALIGN_OK
673 (TREE_TYPE (TREE_OPERAND (right_operand, 0))))
674 && !(TYPE_IS_FAT_POINTER_P
675 (TREE_TYPE (TREE_OPERAND (right_operand, 0)))))
676 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (right_operand, 0)))
677 == ARRAY_TYPE))
/* NOTE(review): the next clause reads `best_type ==
   find_common_type (...)' here, i.e. a comparison, which leaves
   BEST_TYPE unassigned (NULL_TREE) and makes the following
   `right_type != best_type' test trivially true.  FSF sources for
   this era write it as an assignment (`best_type = ...'); this
   looks like an extraction artifact — verify against the pristine
   utils2.c before relying on it.  */
678 && (0 == (best_type
679 == find_common_type (right_type,
680 TREE_TYPE (TREE_OPERAND
681 (right_operand, 0))))
682 || right_type != best_type))
684 right_operand = TREE_OPERAND (right_operand, 0);
685 right_type = TREE_TYPE (right_operand);
688 /* If we are copying one array or record to another, find the best type
689 to use. */
690 if (((TREE_CODE (left_type) == ARRAY_TYPE
691 && TREE_CODE (right_type) == ARRAY_TYPE)
692 || (TREE_CODE (left_type) == RECORD_TYPE
693 && TREE_CODE (right_type) == RECORD_TYPE))
694 && (best_type = find_common_type (left_type, right_type)))
695 operation_type = best_type;
697 /* If a class-wide type may be involved, force use of the RHS type. */
698 if (TREE_CODE (right_type) == RECORD_TYPE && TYPE_ALIGN_OK (right_type))
699 operation_type = right_type;
701 /* Ensure everything on the LHS is valid. If we have a field reference,
702 strip anything that get_inner_reference can handle. Then remove any
703 conversions to types having the same code and mode. Mark
704 VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE. When done, we must have
705 either an INDIRECT_REF or a decl. */
706 result = left_operand;
707 while (1)
709 tree restype = TREE_TYPE (result);
711 if (TREE_CODE (result) == COMPONENT_REF
712 || TREE_CODE (result) == ARRAY_REF
713 || TREE_CODE (result) == ARRAY_RANGE_REF)
714 while (handled_component_p (result))
715 result = TREE_OPERAND (result, 0);
716 else if (TREE_CODE (result) == REALPART_EXPR
717 || TREE_CODE (result) == IMAGPART_EXPR
718 || ((TREE_CODE (result) == NOP_EXPR
719 || TREE_CODE (result) == CONVERT_EXPR)
720 && (((TREE_CODE (restype)
721 == TREE_CODE (TREE_TYPE
722 (TREE_OPERAND (result, 0))))
723 && (TYPE_MODE (TREE_TYPE
724 (TREE_OPERAND (result, 0)))
725 == TYPE_MODE (restype)))
726 || TYPE_ALIGN_OK (restype))))
727 result = TREE_OPERAND (result, 0);
728 else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
730 TREE_ADDRESSABLE (result) = 1;
731 result = TREE_OPERAND (result, 0);
733 else
734 break;
737 gcc_assert (TREE_CODE (result) == INDIRECT_REF
738 || TREE_CODE (result) == NULL_EXPR || DECL_P (result));
740 /* Convert the right operand to the operation type unless
741 it is either already of the correct type or if the type
742 involves a placeholder, since the RHS may not have the same
743 record type. */
744 if (operation_type != right_type
745 && (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type))))
747 right_operand = convert (operation_type, right_operand);
748 right_type = operation_type;
751 /* If the left operand is not the same type as the operation type,
752 surround it in a VIEW_CONVERT_EXPR. */
753 if (left_type != operation_type)
754 left_operand = unchecked_convert (operation_type, left_operand, false);
756 has_side_effects = true;
757 modulus = NULL_TREE;
758 break;
760 case ARRAY_REF:
761 if (!operation_type)
762 operation_type = TREE_TYPE (left_type);
764 /* ... fall through ... */
766 case ARRAY_RANGE_REF:
768 /* First convert the right operand to its base type. This will
769 prevent unneeded signedness conversions when sizetype is wider than
770 integer. */
771 right_operand = convert (right_base_type, right_operand);
772 right_operand = convert (TYPE_DOMAIN (left_type), right_operand);
/* A variable index (or variable lower bound) forces the array into
   memory so the reference can be expanded.  */
774 if (!TREE_CONSTANT (right_operand)
775 || !TREE_CONSTANT (TYPE_MIN_VALUE (right_type)))
776 gnat_mark_addressable (left_operand);
778 modulus = NULL_TREE;
779 break;
781 case GE_EXPR:
782 case LE_EXPR:
783 case GT_EXPR:
784 case LT_EXPR:
785 gcc_assert (!POINTER_TYPE_P (left_type));
787 /* ... fall through ... */
789 case EQ_EXPR:
790 case NE_EXPR:
791 /* If either operand is a NULL_EXPR, just return a new one. */
792 if (TREE_CODE (left_operand) == NULL_EXPR)
793 return build2 (op_code, result_type,
794 build1 (NULL_EXPR, integer_type_node,
795 TREE_OPERAND (left_operand, 0)),
796 integer_zero_node);
798 else if (TREE_CODE (right_operand) == NULL_EXPR)
799 return build2 (op_code, result_type,
800 build1 (NULL_EXPR, integer_type_node,
801 TREE_OPERAND (right_operand, 0)),
802 integer_zero_node);
804 /* If either object is a justified modular types, get the
805 fields from within. */
806 if (TREE_CODE (left_type) == RECORD_TYPE
807 && TYPE_JUSTIFIED_MODULAR_P (left_type))
809 left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
810 left_operand);
811 left_type = TREE_TYPE (left_operand);
812 left_base_type = get_base_type (left_type);
815 if (TREE_CODE (right_type) == RECORD_TYPE
816 && TYPE_JUSTIFIED_MODULAR_P (right_type))
818 right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
819 right_operand);
820 right_type = TREE_TYPE (right_operand);
821 right_base_type = get_base_type (right_type);
824 /* If both objects are arrays, compare them specially. */
825 if ((TREE_CODE (left_type) == ARRAY_TYPE
826 || (TREE_CODE (left_type) == INTEGER_TYPE
827 && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
828 && (TREE_CODE (right_type) == ARRAY_TYPE
829 || (TREE_CODE (right_type) == INTEGER_TYPE
830 && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
832 result = compare_arrays (result_type, left_operand, right_operand);
834 if (op_code == NE_EXPR)
835 result = invert_truthvalue (result);
836 else
837 gcc_assert (op_code == EQ_EXPR);
839 return result;
842 /* Otherwise, the base types must be the same unless the objects are
843 records. If we have records, use the best type and convert both
844 operands to that type. */
845 if (left_base_type != right_base_type)
847 if (TREE_CODE (left_base_type) == RECORD_TYPE
848 && TREE_CODE (right_base_type) == RECORD_TYPE)
850 /* The only way these are permitted to be the same is if both
851 types have the same name. In that case, one of them must
852 not be self-referential. Use that one as the best type.
853 Even better is if one is of fixed size. */
854 best_type = NULL_TREE;
856 gcc_assert (TYPE_NAME (left_base_type)
857 && (TYPE_NAME (left_base_type)
858 == TYPE_NAME (right_base_type)));
860 if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
861 best_type = left_base_type;
862 else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
863 best_type = right_base_type;
864 else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
865 best_type = left_base_type;
866 else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
867 best_type = right_base_type;
868 else
869 gcc_unreachable ();
871 left_operand = convert (best_type, left_operand);
872 right_operand = convert (best_type, right_operand);
874 else
875 gcc_unreachable ();
878 /* If we are comparing a fat pointer against zero, we need to
879 just compare the data pointer. */
880 else if (TYPE_FAT_POINTER_P (left_base_type)
881 && TREE_CODE (right_operand) == CONSTRUCTOR
882 && integer_zerop (TREE_VALUE
883 (CONSTRUCTOR_ELTS (right_operand))))
885 right_operand = build_component_ref (left_operand, NULL_TREE,
886 TYPE_FIELDS (left_base_type),
887 false);
888 left_operand = convert (TREE_TYPE (right_operand),
889 integer_zero_node);
891 else
893 left_operand = convert (left_base_type, left_operand);
894 right_operand = convert (right_base_type, right_operand);
897 modulus = NULL_TREE;
898 break;
900 case PREINCREMENT_EXPR:
901 case PREDECREMENT_EXPR:
902 case POSTINCREMENT_EXPR:
903 case POSTDECREMENT_EXPR:
904 /* In these, the result type and the left operand type should be the
905 same. Do the operation in the base type of those and convert the
906 right operand (which is an integer) to that type.
908 Note that these operations are only used in loop control where
909 we guarantee that no overflow can occur. So nothing special need
910 be done for modular types. */
912 gcc_assert (left_type == result_type);
913 operation_type = get_base_type (result_type);
914 left_operand = convert (operation_type, left_operand);
915 right_operand = convert (operation_type, right_operand);
916 has_side_effects = true;
917 modulus = NULL_TREE;
918 break;
920 case LSHIFT_EXPR:
921 case RSHIFT_EXPR:
922 case LROTATE_EXPR:
923 case RROTATE_EXPR:
924 /* The RHS of a shift can be any type. Also, ignore any modulus
925 (we used to abort, but this is needed for unchecked conversion
926 to modular types). Otherwise, processing is the same as normal. */
927 gcc_assert (operation_type == left_base_type);
928 modulus = NULL_TREE;
929 left_operand = convert (operation_type, left_operand);
930 break;
932 case TRUTH_ANDIF_EXPR:
933 case TRUTH_ORIF_EXPR:
934 case TRUTH_AND_EXPR:
935 case TRUTH_OR_EXPR:
936 case TRUTH_XOR_EXPR:
937 left_operand = gnat_truthvalue_conversion (left_operand);
938 right_operand = gnat_truthvalue_conversion (right_operand);
939 goto common;
941 case BIT_AND_EXPR:
942 case BIT_IOR_EXPR:
943 case BIT_XOR_EXPR:
944 /* For binary modulus, if the inputs are in range, so are the
945 outputs. */
946 if (modulus && integer_pow2p (modulus))
947 modulus = NULL_TREE;
949 goto common;
951 case COMPLEX_EXPR:
952 gcc_assert (TREE_TYPE (result_type) == left_base_type
953 && TREE_TYPE (result_type) == right_base_type);
954 left_operand = convert (left_base_type, left_operand);
955 right_operand = convert (right_base_type, right_operand);
956 break;
958 case TRUNC_DIV_EXPR: case TRUNC_MOD_EXPR:
959 case CEIL_DIV_EXPR: case CEIL_MOD_EXPR:
960 case FLOOR_DIV_EXPR: case FLOOR_MOD_EXPR:
961 case ROUND_DIV_EXPR: case ROUND_MOD_EXPR:
962 /* These always produce results lower than either operand. */
963 modulus = NULL_TREE;
964 goto common;
966 default:
967 common:
968 /* The result type should be the same as the base types of the
969 both operands (and they should be the same). Convert
970 everything to the result type. */
972 gcc_assert (operation_type == left_base_type
973 && left_base_type == right_base_type);
974 left_operand = convert (operation_type, left_operand);
975 right_operand = convert (operation_type, right_operand);
/* End of per-op_code dispatch; build the actual node.  */
978 if (modulus && !integer_pow2p (modulus))
980 result = nonbinary_modular_operation (op_code, operation_type,
981 left_operand, right_operand);
982 modulus = NULL_TREE;
984 /* If either operand is a NULL_EXPR, just return a new one. */
985 else if (TREE_CODE (left_operand) == NULL_EXPR)
986 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
987 else if (TREE_CODE (right_operand) == NULL_EXPR)
988 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
989 else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
990 result = fold (build4 (op_code, operation_type, left_operand,
991 right_operand, NULL_TREE, NULL_TREE));
992 else
993 result
994 = fold (build2 (op_code, operation_type, left_operand, right_operand));
996 TREE_SIDE_EFFECTS (result) |= has_side_effects;
997 TREE_CONSTANT (result)
998 |= (TREE_CONSTANT (left_operand) & TREE_CONSTANT (right_operand)
999 && op_code != ARRAY_REF && op_code != ARRAY_RANGE_REF);
1001 if ((op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
1002 && TYPE_VOLATILE (operation_type))
1003 TREE_THIS_VOLATILE (result) = 1;
1005 /* If we are working with modular types, perform the MOD operation
1006 if something above hasn't eliminated the need for it. */
1007 if (modulus)
1008 result = fold (build2 (FLOOR_MOD_EXPR, operation_type, result,
1009 convert (operation_type, modulus)));
1011 if (result_type && result_type != operation_type)
1012 result = convert (result_type, result);
1014 return result;
1017 /* Similar, but for unary operations.  OP_CODE is the operation to perform,
     RESULT_TYPE the type of the result (may be null, in which case it is
     derived from the operand for some operations), and OPERAND the single
     operand.  Returns the folded tree for the operation.  */
1019 tree
1020 build_unary_op (enum tree_code op_code, tree result_type, tree operand)
1022 tree type = TREE_TYPE (operand);
1023 tree base_type = get_base_type (type);
1024 tree operation_type = result_type;
1025 tree result;
1026 bool side_effects = false;
     /* If the result is a justified modular type, operate on the field type
        of its single field instead.  */
1028 if (operation_type
1029 && TREE_CODE (operation_type) == RECORD_TYPE
1030 && TYPE_JUSTIFIED_MODULAR_P (operation_type))
1031 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
     /* For scalar "extra" subtypes (see TYPE_EXTRA_SUBTYPE_P), operate on
        the base type.  */
1033 if (operation_type
1034 && !AGGREGATE_TYPE_P (operation_type)
1035 && TYPE_EXTRA_SUBTYPE_P (operation_type))
1036 operation_type = get_base_type (operation_type);
1038 switch (op_code)
1040 case REALPART_EXPR:
1041 case IMAGPART_EXPR:
1042 if (!operation_type)
1043 result_type = operation_type = TREE_TYPE (type);
1044 else
1045 gcc_assert (result_type == TREE_TYPE (type));
1047 result = fold (build1 (op_code, operation_type, operand));
1048 break;
1050 case TRUTH_NOT_EXPR:
1051 gcc_assert (result_type == base_type);
1052 result = invert_truthvalue (gnat_truthvalue_conversion (operand));
1053 break;
1055 case ATTR_ADDR_EXPR:
1056 case ADDR_EXPR:
1057 switch (TREE_CODE (operand))
1059 case INDIRECT_REF:
1060 case UNCONSTRAINED_ARRAY_REF:
1061 result = TREE_OPERAND (operand, 0);
1063 /* Make sure the type here is a pointer, not a reference.
1064 GCC wants pointer types for function addresses. */
1065 if (!result_type)
1066 result_type = build_pointer_type (type);
1067 break;
1069 case NULL_EXPR:
1070 result = operand;
1071 TREE_TYPE (result) = type = build_pointer_type (type);
1072 break;
1074 case ARRAY_REF:
1075 case ARRAY_RANGE_REF:
1076 case COMPONENT_REF:
1077 case BIT_FIELD_REF:
1078 /* If this is for 'Address, find the address of the prefix and
1079 add the offset to the field. Otherwise, do this the normal
1080 way. */
1081 if (op_code == ATTR_ADDR_EXPR)
1083 HOST_WIDE_INT bitsize;
1084 HOST_WIDE_INT bitpos;
1085 tree offset, inner;
1086 enum machine_mode mode;
1087 int unsignedp, volatilep;
1089 inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
1090 &mode, &unsignedp, &volatilep, false);
1092 /* If INNER is a padding type whose field has a self-referential
1093 size, convert to that inner type. We know the offset is zero
1094 and we need to have that type visible. */
1095 if (TREE_CODE (TREE_TYPE (inner)) == RECORD_TYPE
1096 && TYPE_IS_PADDING_P (TREE_TYPE (inner))
1097 && (CONTAINS_PLACEHOLDER_P
1098 (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS
1099 (TREE_TYPE (inner)))))))
1100 inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
1101 inner);
1103 /* Compute the offset as a byte offset from INNER. */
1104 if (!offset)
1105 offset = size_zero_node;
     /* 'Address of a bit-misaligned object cannot be represented; warn.  */
1107 if (bitpos % BITS_PER_UNIT != 0)
1108 post_error
1109 ("taking address of object not aligned on storage unit?",
1110 error_gnat_node);
1112 offset = size_binop (PLUS_EXPR, offset,
1113 size_int (bitpos / BITS_PER_UNIT));
1115 /* Take the address of INNER, convert the offset to void *, and
1116 add them.  It will later be converted to the desired result
1117 type, if any. */
1118 inner = build_unary_op (ADDR_EXPR, NULL_TREE, inner);
1119 inner = convert (ptr_void_type_node, inner);
1120 offset = convert (ptr_void_type_node, offset);
1121 result = build_binary_op (PLUS_EXPR, ptr_void_type_node,
1122 inner, offset);
1123 result = convert (build_pointer_type (TREE_TYPE (operand)),
1124 result);
1125 break;
1127 goto common;
1129 case CONSTRUCTOR:
1130 /* If this is just a constructor for a padded record, we can
1131 just take the address of the single field and convert it to
1132 a pointer to our type. */
1133 if (TREE_CODE (type) == RECORD_TYPE && TYPE_IS_PADDING_P (type))
1135 result
1136 = build_unary_op (ADDR_EXPR, NULL_TREE,
1137 TREE_VALUE (CONSTRUCTOR_ELTS (operand)));
1138 result = convert (build_pointer_type (TREE_TYPE (operand)),
1139 result);
1140 break;
1143 goto common;
1145 case NOP_EXPR:
     /* For a no-op conversion between aggregates, take the address of
        the underlying object instead.  */
1146 if (AGGREGATE_TYPE_P (type)
1147 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
1148 return build_unary_op (ADDR_EXPR, result_type,
1149 TREE_OPERAND (operand, 0));
1151 /* If this NOP_EXPR doesn't change the mode, get the result type
1152 from this type and go down. We need to do this in case
1153 this is a conversion of a CONST_DECL. */
1154 if (TYPE_MODE (type) != BLKmode
1155 && (TYPE_MODE (type)
1156 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0)))))
1157 return build_unary_op (ADDR_EXPR,
1158 (result_type ? result_type
1159 : build_pointer_type (type)),
1160 TREE_OPERAND (operand, 0));
1161 goto common;
1163 case CONST_DECL:
1164 operand = DECL_CONST_CORRESPONDING_VAR (operand);
1166 /* ... fall through ... */
1168 default:
1169 common:
1171 /* If we are taking the address of a padded record whose field
1172 contains a template, take the address of the template. */
1173 if (TREE_CODE (type) == RECORD_TYPE
1174 && TYPE_IS_PADDING_P (type)
1175 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
1176 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
1178 type = TREE_TYPE (TYPE_FIELDS (type));
1179 operand = convert (type, operand);
1182 if (type != error_mark_node)
1183 operation_type = build_pointer_type (type);
1185 gnat_mark_addressable (operand);
1186 result = fold (build1 (ADDR_EXPR, operation_type, operand));
1189 TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
1190 break;
1192 case INDIRECT_REF:
1193 /* If we want to refer to an entire unconstrained array,
1194 make up an expression to do so. This will never survive to
1195 the backend. If TYPE is a thin pointer, first convert the
1196 operand to a fat pointer. */
1197 if (TYPE_THIN_POINTER_P (type)
1198 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
1200 operand
1201 = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))),
1202 operand);
1203 type = TREE_TYPE (operand);
1206 if (TYPE_FAT_POINTER_P (type))
1208 result = build1 (UNCONSTRAINED_ARRAY_REF,
1209 TYPE_UNCONSTRAINED_ARRAY (type), operand);
1210 TREE_READONLY (result) = TREE_STATIC (result)
1211 = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
     /* Dereferencing an ADDR_EXPR just yields the object it points to.  */
1213 else if (TREE_CODE (operand) == ADDR_EXPR)
1214 result = TREE_OPERAND (operand, 0);
1216 else
1218 result = fold (build1 (op_code, TREE_TYPE (type), operand));
1219 TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
1222 side_effects
1223 = (!TYPE_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)));
1224 break;
1226 case NEGATE_EXPR:
1227 case BIT_NOT_EXPR:
1229 tree modulus = ((operation_type
1230 && TREE_CODE (operation_type) == INTEGER_TYPE
1231 && TYPE_MODULAR_P (operation_type))
1232 ? TYPE_MODULUS (operation_type) : 0);
1233 int mod_pow2 = modulus && integer_pow2p (modulus);
1235 /* If this is a modular type, there are various possibilities
1236 depending on the operation and whether the modulus is a
1237 power of two or not. */
1239 if (modulus)
1241 gcc_assert (operation_type == base_type);
1242 operand = convert (operation_type, operand);
1244 /* The fastest in the negate case for binary modulus is
1245 the straightforward code; the TRUNC_MOD_EXPR below
1246 is an AND operation. */
1247 if (op_code == NEGATE_EXPR && mod_pow2)
1248 result = fold (build2 (TRUNC_MOD_EXPR, operation_type,
1249 fold (build1 (NEGATE_EXPR, operation_type,
1250 operand)),
1251 modulus));
1253 /* For nonbinary negate case, return zero for zero operand,
1254 else return the modulus minus the operand. If the modulus
1255 is a power of two minus one, we can do the subtraction
1256 as an XOR since it is equivalent and faster on most machines. */
1257 else if (op_code == NEGATE_EXPR && !mod_pow2)
1259 if (integer_pow2p (fold (build2 (PLUS_EXPR, operation_type,
1260 modulus,
1261 convert (operation_type,
1262 integer_one_node)))))
1263 result = fold (build2 (BIT_XOR_EXPR, operation_type,
1264 operand, modulus));
1265 else
1266 result = fold (build2 (MINUS_EXPR, operation_type,
1267 modulus, operand));
     /* Guard the result so that a zero operand yields zero.  */
1269 result = fold (build3 (COND_EXPR, operation_type,
1270 fold (build2 (NE_EXPR,
1271 integer_type_node,
1272 operand,
1273 convert
1274 (operation_type,
1275 integer_zero_node))),
1276 result, operand));
1278 else
1280 /* For the NOT cases, we need a constant equal to
1281 the modulus minus one. For a binary modulus, we
1282 XOR against the constant and subtract the operand from
1283 that constant for nonbinary modulus. */
1285 tree cnst = fold (build2 (MINUS_EXPR, operation_type, modulus,
1286 convert (operation_type,
1287 integer_one_node)));
1289 if (mod_pow2)
1290 result = fold (build2 (BIT_XOR_EXPR, operation_type,
1291 operand, cnst));
1292 else
1293 result = fold (build2 (MINUS_EXPR, operation_type,
1294 cnst, operand));
1297 break;
1301 /* ... fall through ... */
1303 default:
1304 gcc_assert (operation_type == base_type);
1305 result = fold (build1 (op_code, operation_type, convert (operation_type,
1306 operand)));
1309 if (side_effects)
1311 TREE_SIDE_EFFECTS (result) = 1;
1312 if (TREE_CODE (result) == INDIRECT_REF)
1313 TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
1316 if (result_type && TREE_TYPE (result) != result_type)
1317 result = convert (result_type, result);
1319 return result;
1322 /* Similar, but for COND_EXPR. */
1324 tree
1325 build_cond_expr (tree result_type, tree condition_operand,
1326 tree true_operand, tree false_operand)
1328 tree result;
1329 bool addr_p = false;
1331 /* The front-end verifies that result, true and false operands have same base
1332 type. Convert everything to the result type. */
1334 true_operand = convert (result_type, true_operand);
1335 false_operand = convert (result_type, false_operand);
1337 /* If the result type is unconstrained, take the address of
1338 the operands and then dereference our result. */
1339 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1340 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1342 addr_p = true;
1343 result_type = build_pointer_type (result_type);
1344 true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
1345 false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
1348 result = fold (build3 (COND_EXPR, result_type, condition_operand,
1349 true_operand, false_operand));
1351 /* If either operand is a SAVE_EXPR (possibly surrounded by
1352 arithmetic, make sure it gets done. */
1353 true_operand = skip_simple_arithmetic (true_operand);
1354 false_operand = skip_simple_arithmetic (false_operand);
1356 if (TREE_CODE (true_operand) == SAVE_EXPR)
1357 result = build2 (COMPOUND_EXPR, result_type, true_operand, result);
1359 if (TREE_CODE (false_operand) == SAVE_EXPR)
1360 result = build2 (COMPOUND_EXPR, result_type, false_operand, result);
1362 /* ??? Seems the code above is wrong, as it may move ahead of the COND
1363 SAVE_EXPRs with side effects and not shared by both arms. */
1365 if (addr_p)
1366 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1368 return result;
1372 /* Build a CALL_EXPR to call FUNDECL with one argument, ARG. Return
1373 the CALL_EXPR. */
1375 tree
1376 build_call_1_expr (tree fundecl, tree arg)
1378 tree call = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fundecl)),
1379 build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
1380 chainon (NULL_TREE, build_tree_list (NULL_TREE, arg)),
1381 NULL_TREE);
1383 TREE_SIDE_EFFECTS (call) = 1;
1385 return call;
1388 /* Build a CALL_EXPR to call FUNDECL with two arguments, ARG1 & ARG2. Return
1389 the CALL_EXPR. */
1391 tree
1392 build_call_2_expr (tree fundecl, tree arg1, tree arg2)
1394 tree call = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fundecl)),
1395 build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
1396 chainon (chainon (NULL_TREE,
1397 build_tree_list (NULL_TREE, arg1)),
1398 build_tree_list (NULL_TREE, arg2)),
1399 NULL_TREE);
1401 TREE_SIDE_EFFECTS (call) = 1;
1403 return call;
1406 /* Likewise to call FUNDECL with no arguments. */
1408 tree
1409 build_call_0_expr (tree fundecl)
1411 tree call = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fundecl)),
1412 build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
1413 NULL_TREE, NULL_TREE);
1415 TREE_SIDE_EFFECTS (call) = 1;
1417 return call;
1420 /* Call a function that raises an exception and pass the line number and file
1421 name, if requested. MSG says which exception function to call. */
1423 tree
1424 build_call_raise (int msg)
1426 tree fndecl = gnat_raise_decls[msg];
1427 const char *str = Debug_Flag_NN ? "" : ref_filename;
1428 int len = strlen (str) + 1;
1429 tree filename = build_string (len, str);
1431 TREE_TYPE (filename)
1432 = build_array_type (char_type_node,
1433 build_index_type (build_int_cst (NULL_TREE, len)));
1435 return
1436 build_call_2_expr (fndecl,
1437 build1 (ADDR_EXPR, build_pointer_type (char_type_node),
1438 filename),
1439 build_int_cst (NULL_TREE, input_line));
1442 /* Return a CONSTRUCTOR of TYPE whose list is LIST.  The result is marked
     constant/static/invariant only when every element is constant and, for
     records, the fields appear in nondecreasing bit position, since
     varasm.c cannot output out-of-order constant constructors.  */
1444 tree
1445 gnat_build_constructor (tree type, tree list)
1447 tree elmt;
1448 bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
1449 bool side_effects = false;
1450 tree result;
     /* Scan the elements to decide whether the whole constructor is
        constant and whether it has side effects.  */
1452 for (elmt = list; elmt; elmt = TREE_CHAIN (elmt))
1454 if (!TREE_CONSTANT (TREE_VALUE (elmt))
1455 || (TREE_CODE (type) == RECORD_TYPE
1456 && DECL_BIT_FIELD (TREE_PURPOSE (elmt))
1457 && TREE_CODE (TREE_VALUE (elmt)) != INTEGER_CST)
1458 || !initializer_constant_valid_p (TREE_VALUE (elmt),
1459 TREE_TYPE (TREE_VALUE (elmt))))
1460 allconstant = false;
1462 if (TREE_SIDE_EFFECTS (TREE_VALUE (elmt)))
1463 side_effects = true;
1465 /* Propagate a NULL_EXPR from the size of the type. We won't ever
1466 be executing the code we generate here in that case, but handle it
1467 specially to avoid the compiler blowing up. */
1468 if (TREE_CODE (type) == RECORD_TYPE
1469 && (0 != (result
1470 = contains_null_expr (DECL_SIZE (TREE_PURPOSE (elmt))))))
1471 return build1 (NULL_EXPR, type, TREE_OPERAND (result, 0));
1474 /* If TYPE is a RECORD_TYPE and the fields are not in the
1475 same order as their bit position, don't treat this as constant
1476 since varasm.c can't handle it. */
1477 if (allconstant && TREE_CODE (type) == RECORD_TYPE)
1479 tree last_pos = bitsize_zero_node;
1480 tree field;
1482 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1484 tree this_pos = bit_position (field);
1486 if (TREE_CODE (this_pos) != INTEGER_CST
1487 || tree_int_cst_lt (this_pos, last_pos))
1489 allconstant = false;
1490 break;
1493 last_pos = this_pos;
1497 result = build_constructor (type, list);
1498 TREE_CONSTANT (result) = TREE_INVARIANT (result)
1499 = TREE_STATIC (result) = allconstant;
1500 TREE_SIDE_EFFECTS (result) = side_effects;
1501 TREE_READONLY (result) = TYPE_READONLY (type) || allconstant;
1502 return result;
1505 /* Return a COMPONENT_REF to access a field that is given by COMPONENT,
1506 an IDENTIFIER_NODE giving the name of the field, or FIELD, a FIELD_DECL,
1507 for the field (exactly one of the two must be supplied).  Don't fold the
     result if NO_FOLD_P is true.
1509 We also handle the fact that we might have been passed a pointer to the
1510 actual record and know how to look for fields in variant parts.
     Returns NULL_TREE when no matching field can be found.  */
1512 static tree
1513 build_simple_component_ref (tree record_variable, tree component,
1514 tree field, bool no_fold_p)
1516 tree record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_variable));
1517 tree ref;
     /* Exactly one of COMPONENT and FIELD must be nonzero, and the variable
        must be a record, union or variant (qualified union) with a size.  */
1519 gcc_assert ((TREE_CODE (record_type) == RECORD_TYPE
1520 || TREE_CODE (record_type) == UNION_TYPE
1521 || TREE_CODE (record_type) == QUAL_UNION_TYPE)
1522 && TYPE_SIZE (record_type)
1523 && (component != 0) != (field != 0));
1525 /* If no field was specified, look for a field with the specified name
1526 in the current record only. */
1527 if (!field)
1528 for (field = TYPE_FIELDS (record_type); field;
1529 field = TREE_CHAIN (field))
1530 if (DECL_NAME (field) == component)
1531 break;
1533 if (!field)
1534 return NULL_TREE;
1536 /* If this field is not in the specified record, see if we can find
1537 something in the record whose original field is the same as this one. */
1538 if (DECL_CONTEXT (field) != record_type)
1539 /* Check if there is a field with name COMPONENT in the record. */
1541 tree new_field;
1543 /* First loop thru normal components. */
1545 for (new_field = TYPE_FIELDS (record_type); new_field;
1546 new_field = TREE_CHAIN (new_field))
1547 if (DECL_ORIGINAL_FIELD (new_field) == field
1548 || new_field == DECL_ORIGINAL_FIELD (field)
1549 || (DECL_ORIGINAL_FIELD (field)
1550 && (DECL_ORIGINAL_FIELD (field)
1551 == DECL_ORIGINAL_FIELD (new_field))))
1552 break;
1554 /* Next, loop thru DECL_INTERNAL_P components if we haven't found
1555 the component in the first search. Doing this search in 2 steps
1556 is required to avoid hidden homonymous fields in the
1557 _Parent field. */
1559 if (!new_field)
1560 for (new_field = TYPE_FIELDS (record_type); new_field;
1561 new_field = TREE_CHAIN (new_field))
1562 if (DECL_INTERNAL_P (new_field))
     /* Recurse: look inside each internal field for the wanted field.  */
1564 tree field_ref
1565 = build_simple_component_ref (record_variable,
1566 NULL_TREE, new_field, no_fold_p);
1567 ref = build_simple_component_ref (field_ref, NULL_TREE, field,
1568 no_fold_p);
1570 if (ref)
1571 return ref;
1574 field = new_field;
1577 if (!field)
1578 return NULL_TREE;
1580 /* It would be nice to call "fold" here, but that can lose a type
1581 we need to tag a PLACEHOLDER_EXPR with, so we can't do it. */
1582 ref = build3 (COMPONENT_REF, TREE_TYPE (field), record_variable, field,
1583 NULL_TREE);
     /* Propagate readonly and volatile flags from the variable, the field
        and the record type onto the reference.  */
1585 if (TREE_READONLY (record_variable) || TREE_READONLY (field))
1586 TREE_READONLY (ref) = 1;
1587 if (TREE_THIS_VOLATILE (record_variable) || TREE_THIS_VOLATILE (field)
1588 || TYPE_VOLATILE (record_type))
1589 TREE_THIS_VOLATILE (ref) = 1;
1591 return no_fold_p ? ref : fold (ref);
1594 /* Like build_simple_component_ref, except that we give an error if the
1595 reference could not be found. */
1597 tree
1598 build_component_ref (tree record_variable, tree component,
1599 tree field, bool no_fold_p)
1601 tree ref = build_simple_component_ref (record_variable, component, field,
1602 no_fold_p);
1604 if (ref)
1605 return ref;
1607 /* If FIELD was specified, assume this is an invalid user field so
1608 raise constraint error. Otherwise, we can't find the type to return, so
1609 abort. */
1610 gcc_assert (field);
1611 return build1 (NULL_EXPR, TREE_TYPE (field),
1612 build_call_raise (CE_Discriminant_Check_Failed));
1615 /* Build a GCC tree to call an allocation or deallocation function.
1616 If GNU_OBJ is nonzero, it is an object to deallocate. Otherwise,
1617 generate an allocator.
1619 GNU_SIZE is the size of the object in bytes and ALIGN is the alignment in
1620 bits. GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the
1621 storage pool to use. If not present, malloc and free will be used except
1622 if GNAT_PROC is the "fake" value of -1, in which case we allocate the
1623 object dynamically on the stack frame. */
1625 tree
1626 build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, unsigned align,
1627 Entity_Id gnat_proc, Entity_Id gnat_pool,
1628 Node_Id gnat_node)
1630 tree gnu_align = size_int (align / BITS_PER_UNIT);
     /* Resolve any PLACEHOLDER_EXPR in the size against the object.  */
1632 gnu_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (gnu_size, gnu_obj);
1634 if (Present (gnat_proc))
1636 /* The storage pools are obviously always tagged types, but the
1637 secondary stack uses the same mechanism and is not tagged. */
1638 if (Is_Tagged_Type (Etype (gnat_pool)))
1640 /* The size is the third parameter; the alignment is the
1641 same type. */
1642 Entity_Id gnat_size_type
1643 = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
1644 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
1645 tree gnu_proc = gnat_to_gnu (gnat_proc);
1646 tree gnu_proc_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_proc);
1647 tree gnu_pool = gnat_to_gnu (gnat_pool);
1648 tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
1649 tree gnu_args = NULL_TREE;
1650 tree gnu_call;
1652 /* The first arg is always the address of the storage pool; next
1653 comes the address of the object, for a deallocator, then the
1654 size and alignment. */
1655 gnu_args
1656 = chainon (gnu_args, build_tree_list (NULL_TREE, gnu_pool_addr));
1658 if (gnu_obj)
1659 gnu_args
1660 = chainon (gnu_args, build_tree_list (NULL_TREE, gnu_obj));
1662 gnu_args
1663 = chainon (gnu_args,
1664 build_tree_list (NULL_TREE,
1665 convert (gnu_size_type, gnu_size)));
1666 gnu_args
1667 = chainon (gnu_args,
1668 build_tree_list (NULL_TREE,
1669 convert (gnu_size_type, gnu_align)));
1671 gnu_call = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (gnu_proc)),
1672 gnu_proc_addr, gnu_args, NULL_TREE);
1673 TREE_SIDE_EFFECTS (gnu_call) = 1;
1674 return gnu_call;
1677 /* Secondary stack case. */
1678 else
1680 /* The size is the second parameter. */
1681 Entity_Id gnat_size_type
1682 = Etype (Next_Formal (First_Formal (gnat_proc)));
1683 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
1684 tree gnu_proc = gnat_to_gnu (gnat_proc);
1685 tree gnu_proc_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_proc);
1686 tree gnu_args = NULL_TREE;
1687 tree gnu_call;
1689 /* The first arg is the address of the object, for a
1690 deallocator, then the size. */
1691 if (gnu_obj)
1692 gnu_args
1693 = chainon (gnu_args, build_tree_list (NULL_TREE, gnu_obj));
1695 gnu_args
1696 = chainon (gnu_args,
1697 build_tree_list (NULL_TREE,
1698 convert (gnu_size_type, gnu_size)));
1700 gnu_call = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (gnu_proc)),
1701 gnu_proc_addr, gnu_args, NULL_TREE);
1702 TREE_SIDE_EFFECTS (gnu_call) = 1;
1703 return gnu_call;
     /* No user-defined pool: deallocate with free.  */
1707 else if (gnu_obj)
1708 return build_call_1_expr (free_decl, gnu_obj);
1710 /* ??? For now, disable variable-sized allocators in the stack since
1711 we can't yet gimplify an ALLOCATE_EXPR. */
1712 else if (gnat_pool == -1
1713 && TREE_CODE (gnu_size) == INTEGER_CST && !flag_stack_check)
1715 /* If the size is a constant, we can put it in the fixed portion of
1716 the stack frame to avoid the need to adjust the stack pointer. */
1717 if (TREE_CODE (gnu_size) == INTEGER_CST && !flag_stack_check)
1719 tree gnu_range
1720 = build_range_type (NULL_TREE, size_one_node, gnu_size);
1721 tree gnu_array_type = build_array_type (char_type_node, gnu_range);
1722 tree gnu_decl
1723 = create_var_decl (get_identifier ("RETVAL"), NULL_TREE,
1724 gnu_array_type, NULL_TREE, false, false, false,
1725 false, NULL, gnat_node);
1727 return convert (ptr_void_type_node,
1728 build_unary_op (ADDR_EXPR, NULL_TREE, gnu_decl));
1730 else
1731 gcc_unreachable ();
1732 #if 0
1733 return build2 (ALLOCATE_EXPR, ptr_void_type_node, gnu_size, gnu_align);
1734 #endif
     /* Default heap allocation through malloc.  */
1736 else
1738 if (Nkind (gnat_node) != N_Allocator || !Comes_From_Source (gnat_node))
1739 Check_No_Implicit_Heap_Alloc (gnat_node);
1740 return build_call_1_expr (malloc_decl, gnu_size);
1744 /* Build a GCC tree to correspond to allocating an object of TYPE whose
1745 initial value is INIT, if INIT is nonzero. Convert the expression to
1746 RESULT_TYPE, which must be some type of pointer. Return the tree.
1747 GNAT_PROC and GNAT_POOL optionally give the procedure to call and
1748 the storage pool to use.  A fat or thin RESULT_TYPE causes the bounds
     template to be allocated and filled in along with the data.  */
1750 tree
1751 build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
1752 Entity_Id gnat_pool, Node_Id gnat_node)
1754 tree size = TYPE_SIZE_UNIT (type);
1755 tree result;
1757 /* If the initializer, if present, is a NULL_EXPR, just return a new one. */
1758 if (init && TREE_CODE (init) == NULL_EXPR)
1759 return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));
1761 /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
1762 sizes of the object and its template. Allocate the whole thing and
1763 fill in the parts that are known. */
1764 else if (TYPE_FAT_OR_THIN_POINTER_P (result_type))
1766 tree template_type
1767 = (TYPE_FAT_POINTER_P (result_type)
1768 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (result_type))))
1769 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (result_type))));
1770 tree storage_type
1771 = build_unc_object_type (template_type, type,
1772 get_identifier ("ALLOC"));
1773 tree storage_ptr_type = build_pointer_type (storage_type);
1774 tree storage;
1775 tree template_cons = NULL_TREE;
1777 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
1778 init);
1780 /* If the size overflows, pass -1 so the allocator will raise
1781 storage error. */
1782 if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
1783 size = ssize_int (-1);
1785 storage = build_call_alloc_dealloc (NULL_TREE, size,
1786 TYPE_ALIGN (storage_type),
1787 gnat_proc, gnat_pool, gnat_node);
     /* Protect the allocation result against multiple evaluation since it
        is referenced more than once below.  */
1788 storage = convert (storage_ptr_type, protect_multiple_eval (storage));
1790 if (TREE_CODE (type) == RECORD_TYPE && TYPE_IS_PADDING_P (type))
1792 type = TREE_TYPE (TYPE_FIELDS (type));
1794 if (init)
1795 init = convert (type, init);
1798 /* If there is an initializing expression, make a constructor for
1799 the entire object including the bounds and copy it into the
1800 object. If there is no initializing expression, just set the
1801 bounds. */
1802 if (init)
1804 template_cons = tree_cons (TREE_CHAIN (TYPE_FIELDS (storage_type)),
1805 init, NULL_TREE);
1806 template_cons = tree_cons (TYPE_FIELDS (storage_type),
1807 build_template (template_type, type,
1808 init),
1809 template_cons);
1811 return convert
1812 (result_type,
1813 build2 (COMPOUND_EXPR, storage_ptr_type,
1814 build_binary_op
1815 (MODIFY_EXPR, storage_type,
1816 build_unary_op (INDIRECT_REF, NULL_TREE,
1817 convert (storage_ptr_type, storage)),
1818 gnat_build_constructor (storage_type, template_cons)),
1819 convert (storage_ptr_type, storage)));
1821 else
1822 return build2
1823 (COMPOUND_EXPR, result_type,
1824 build_binary_op
1825 (MODIFY_EXPR, template_type,
1826 build_component_ref
1827 (build_unary_op (INDIRECT_REF, NULL_TREE,
1828 convert (storage_ptr_type, storage)),
1829 NULL_TREE, TYPE_FIELDS (storage_type), 0),
1830 build_template (template_type, type, NULL_TREE)),
1831 convert (result_type, convert (storage_ptr_type, storage)));
1834 /* If we have an initializing expression, see if its size is simpler
1835 than the size from the type. */
1836 if (init && TYPE_SIZE_UNIT (TREE_TYPE (init))
1837 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
1838 || CONTAINS_PLACEHOLDER_P (size)))
1839 size = TYPE_SIZE_UNIT (TREE_TYPE (init));
1841 /* If the size is still self-referential, reference the initializing
1842 expression, if it is present. If not, this must have been a
1843 call to allocate a library-level object, in which case we use
1844 the maximum size. */
1845 if (CONTAINS_PLACEHOLDER_P (size))
1847 if (init)
1848 size = substitute_placeholder_in_expr (size, init);
1849 else
1850 size = max_size (size, true);
1853 /* If the size overflows, pass -1 so the allocator will raise
1854 storage error. */
1855 if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
1856 size = ssize_int (-1);
1858 /* If this is a type whose alignment is larger than the
1859 biggest we support in normal alignment and this is in
1860 the default storage pool, make an "aligning type", allocate
1861 it, point to the field we need, and return that. */
1862 if (TYPE_ALIGN (type) > BIGGEST_ALIGNMENT
1863 && No (gnat_proc))
1865 tree new_type = make_aligning_type (type, TYPE_ALIGN (type), size);
1867 result = build_call_alloc_dealloc (NULL_TREE, TYPE_SIZE_UNIT (new_type),
1868 BIGGEST_ALIGNMENT, Empty,
1869 Empty, gnat_node);
1870 result = save_expr (result);
1871 result = convert (build_pointer_type (new_type), result);
1872 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1873 result = build_component_ref (result, NULL_TREE,
1874 TYPE_FIELDS (new_type), 0);
1875 result = convert (result_type,
1876 build_unary_op (ADDR_EXPR, NULL_TREE, result));
1878 else
1879 result = convert (result_type,
1880 build_call_alloc_dealloc (NULL_TREE, size,
1881 TYPE_ALIGN (type),
1882 gnat_proc,
1883 gnat_pool,
1884 gnat_node));
1886 /* If we have an initial value, put the new address into a SAVE_EXPR, assign
1887 the value, and return the address. Do this with a COMPOUND_EXPR. */
1889 if (init)
1891 result = save_expr (result);
1892 result
1893 = build2 (COMPOUND_EXPR, TREE_TYPE (result),
1894 build_binary_op
1895 (MODIFY_EXPR, NULL_TREE,
1896 build_unary_op (INDIRECT_REF,
1897 TREE_TYPE (TREE_TYPE (result)), result),
1898 init),
1899 result);
1902 return convert (result_type, result);
1905 /* Fill in a VMS descriptor for EXPR and return a constructor for it.
1906 GNAT_FORMAL is how we find the descriptor record. */
1908 tree
1909 fill_vms_descriptor (tree expr, Entity_Id gnat_formal)
1911 tree record_type = TREE_TYPE (TREE_TYPE (get_gnu_tree (gnat_formal)));
1912 tree field;
1913 tree const_list = NULL_TREE;
1915 expr = maybe_unconstrained_array (expr);
1916 gnat_mark_addressable (expr);
1918 for (field = TYPE_FIELDS (record_type); field; field = TREE_CHAIN (field))
1919 const_list
1920 = tree_cons (field,
1921 convert (TREE_TYPE (field),
1922 SUBSTITUTE_PLACEHOLDER_IN_EXPR
1923 (DECL_INITIAL (field), expr)),
1924 const_list);
1926 return gnat_build_constructor (record_type, nreverse (const_list));
1929 /* Indicate that we need to make the address of EXPR_NODE and it therefore
1930 should not be allocated in a register. Returns true if successful. */
1932 bool
1933 gnat_mark_addressable (tree expr_node)
1935 while (1)
1936 switch (TREE_CODE (expr_node))
1938 case ADDR_EXPR:
1939 case COMPONENT_REF:
1940 case ARRAY_REF:
1941 case ARRAY_RANGE_REF:
1942 case REALPART_EXPR:
1943 case IMAGPART_EXPR:
1944 case VIEW_CONVERT_EXPR:
1945 case CONVERT_EXPR:
1946 case NON_LVALUE_EXPR:
1947 case NOP_EXPR:
1948 expr_node = TREE_OPERAND (expr_node, 0);
1949 break;
1951 case CONSTRUCTOR:
1952 TREE_ADDRESSABLE (expr_node) = 1;
1953 return true;
1955 case VAR_DECL:
1956 case PARM_DECL:
1957 case RESULT_DECL:
1958 TREE_ADDRESSABLE (expr_node) = 1;
1959 return true;
1961 case FUNCTION_DECL:
1962 TREE_ADDRESSABLE (expr_node) = 1;
1963 return true;
1965 case CONST_DECL:
1966 return (DECL_CONST_CORRESPONDING_VAR (expr_node)
1967 && (gnat_mark_addressable
1968 (DECL_CONST_CORRESPONDING_VAR (expr_node))));
1969 default:
1970 return true;