2008-05-30 Vladimir Makarov <vmakarov@redhat.com>
[official-gcc.git] / gcc / ada / utils2.c
blob891463ce49ca38dcb092860c29fbcfc60bdc2633
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S 2 *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2008, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "tm.h"
30 #include "tree.h"
31 #include "rtl.h"
32 #include "ggc.h"
33 #include "flags.h"
34 #include "output.h"
35 #include "ada.h"
36 #include "types.h"
37 #include "atree.h"
38 #include "stringt.h"
39 #include "namet.h"
40 #include "uintp.h"
41 #include "fe.h"
42 #include "elists.h"
43 #include "nlists.h"
44 #include "sinfo.h"
45 #include "einfo.h"
46 #include "ada-tree.h"
47 #include "gigi.h"
49 static tree find_common_type (tree, tree);
50 static bool contains_save_expr_p (tree);
51 static tree contains_null_expr (tree);
52 static tree compare_arrays (tree, tree, tree);
53 static tree nonbinary_modular_operation (enum tree_code, tree, tree, tree);
54 static tree build_simple_component_ref (tree, tree, tree, bool);
56 /* Prepare expr to be an argument of a TRUTH_NOT_EXPR or other logical
57 operation.
59 This preparation consists of taking the ordinary representation of
60 an expression expr and producing a valid tree boolean expression
61 describing whether expr is nonzero. We could simply always do
63 build_binary_op (NE_EXPR, expr, integer_zero_node, 1),
65 but we optimize comparisons, &&, ||, and !.
67 The resulting type should always be the same as the input type.
68 This function is simpler than the corresponding C version since
69 the only possible operands will be things of Boolean type. */
71 tree
72 gnat_truthvalue_conversion (tree expr)
74 tree type = TREE_TYPE (expr);
76 switch (TREE_CODE (expr))
78 case EQ_EXPR: case NE_EXPR: case LE_EXPR: case GE_EXPR:
79 case LT_EXPR: case GT_EXPR:
80 case TRUTH_ANDIF_EXPR:
81 case TRUTH_ORIF_EXPR:
82 case TRUTH_AND_EXPR:
83 case TRUTH_OR_EXPR:
84 case TRUTH_XOR_EXPR:
85 case ERROR_MARK:
86 return expr;
88 case INTEGER_CST:
89 return (integer_zerop (expr)
90 ? build_int_cst (type, 0)
91 : build_int_cst (type, 1));
93 case REAL_CST:
94 return (real_zerop (expr)
95 ? fold_convert (type, integer_zero_node)
96 : fold_convert (type, integer_one_node));
98 case COND_EXPR:
99 /* Distribute the conversion into the arms of a COND_EXPR. */
101 tree arg1 = gnat_truthvalue_conversion (TREE_OPERAND (expr, 1));
102 tree arg2 = gnat_truthvalue_conversion (TREE_OPERAND (expr, 2));
103 return fold_build3 (COND_EXPR, type, TREE_OPERAND (expr, 0),
104 arg1, arg2);
107 default:
108 return build_binary_op (NE_EXPR, type, expr,
109 fold_convert (type, integer_zero_node));
113 /* Return the base type of TYPE. */
115 tree
116 get_base_type (tree type)
118 if (TREE_CODE (type) == RECORD_TYPE
119 && TYPE_JUSTIFIED_MODULAR_P (type))
120 type = TREE_TYPE (TYPE_FIELDS (type));
122 while (TREE_TYPE (type)
123 && (TREE_CODE (type) == INTEGER_TYPE
124 || TREE_CODE (type) == REAL_TYPE))
125 type = TREE_TYPE (type);
127 return type;
130 /* EXP is a GCC tree representing an address. See if we can find how
131 strictly the object at that address is aligned. Return that alignment
132 in bits. If we don't know anything about the alignment, return 0. */
134 unsigned int
135 known_alignment (tree exp)
137 unsigned int this_alignment;
138 unsigned int lhs, rhs;
139 unsigned int type_alignment;
141 /* For pointer expressions, we know that the designated object is always at
142 least as strictly aligned as the designated subtype, so we account for
143 both type and expression information in this case.
145 Beware that we can still get a dummy designated subtype here (e.g. Taft
146 Amendment types), in which the alignment information is meaningless and
147 should be ignored.
149 We always compute a type_alignment value and return the MAX of it
150 compared with what we get from the expression tree. Just set the
151 type_alignment value to 0 when the type information is to be ignored. */
152 type_alignment
153 = ((POINTER_TYPE_P (TREE_TYPE (exp))
154 && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp))))
155 ? TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))) : 0);
157 switch (TREE_CODE (exp))
159 CASE_CONVERT:
160 case VIEW_CONVERT_EXPR:
161 case NON_LVALUE_EXPR:
162 /* Conversions between pointers and integers don't change the alignment
163 of the underlying object. */
164 this_alignment = known_alignment (TREE_OPERAND (exp, 0));
165 break;
167 case COMPOUND_EXPR:
168 /* The value of a COMPOUND_EXPR is that of it's second operand. */
169 this_alignment = known_alignment (TREE_OPERAND (exp, 1));
170 break;
172 case PLUS_EXPR:
173 case POINTER_PLUS_EXPR:
174 case MINUS_EXPR:
175 /* If two address are added, the alignment of the result is the
176 minimum of the two alignments. */
177 lhs = known_alignment (TREE_OPERAND (exp, 0));
178 rhs = known_alignment (TREE_OPERAND (exp, 1));
179 this_alignment = MIN (lhs, rhs);
180 break;
182 case COND_EXPR:
183 /* If there is a choice between two values, use the smallest one. */
184 lhs = known_alignment (TREE_OPERAND (exp, 1));
185 rhs = known_alignment (TREE_OPERAND (exp, 2));
186 this_alignment = MIN (lhs, rhs);
187 break;
189 case INTEGER_CST:
190 /* The first part of this represents the lowest bit in the constant,
191 but is it in bytes, not bits. */
192 this_alignment
193 = MIN (BITS_PER_UNIT
194 * (TREE_INT_CST_LOW (exp) & - TREE_INT_CST_LOW (exp)),
195 BIGGEST_ALIGNMENT);
196 break;
198 case MULT_EXPR:
199 /* If we know the alignment of just one side, use it. Otherwise,
200 use the product of the alignments. */
201 lhs = known_alignment (TREE_OPERAND (exp, 0));
202 rhs = known_alignment (TREE_OPERAND (exp, 1));
204 if (lhs == 0 || rhs == 0)
205 this_alignment = MIN (BIGGEST_ALIGNMENT, MAX (lhs, rhs));
206 else
207 this_alignment = MIN (BIGGEST_ALIGNMENT, lhs * rhs);
208 break;
210 case BIT_AND_EXPR:
211 /* A bit-and expression is as aligned as the maximum alignment of the
212 operands. We typically get here for a complex lhs and a constant
213 negative power of two on the rhs to force an explicit alignment, so
214 don't bother looking at the lhs. */
215 this_alignment = known_alignment (TREE_OPERAND (exp, 1));
216 break;
218 case ADDR_EXPR:
219 this_alignment = expr_align (TREE_OPERAND (exp, 0));
220 break;
222 default:
223 this_alignment = 0;
224 break;
227 return MAX (type_alignment, this_alignment);
230 /* We have a comparison or assignment operation on two types, T1 and T2, which
231 are either both array types or both record types. T1 is assumed to be for
232 the left hand side operand, and T2 for the right hand side. Return the
233 type that both operands should be converted to for the operation, if any.
234 Otherwise return zero. */
236 static tree
237 find_common_type (tree t1, tree t2)
239 /* ??? As of today, various constructs lead here with types of different
240 sizes even when both constants (e.g. tagged types, packable vs regular
241 component types, padded vs unpadded types, ...). While some of these
242 would better be handled upstream (types should be made consistent before
243 calling into build_binary_op), some others are really expected and we
244 have to be careful. */
246 /* We must prevent writing more than what the target may hold if this is for
247 an assignment and the case of tagged types is handled in build_binary_op
248 so use the lhs type if it is known to be smaller, or of constant size and
249 the rhs type is not, whatever the modes. We also force t1 in case of
250 constant size equality to minimize occurrences of view conversions on the
251 lhs of assignments. */
252 if (TREE_CONSTANT (TYPE_SIZE (t1))
253 && (!TREE_CONSTANT (TYPE_SIZE (t2))
254 || !tree_int_cst_lt (TYPE_SIZE (t2), TYPE_SIZE (t1))))
255 return t1;
257 /* Otherwise, if the lhs type is non-BLKmode, use it. Note that we know
258 that we will not have any alignment problems since, if we did, the
259 non-BLKmode type could not have been used. */
260 if (TYPE_MODE (t1) != BLKmode)
261 return t1;
263 /* If the rhs type is of constant size, use it whatever the modes. At
264 this point it is known to be smaller, or of constant size and the
265 lhs type is not. */
266 if (TREE_CONSTANT (TYPE_SIZE (t2)))
267 return t2;
269 /* Otherwise, if the rhs type is non-BLKmode, use it. */
270 if (TYPE_MODE (t2) != BLKmode)
271 return t2;
273 /* In this case, both types have variable size and BLKmode. It's
274 probably best to leave the "type mismatch" because changing it
275 could cause a bad self-referential reference. */
276 return NULL_TREE;
279 /* See if EXP contains a SAVE_EXPR in a position where we would
280 normally put it.
282 ??? This is a real kludge, but is probably the best approach short
283 of some very general solution. */
285 static bool
286 contains_save_expr_p (tree exp)
288 switch (TREE_CODE (exp))
290 case SAVE_EXPR:
291 return true;
293 case ADDR_EXPR: case INDIRECT_REF:
294 case COMPONENT_REF:
295 CASE_CONVERT: case VIEW_CONVERT_EXPR:
296 return contains_save_expr_p (TREE_OPERAND (exp, 0));
298 case CONSTRUCTOR:
300 tree value;
301 unsigned HOST_WIDE_INT ix;
303 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), ix, value)
304 if (contains_save_expr_p (value))
305 return true;
306 return false;
309 default:
310 return false;
314 /* See if EXP contains a NULL_EXPR in an expression we use for sizes. Return
315 it if so. This is used to detect types whose sizes involve computations
316 that are known to raise Constraint_Error. */
318 static tree
319 contains_null_expr (tree exp)
321 tree tem;
323 if (TREE_CODE (exp) == NULL_EXPR)
324 return exp;
326 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
328 case tcc_unary:
329 return contains_null_expr (TREE_OPERAND (exp, 0));
331 case tcc_comparison:
332 case tcc_binary:
333 tem = contains_null_expr (TREE_OPERAND (exp, 0));
334 if (tem)
335 return tem;
337 return contains_null_expr (TREE_OPERAND (exp, 1));
339 case tcc_expression:
340 switch (TREE_CODE (exp))
342 case SAVE_EXPR:
343 return contains_null_expr (TREE_OPERAND (exp, 0));
345 case COND_EXPR:
346 tem = contains_null_expr (TREE_OPERAND (exp, 0));
347 if (tem)
348 return tem;
350 tem = contains_null_expr (TREE_OPERAND (exp, 1));
351 if (tem)
352 return tem;
354 return contains_null_expr (TREE_OPERAND (exp, 2));
356 default:
357 return 0;
360 default:
361 return 0;
365 /* Return an expression tree representing an equality comparison of
366 A1 and A2, two objects of ARRAY_TYPE. The returned expression should
367 be of type RESULT_TYPE
369 Two arrays are equal in one of two ways: (1) if both have zero length
370 in some dimension (not necessarily the same dimension) or (2) if the
371 lengths in each dimension are equal and the data is equal. We perform the
372 length tests in as efficient a manner as possible. */
374 static tree
375 compare_arrays (tree result_type, tree a1, tree a2)
377 tree t1 = TREE_TYPE (a1);
378 tree t2 = TREE_TYPE (a2);
379 tree result = convert (result_type, integer_one_node);
380 tree a1_is_null = convert (result_type, integer_zero_node);
381 tree a2_is_null = convert (result_type, integer_zero_node);
382 bool length_zero_p = false;
384 /* Process each dimension separately and compare the lengths. If any
385 dimension has a size known to be zero, set SIZE_ZERO_P to 1 to
386 suppress the comparison of the data. */
387 while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
389 tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
390 tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
391 tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
392 tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
393 tree bt = get_base_type (TREE_TYPE (lb1));
394 tree length1 = fold_build2 (MINUS_EXPR, bt, ub1, lb1);
395 tree length2 = fold_build2 (MINUS_EXPR, bt, ub2, lb2);
396 tree nbt;
397 tree tem;
398 tree comparison, this_a1_is_null, this_a2_is_null;
400 /* If the length of the first array is a constant, swap our operands
401 unless the length of the second array is the constant zero.
402 Note that we have set the `length' values to the length - 1. */
403 if (TREE_CODE (length1) == INTEGER_CST
404 && !integer_zerop (fold_build2 (PLUS_EXPR, bt, length2,
405 convert (bt, integer_one_node))))
407 tem = a1, a1 = a2, a2 = tem;
408 tem = t1, t1 = t2, t2 = tem;
409 tem = lb1, lb1 = lb2, lb2 = tem;
410 tem = ub1, ub1 = ub2, ub2 = tem;
411 tem = length1, length1 = length2, length2 = tem;
412 tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
415 /* If the length of this dimension in the second array is the constant
416 zero, we can just go inside the original bounds for the first
417 array and see if last < first. */
418 if (integer_zerop (fold_build2 (PLUS_EXPR, bt, length2,
419 convert (bt, integer_one_node))))
421 tree ub = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
422 tree lb = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
424 comparison = build_binary_op (LT_EXPR, result_type, ub, lb);
425 comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
426 length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
428 length_zero_p = true;
429 this_a1_is_null = comparison;
430 this_a2_is_null = convert (result_type, integer_one_node);
433 /* If the length is some other constant value, we know that the
434 this dimension in the first array cannot be superflat, so we
435 can just use its length from the actual stored bounds. */
436 else if (TREE_CODE (length2) == INTEGER_CST)
438 ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
439 lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
440 ub2 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
441 lb2 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
442 nbt = get_base_type (TREE_TYPE (ub1));
444 comparison
445 = build_binary_op (EQ_EXPR, result_type,
446 build_binary_op (MINUS_EXPR, nbt, ub1, lb1),
447 build_binary_op (MINUS_EXPR, nbt, ub2, lb2));
449 /* Note that we know that UB2 and LB2 are constant and hence
450 cannot contain a PLACEHOLDER_EXPR. */
452 comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
453 length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
455 this_a1_is_null = build_binary_op (LT_EXPR, result_type, ub1, lb1);
456 this_a2_is_null = convert (result_type, integer_zero_node);
459 /* Otherwise compare the computed lengths. */
460 else
462 length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
463 length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);
465 comparison
466 = build_binary_op (EQ_EXPR, result_type, length1, length2);
468 this_a1_is_null
469 = build_binary_op (LT_EXPR, result_type, length1,
470 convert (bt, integer_zero_node));
471 this_a2_is_null
472 = build_binary_op (LT_EXPR, result_type, length2,
473 convert (bt, integer_zero_node));
476 result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
477 result, comparison);
479 a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
480 this_a1_is_null, a1_is_null);
481 a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
482 this_a2_is_null, a2_is_null);
484 t1 = TREE_TYPE (t1);
485 t2 = TREE_TYPE (t2);
488 /* Unless the size of some bound is known to be zero, compare the
489 data in the array. */
490 if (!length_zero_p)
492 tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));
494 if (type)
495 a1 = convert (type, a1), a2 = convert (type, a2);
497 result = build_binary_op (TRUTH_ANDIF_EXPR, result_type, result,
498 fold_build2 (EQ_EXPR, result_type, a1, a2));
502 /* The result is also true if both sizes are zero. */
503 result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
504 build_binary_op (TRUTH_ANDIF_EXPR, result_type,
505 a1_is_null, a2_is_null),
506 result);
508 /* If either operand contains SAVE_EXPRs, they have to be evaluated before
509 starting the comparison above since the place it would be otherwise
510 evaluated would be wrong. */
512 if (contains_save_expr_p (a1))
513 result = build2 (COMPOUND_EXPR, result_type, a1, result);
515 if (contains_save_expr_p (a2))
516 result = build2 (COMPOUND_EXPR, result_type, a2, result);
518 return result;
521 /* Compute the result of applying OP_CODE to LHS and RHS, where both are of
522 type TYPE. We know that TYPE is a modular type with a nonbinary
523 modulus. */
525 static tree
526 nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
527 tree rhs)
529 tree modulus = TYPE_MODULUS (type);
530 unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
531 unsigned int precision;
532 bool unsignedp = true;
533 tree op_type = type;
534 tree result;
536 /* If this is an addition of a constant, convert it to a subtraction
537 of a constant since we can do that faster. */
538 if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
540 rhs = fold_build2 (MINUS_EXPR, type, modulus, rhs);
541 op_code = MINUS_EXPR;
544 /* For the logical operations, we only need PRECISION bits. For
545 addition and subtraction, we need one more and for multiplication we
546 need twice as many. But we never want to make a size smaller than
547 our size. */
548 if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
549 needed_precision += 1;
550 else if (op_code == MULT_EXPR)
551 needed_precision *= 2;
553 precision = MAX (needed_precision, TYPE_PRECISION (op_type));
555 /* Unsigned will do for everything but subtraction. */
556 if (op_code == MINUS_EXPR)
557 unsignedp = false;
559 /* If our type is the wrong signedness or isn't wide enough, make a new
560 type and convert both our operands to it. */
561 if (TYPE_PRECISION (op_type) < precision
562 || TYPE_UNSIGNED (op_type) != unsignedp)
564 /* Copy the node so we ensure it can be modified to make it modular. */
565 op_type = copy_node (gnat_type_for_size (precision, unsignedp));
566 modulus = convert (op_type, modulus);
567 SET_TYPE_MODULUS (op_type, modulus);
568 TYPE_MODULAR_P (op_type) = 1;
569 lhs = convert (op_type, lhs);
570 rhs = convert (op_type, rhs);
573 /* Do the operation, then we'll fix it up. */
574 result = fold_build2 (op_code, op_type, lhs, rhs);
576 /* For multiplication, we have no choice but to do a full modulus
577 operation. However, we want to do this in the narrowest
578 possible size. */
579 if (op_code == MULT_EXPR)
581 tree div_type = copy_node (gnat_type_for_size (needed_precision, 1));
582 modulus = convert (div_type, modulus);
583 SET_TYPE_MODULUS (div_type, modulus);
584 TYPE_MODULAR_P (div_type) = 1;
585 result = convert (op_type,
586 fold_build2 (TRUNC_MOD_EXPR, div_type,
587 convert (div_type, result), modulus));
590 /* For subtraction, add the modulus back if we are negative. */
591 else if (op_code == MINUS_EXPR)
593 result = save_expr (result);
594 result = fold_build3 (COND_EXPR, op_type,
595 fold_build2 (LT_EXPR, integer_type_node, result,
596 convert (op_type, integer_zero_node)),
597 fold_build2 (PLUS_EXPR, op_type, result, modulus),
598 result);
601 /* For the other operations, subtract the modulus if we are >= it. */
602 else
604 result = save_expr (result);
605 result = fold_build3 (COND_EXPR, op_type,
606 fold_build2 (GE_EXPR, integer_type_node,
607 result, modulus),
608 fold_build2 (MINUS_EXPR, op_type,
609 result, modulus),
610 result);
613 return convert (type, result);
616 /* Make a binary operation of kind OP_CODE. RESULT_TYPE is the type
617 desired for the result. Usually the operation is to be performed
618 in that type. For MODIFY_EXPR and ARRAY_REF, RESULT_TYPE may be 0
619 in which case the type to be used will be derived from the operands.
621 This function is very much unlike the ones for C and C++ since we
622 have already done any type conversion and matching required. All we
623 have to do here is validate the work done by SEM and handle subtypes. */
625 tree
626 build_binary_op (enum tree_code op_code, tree result_type,
627 tree left_operand, tree right_operand)
629 tree left_type = TREE_TYPE (left_operand);
630 tree right_type = TREE_TYPE (right_operand);
631 tree left_base_type = get_base_type (left_type);
632 tree right_base_type = get_base_type (right_type);
633 tree operation_type = result_type;
634 tree best_type = NULL_TREE;
635 tree modulus, result;
636 bool has_side_effects = false;
638 if (operation_type
639 && TREE_CODE (operation_type) == RECORD_TYPE
640 && TYPE_JUSTIFIED_MODULAR_P (operation_type))
641 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
643 if (operation_type
644 && !AGGREGATE_TYPE_P (operation_type)
645 && TYPE_EXTRA_SUBTYPE_P (operation_type))
646 operation_type = get_base_type (operation_type);
648 modulus = (operation_type
649 && TREE_CODE (operation_type) == INTEGER_TYPE
650 && TYPE_MODULAR_P (operation_type)
651 ? TYPE_MODULUS (operation_type) : NULL_TREE);
653 switch (op_code)
655 case MODIFY_EXPR:
656 /* If there were integral or pointer conversions on the LHS, remove
657 them; we'll be putting them back below if needed. Likewise for
658 conversions between array and record types, except for justified
659 modular types. But don't do this if the right operand is not
660 BLKmode (for packed arrays) unless we are not changing the mode. */
661 while ((CONVERT_EXPR_P (left_operand)
662 || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
663 && (((INTEGRAL_TYPE_P (left_type)
664 || POINTER_TYPE_P (left_type))
665 && (INTEGRAL_TYPE_P (TREE_TYPE
666 (TREE_OPERAND (left_operand, 0)))
667 || POINTER_TYPE_P (TREE_TYPE
668 (TREE_OPERAND (left_operand, 0)))))
669 || (((TREE_CODE (left_type) == RECORD_TYPE
670 && !TYPE_JUSTIFIED_MODULAR_P (left_type))
671 || TREE_CODE (left_type) == ARRAY_TYPE)
672 && ((TREE_CODE (TREE_TYPE
673 (TREE_OPERAND (left_operand, 0)))
674 == RECORD_TYPE)
675 || (TREE_CODE (TREE_TYPE
676 (TREE_OPERAND (left_operand, 0)))
677 == ARRAY_TYPE))
678 && (TYPE_MODE (right_type) == BLKmode
679 || (TYPE_MODE (left_type)
680 == TYPE_MODE (TREE_TYPE
681 (TREE_OPERAND
682 (left_operand, 0))))))))
684 left_operand = TREE_OPERAND (left_operand, 0);
685 left_type = TREE_TYPE (left_operand);
688 /* If a class-wide type may be involved, force use of the RHS type. */
689 if ((TREE_CODE (right_type) == RECORD_TYPE
690 || TREE_CODE (right_type) == UNION_TYPE)
691 && TYPE_ALIGN_OK (right_type))
692 operation_type = right_type;
694 /* If we are copying between padded objects with compatible types, use
695 the padded view of the objects, this is very likely more efficient.
696 Likewise for a padded that is assigned a constructor, in order to
697 avoid putting a VIEW_CONVERT_EXPR on the LHS. But don't do this if
698 we wouldn't have actually copied anything. */
699 else if (TREE_CODE (left_type) == RECORD_TYPE
700 && TYPE_IS_PADDING_P (left_type)
701 && TREE_CONSTANT (TYPE_SIZE (left_type))
702 && ((TREE_CODE (right_operand) == COMPONENT_REF
703 && TREE_CODE (TREE_TYPE (TREE_OPERAND (right_operand, 0)))
704 == RECORD_TYPE
705 && TYPE_IS_PADDING_P
706 (TREE_TYPE (TREE_OPERAND (right_operand, 0)))
707 && gnat_types_compatible_p
708 (left_type,
709 TREE_TYPE (TREE_OPERAND (right_operand, 0))))
710 || TREE_CODE (right_operand) == CONSTRUCTOR)
711 && !integer_zerop (TYPE_SIZE (right_type)))
712 operation_type = left_type;
714 /* Find the best type to use for copying between aggregate types. */
715 else if (((TREE_CODE (left_type) == ARRAY_TYPE
716 && TREE_CODE (right_type) == ARRAY_TYPE)
717 || (TREE_CODE (left_type) == RECORD_TYPE
718 && TREE_CODE (right_type) == RECORD_TYPE))
719 && (best_type = find_common_type (left_type, right_type)))
720 operation_type = best_type;
722 /* Otherwise use the LHS type. */
723 else if (!operation_type)
724 operation_type = left_type;
726 /* Ensure everything on the LHS is valid. If we have a field reference,
727 strip anything that get_inner_reference can handle. Then remove any
728 conversions between types having the same code and mode. And mark
729 VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE. When done, we must have
730 either an INDIRECT_REF, a NULL_EXPR or a DECL node. */
731 result = left_operand;
732 while (true)
734 tree restype = TREE_TYPE (result);
736 if (TREE_CODE (result) == COMPONENT_REF
737 || TREE_CODE (result) == ARRAY_REF
738 || TREE_CODE (result) == ARRAY_RANGE_REF)
739 while (handled_component_p (result))
740 result = TREE_OPERAND (result, 0);
741 else if (TREE_CODE (result) == REALPART_EXPR
742 || TREE_CODE (result) == IMAGPART_EXPR
743 || (CONVERT_EXPR_P (result)
744 && (((TREE_CODE (restype)
745 == TREE_CODE (TREE_TYPE
746 (TREE_OPERAND (result, 0))))
747 && (TYPE_MODE (TREE_TYPE
748 (TREE_OPERAND (result, 0)))
749 == TYPE_MODE (restype)))
750 || TYPE_ALIGN_OK (restype))))
751 result = TREE_OPERAND (result, 0);
752 else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
754 TREE_ADDRESSABLE (result) = 1;
755 result = TREE_OPERAND (result, 0);
757 else
758 break;
761 gcc_assert (TREE_CODE (result) == INDIRECT_REF
762 || TREE_CODE (result) == NULL_EXPR
763 || DECL_P (result));
765 /* Convert the right operand to the operation type unless it is
766 either already of the correct type or if the type involves a
767 placeholder, since the RHS may not have the same record type. */
768 if (operation_type != right_type
769 && !CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type)))
771 right_operand = convert (operation_type, right_operand);
772 right_type = operation_type;
775 /* If the left operand is not of the same type as the operation
776 type, wrap it up in a VIEW_CONVERT_EXPR. */
777 if (left_type != operation_type)
778 left_operand = unchecked_convert (operation_type, left_operand, false);
780 has_side_effects = true;
781 modulus = NULL_TREE;
782 break;
784 case ARRAY_REF:
785 if (!operation_type)
786 operation_type = TREE_TYPE (left_type);
788 /* ... fall through ... */
790 case ARRAY_RANGE_REF:
791 /* First look through conversion between type variants. Note that
792 this changes neither the operation type nor the type domain. */
793 if (TREE_CODE (left_operand) == VIEW_CONVERT_EXPR
794 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (left_operand, 0)))
795 == TYPE_MAIN_VARIANT (left_type))
797 left_operand = TREE_OPERAND (left_operand, 0);
798 left_type = TREE_TYPE (left_operand);
801 /* Then convert the right operand to its base type. This will
802 prevent unneeded signedness conversions when sizetype is wider than
803 integer. */
804 right_operand = convert (right_base_type, right_operand);
805 right_operand = convert (TYPE_DOMAIN (left_type), right_operand);
807 if (!TREE_CONSTANT (right_operand)
808 || !TREE_CONSTANT (TYPE_MIN_VALUE (right_type)))
809 gnat_mark_addressable (left_operand);
811 modulus = NULL_TREE;
812 break;
814 case GE_EXPR:
815 case LE_EXPR:
816 case GT_EXPR:
817 case LT_EXPR:
818 gcc_assert (!POINTER_TYPE_P (left_type));
820 /* ... fall through ... */
822 case EQ_EXPR:
823 case NE_EXPR:
824 /* If either operand is a NULL_EXPR, just return a new one. */
825 if (TREE_CODE (left_operand) == NULL_EXPR)
826 return build2 (op_code, result_type,
827 build1 (NULL_EXPR, integer_type_node,
828 TREE_OPERAND (left_operand, 0)),
829 integer_zero_node);
831 else if (TREE_CODE (right_operand) == NULL_EXPR)
832 return build2 (op_code, result_type,
833 build1 (NULL_EXPR, integer_type_node,
834 TREE_OPERAND (right_operand, 0)),
835 integer_zero_node);
837 /* If either object is a justified modular types, get the
838 fields from within. */
839 if (TREE_CODE (left_type) == RECORD_TYPE
840 && TYPE_JUSTIFIED_MODULAR_P (left_type))
842 left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
843 left_operand);
844 left_type = TREE_TYPE (left_operand);
845 left_base_type = get_base_type (left_type);
848 if (TREE_CODE (right_type) == RECORD_TYPE
849 && TYPE_JUSTIFIED_MODULAR_P (right_type))
851 right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
852 right_operand);
853 right_type = TREE_TYPE (right_operand);
854 right_base_type = get_base_type (right_type);
857 /* If both objects are arrays, compare them specially. */
858 if ((TREE_CODE (left_type) == ARRAY_TYPE
859 || (TREE_CODE (left_type) == INTEGER_TYPE
860 && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
861 && (TREE_CODE (right_type) == ARRAY_TYPE
862 || (TREE_CODE (right_type) == INTEGER_TYPE
863 && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
865 result = compare_arrays (result_type, left_operand, right_operand);
867 if (op_code == NE_EXPR)
868 result = invert_truthvalue (result);
869 else
870 gcc_assert (op_code == EQ_EXPR);
872 return result;
875 /* Otherwise, the base types must be the same unless the objects are
876 fat pointers or records. If we have records, use the best type and
877 convert both operands to that type. */
878 if (left_base_type != right_base_type)
880 if (TYPE_FAT_POINTER_P (left_base_type)
881 && TYPE_FAT_POINTER_P (right_base_type)
882 && TYPE_MAIN_VARIANT (left_base_type)
883 == TYPE_MAIN_VARIANT (right_base_type))
884 best_type = left_base_type;
885 else if (TREE_CODE (left_base_type) == RECORD_TYPE
886 && TREE_CODE (right_base_type) == RECORD_TYPE)
888 /* The only way these are permitted to be the same is if both
889 types have the same name. In that case, one of them must
890 not be self-referential. Use that one as the best type.
891 Even better is if one is of fixed size. */
892 gcc_assert (TYPE_NAME (left_base_type)
893 && (TYPE_NAME (left_base_type)
894 == TYPE_NAME (right_base_type)));
896 if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
897 best_type = left_base_type;
898 else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
899 best_type = right_base_type;
900 else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
901 best_type = left_base_type;
902 else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
903 best_type = right_base_type;
904 else
905 gcc_unreachable ();
907 else
908 gcc_unreachable ();
910 left_operand = convert (best_type, left_operand);
911 right_operand = convert (best_type, right_operand);
914 /* If we are comparing a fat pointer against zero, we need to
915 just compare the data pointer. */
916 else if (TYPE_FAT_POINTER_P (left_base_type)
917 && TREE_CODE (right_operand) == CONSTRUCTOR
918 && integer_zerop (VEC_index (constructor_elt,
919 CONSTRUCTOR_ELTS (right_operand),
921 ->value))
923 right_operand = build_component_ref (left_operand, NULL_TREE,
924 TYPE_FIELDS (left_base_type),
925 false);
926 left_operand = convert (TREE_TYPE (right_operand),
927 integer_zero_node);
929 else
931 left_operand = convert (left_base_type, left_operand);
932 right_operand = convert (right_base_type, right_operand);
935 modulus = NULL_TREE;
936 break;
938 case PREINCREMENT_EXPR:
939 case PREDECREMENT_EXPR:
940 case POSTINCREMENT_EXPR:
941 case POSTDECREMENT_EXPR:
942 /* In these, the result type and the left operand type should be the
943 same. Do the operation in the base type of those and convert the
944 right operand (which is an integer) to that type.
946 Note that these operations are only used in loop control where
947 we guarantee that no overflow can occur. So nothing special need
948 be done for modular types. */
950 gcc_assert (left_type == result_type);
951 operation_type = get_base_type (result_type);
952 left_operand = convert (operation_type, left_operand);
953 right_operand = convert (operation_type, right_operand);
954 has_side_effects = true;
955 modulus = NULL_TREE;
956 break;
958 case LSHIFT_EXPR:
959 case RSHIFT_EXPR:
960 case LROTATE_EXPR:
961 case RROTATE_EXPR:
962 /* The RHS of a shift can be any type. Also, ignore any modulus
963 (we used to abort, but this is needed for unchecked conversion
964 to modular types). Otherwise, processing is the same as normal. */
965 gcc_assert (operation_type == left_base_type);
966 modulus = NULL_TREE;
967 left_operand = convert (operation_type, left_operand);
968 break;
970 case TRUTH_ANDIF_EXPR:
971 case TRUTH_ORIF_EXPR:
972 case TRUTH_AND_EXPR:
973 case TRUTH_OR_EXPR:
974 case TRUTH_XOR_EXPR:
975 left_operand = gnat_truthvalue_conversion (left_operand);
976 right_operand = gnat_truthvalue_conversion (right_operand);
977 goto common;
979 case BIT_AND_EXPR:
980 case BIT_IOR_EXPR:
981 case BIT_XOR_EXPR:
982 /* For binary modulus, if the inputs are in range, so are the
983 outputs. */
984 if (modulus && integer_pow2p (modulus))
985 modulus = NULL_TREE;
987 goto common;
989 case COMPLEX_EXPR:
990 gcc_assert (TREE_TYPE (result_type) == left_base_type
991 && TREE_TYPE (result_type) == right_base_type);
992 left_operand = convert (left_base_type, left_operand);
993 right_operand = convert (right_base_type, right_operand);
994 break;
996 case TRUNC_DIV_EXPR: case TRUNC_MOD_EXPR:
997 case CEIL_DIV_EXPR: case CEIL_MOD_EXPR:
998 case FLOOR_DIV_EXPR: case FLOOR_MOD_EXPR:
999 case ROUND_DIV_EXPR: case ROUND_MOD_EXPR:
1000 /* These always produce results lower than either operand. */
1001 modulus = NULL_TREE;
1002 goto common;
1004 case POINTER_PLUS_EXPR:
1005 gcc_assert (operation_type == left_base_type
1006 && sizetype == right_base_type);
1007 left_operand = convert (operation_type, left_operand);
1008 right_operand = convert (sizetype, right_operand);
1009 break;
1011 default:
1012 common:
1013 /* The result type should be the same as the base types of the
1014 both operands (and they should be the same). Convert
1015 everything to the result type. */
1017 gcc_assert (operation_type == left_base_type
1018 && left_base_type == right_base_type);
1019 left_operand = convert (operation_type, left_operand);
1020 right_operand = convert (operation_type, right_operand);
1023 if (modulus && !integer_pow2p (modulus))
1025 result = nonbinary_modular_operation (op_code, operation_type,
1026 left_operand, right_operand);
1027 modulus = NULL_TREE;
1029 /* If either operand is a NULL_EXPR, just return a new one. */
1030 else if (TREE_CODE (left_operand) == NULL_EXPR)
1031 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
1032 else if (TREE_CODE (right_operand) == NULL_EXPR)
1033 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
1034 else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
1035 result = fold (build4 (op_code, operation_type, left_operand,
1036 right_operand, NULL_TREE, NULL_TREE));
1037 else
1038 result
1039 = fold_build2 (op_code, operation_type, left_operand, right_operand);
1041 TREE_SIDE_EFFECTS (result) |= has_side_effects;
1042 TREE_CONSTANT (result)
1043 |= (TREE_CONSTANT (left_operand) & TREE_CONSTANT (right_operand)
1044 && op_code != ARRAY_REF && op_code != ARRAY_RANGE_REF);
1046 if ((op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
1047 && TYPE_VOLATILE (operation_type))
1048 TREE_THIS_VOLATILE (result) = 1;
1050 /* If we are working with modular types, perform the MOD operation
1051 if something above hasn't eliminated the need for it. */
1052 if (modulus)
1053 result = fold_build2 (FLOOR_MOD_EXPR, operation_type, result,
1054 convert (operation_type, modulus));
1056 if (result_type && result_type != operation_type)
1057 result = convert (result_type, result);
1059 return result;
1062 /* Similar, but for unary operations. */
1064 tree
1065 build_unary_op (enum tree_code op_code, tree result_type, tree operand)
1067 tree type = TREE_TYPE (operand);
1068 tree base_type = get_base_type (type);
1069 tree operation_type = result_type;
1070 tree result;
1071 bool side_effects = false;
1073 if (operation_type
1074 && TREE_CODE (operation_type) == RECORD_TYPE
1075 && TYPE_JUSTIFIED_MODULAR_P (operation_type))
1076 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
1078 if (operation_type
1079 && !AGGREGATE_TYPE_P (operation_type)
1080 && TYPE_EXTRA_SUBTYPE_P (operation_type))
1081 operation_type = get_base_type (operation_type);
1083 switch (op_code)
1085 case REALPART_EXPR:
1086 case IMAGPART_EXPR:
1087 if (!operation_type)
1088 result_type = operation_type = TREE_TYPE (type);
1089 else
1090 gcc_assert (result_type == TREE_TYPE (type));
1092 result = fold_build1 (op_code, operation_type, operand);
1093 break;
1095 case TRUTH_NOT_EXPR:
1096 gcc_assert (result_type == base_type);
1097 result = invert_truthvalue (gnat_truthvalue_conversion (operand));
1098 break;
1100 case ATTR_ADDR_EXPR:
1101 case ADDR_EXPR:
1102 switch (TREE_CODE (operand))
1104 case INDIRECT_REF:
1105 case UNCONSTRAINED_ARRAY_REF:
1106 result = TREE_OPERAND (operand, 0);
1108 /* Make sure the type here is a pointer, not a reference.
1109 GCC wants pointer types for function addresses. */
1110 if (!result_type)
1111 result_type = build_pointer_type (type);
1113 /* If the underlying object can alias everything, propagate the
1114 property since we are effectively retrieving the object. */
1115 if (POINTER_TYPE_P (TREE_TYPE (result))
1116 && TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (result)))
1118 if (TREE_CODE (result_type) == POINTER_TYPE
1119 && !TYPE_REF_CAN_ALIAS_ALL (result_type))
1120 result_type
1121 = build_pointer_type_for_mode (TREE_TYPE (result_type),
1122 TYPE_MODE (result_type),
1123 true);
1124 else if (TREE_CODE (result_type) == REFERENCE_TYPE
1125 && !TYPE_REF_CAN_ALIAS_ALL (result_type))
1126 result_type
1127 = build_reference_type_for_mode (TREE_TYPE (result_type),
1128 TYPE_MODE (result_type),
1129 true);
1131 break;
1133 case NULL_EXPR:
1134 result = operand;
1135 TREE_TYPE (result) = type = build_pointer_type (type);
1136 break;
1138 case ARRAY_REF:
1139 case ARRAY_RANGE_REF:
1140 case COMPONENT_REF:
1141 case BIT_FIELD_REF:
1142 /* If this is for 'Address, find the address of the prefix and
1143 add the offset to the field. Otherwise, do this the normal
1144 way. */
1145 if (op_code == ATTR_ADDR_EXPR)
1147 HOST_WIDE_INT bitsize;
1148 HOST_WIDE_INT bitpos;
1149 tree offset, inner;
1150 enum machine_mode mode;
1151 int unsignedp, volatilep;
1153 inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
1154 &mode, &unsignedp, &volatilep,
1155 false);
1157 /* If INNER is a padding type whose field has a self-referential
1158 size, convert to that inner type. We know the offset is zero
1159 and we need to have that type visible. */
1160 if (TREE_CODE (TREE_TYPE (inner)) == RECORD_TYPE
1161 && TYPE_IS_PADDING_P (TREE_TYPE (inner))
1162 && (CONTAINS_PLACEHOLDER_P
1163 (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS
1164 (TREE_TYPE (inner)))))))
1165 inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
1166 inner);
1168 /* Compute the offset as a byte offset from INNER. */
1169 if (!offset)
1170 offset = size_zero_node;
1172 if (bitpos % BITS_PER_UNIT != 0)
1173 post_error
1174 ("taking address of object not aligned on storage unit?",
1175 error_gnat_node);
1177 offset = size_binop (PLUS_EXPR, offset,
1178 size_int (bitpos / BITS_PER_UNIT));
1180 /* Take the address of INNER, convert the offset to void *, and
1181 add then. It will later be converted to the desired result
1182 type, if any. */
1183 inner = build_unary_op (ADDR_EXPR, NULL_TREE, inner);
1184 inner = convert (ptr_void_type_node, inner);
1185 result = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
1186 inner, offset);
1187 result = convert (build_pointer_type (TREE_TYPE (operand)),
1188 result);
1189 break;
1191 goto common;
1193 case CONSTRUCTOR:
1194 /* If this is just a constructor for a padded record, we can
1195 just take the address of the single field and convert it to
1196 a pointer to our type. */
1197 if (TREE_CODE (type) == RECORD_TYPE && TYPE_IS_PADDING_P (type))
1199 result = (VEC_index (constructor_elt,
1200 CONSTRUCTOR_ELTS (operand),
1202 ->value);
1204 result = convert (build_pointer_type (TREE_TYPE (operand)),
1205 build_unary_op (ADDR_EXPR, NULL_TREE, result));
1206 break;
1209 goto common;
1211 case NOP_EXPR:
1212 if (AGGREGATE_TYPE_P (type)
1213 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
1214 return build_unary_op (ADDR_EXPR, result_type,
1215 TREE_OPERAND (operand, 0));
1217 /* ... fallthru ... */
1219 case VIEW_CONVERT_EXPR:
1220 /* If this just a variant conversion or if the conversion doesn't
1221 change the mode, get the result type from this type and go down.
1222 This is needed for conversions of CONST_DECLs, to eventually get
1223 to the address of their CORRESPONDING_VARs. */
1224 if ((TYPE_MAIN_VARIANT (type)
1225 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (operand, 0))))
1226 || (TYPE_MODE (type) != BLKmode
1227 && (TYPE_MODE (type)
1228 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0))))))
1229 return build_unary_op (ADDR_EXPR,
1230 (result_type ? result_type
1231 : build_pointer_type (type)),
1232 TREE_OPERAND (operand, 0));
1233 goto common;
1235 case CONST_DECL:
1236 operand = DECL_CONST_CORRESPONDING_VAR (operand);
1238 /* ... fall through ... */
1240 default:
1241 common:
1243 /* If we are taking the address of a padded record whose field is
1244 contains a template, take the address of the template. */
1245 if (TREE_CODE (type) == RECORD_TYPE
1246 && TYPE_IS_PADDING_P (type)
1247 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
1248 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
1250 type = TREE_TYPE (TYPE_FIELDS (type));
1251 operand = convert (type, operand);
1254 if (type != error_mark_node)
1255 operation_type = build_pointer_type (type);
1257 gnat_mark_addressable (operand);
1258 result = fold_build1 (ADDR_EXPR, operation_type, operand);
1261 TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
1262 break;
1264 case INDIRECT_REF:
1265 /* If we want to refer to an entire unconstrained array,
1266 make up an expression to do so. This will never survive to
1267 the backend. If TYPE is a thin pointer, first convert the
1268 operand to a fat pointer. */
1269 if (TYPE_THIN_POINTER_P (type)
1270 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
1272 operand
1273 = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))),
1274 operand);
1275 type = TREE_TYPE (operand);
1278 if (TYPE_FAT_POINTER_P (type))
1280 result = build1 (UNCONSTRAINED_ARRAY_REF,
1281 TYPE_UNCONSTRAINED_ARRAY (type), operand);
1282 TREE_READONLY (result) = TREE_STATIC (result)
1283 = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
1285 else if (TREE_CODE (operand) == ADDR_EXPR)
1286 result = TREE_OPERAND (operand, 0);
1288 else
1290 result = fold_build1 (op_code, TREE_TYPE (type), operand);
1291 TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
1294 side_effects
1295 = (!TYPE_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)));
1296 break;
1298 case NEGATE_EXPR:
1299 case BIT_NOT_EXPR:
1301 tree modulus = ((operation_type
1302 && TREE_CODE (operation_type) == INTEGER_TYPE
1303 && TYPE_MODULAR_P (operation_type))
1304 ? TYPE_MODULUS (operation_type) : NULL_TREE);
1305 int mod_pow2 = modulus && integer_pow2p (modulus);
1307 /* If this is a modular type, there are various possibilities
1308 depending on the operation and whether the modulus is a
1309 power of two or not. */
1311 if (modulus)
1313 gcc_assert (operation_type == base_type);
1314 operand = convert (operation_type, operand);
1316 /* The fastest in the negate case for binary modulus is
1317 the straightforward code; the TRUNC_MOD_EXPR below
1318 is an AND operation. */
1319 if (op_code == NEGATE_EXPR && mod_pow2)
1320 result = fold_build2 (TRUNC_MOD_EXPR, operation_type,
1321 fold_build1 (NEGATE_EXPR, operation_type,
1322 operand),
1323 modulus);
1325 /* For nonbinary negate case, return zero for zero operand,
1326 else return the modulus minus the operand. If the modulus
1327 is a power of two minus one, we can do the subtraction
1328 as an XOR since it is equivalent and faster on most machines. */
1329 else if (op_code == NEGATE_EXPR && !mod_pow2)
1331 if (integer_pow2p (fold_build2 (PLUS_EXPR, operation_type,
1332 modulus,
1333 convert (operation_type,
1334 integer_one_node))))
1335 result = fold_build2 (BIT_XOR_EXPR, operation_type,
1336 operand, modulus);
1337 else
1338 result = fold_build2 (MINUS_EXPR, operation_type,
1339 modulus, operand);
1341 result = fold_build3 (COND_EXPR, operation_type,
1342 fold_build2 (NE_EXPR,
1343 integer_type_node,
1344 operand,
1345 convert
1346 (operation_type,
1347 integer_zero_node)),
1348 result, operand);
1350 else
1352 /* For the NOT cases, we need a constant equal to
1353 the modulus minus one. For a binary modulus, we
1354 XOR against the constant and subtract the operand from
1355 that constant for nonbinary modulus. */
1357 tree cnst = fold_build2 (MINUS_EXPR, operation_type, modulus,
1358 convert (operation_type,
1359 integer_one_node));
1361 if (mod_pow2)
1362 result = fold_build2 (BIT_XOR_EXPR, operation_type,
1363 operand, cnst);
1364 else
1365 result = fold_build2 (MINUS_EXPR, operation_type,
1366 cnst, operand);
1369 break;
1373 /* ... fall through ... */
1375 default:
1376 gcc_assert (operation_type == base_type);
1377 result = fold_build1 (op_code, operation_type,
1378 convert (operation_type, operand));
1381 if (side_effects)
1383 TREE_SIDE_EFFECTS (result) = 1;
1384 if (TREE_CODE (result) == INDIRECT_REF)
1385 TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
1388 if (result_type && TREE_TYPE (result) != result_type)
1389 result = convert (result_type, result);
1391 return result;
1394 /* Similar, but for COND_EXPR. */
1396 tree
1397 build_cond_expr (tree result_type, tree condition_operand,
1398 tree true_operand, tree false_operand)
1400 tree result;
1401 bool addr_p = false;
1403 /* The front-end verifies that result, true and false operands have same base
1404 type. Convert everything to the result type. */
1406 true_operand = convert (result_type, true_operand);
1407 false_operand = convert (result_type, false_operand);
1409 /* If the result type is unconstrained, take the address of
1410 the operands and then dereference our result. */
1411 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1412 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1414 addr_p = true;
1415 result_type = build_pointer_type (result_type);
1416 true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
1417 false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
1420 result = fold_build3 (COND_EXPR, result_type, condition_operand,
1421 true_operand, false_operand);
1423 /* If either operand is a SAVE_EXPR (possibly surrounded by
1424 arithmetic, make sure it gets done. */
1425 true_operand = skip_simple_arithmetic (true_operand);
1426 false_operand = skip_simple_arithmetic (false_operand);
1428 if (TREE_CODE (true_operand) == SAVE_EXPR)
1429 result = build2 (COMPOUND_EXPR, result_type, true_operand, result);
1431 if (TREE_CODE (false_operand) == SAVE_EXPR)
1432 result = build2 (COMPOUND_EXPR, result_type, false_operand, result);
1434 /* ??? Seems the code above is wrong, as it may move ahead of the COND
1435 SAVE_EXPRs with side effects and not shared by both arms. */
1437 if (addr_p)
1438 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1440 return result;
1443 /* Similar, but for RETURN_EXPR. If RESULT_DECL is non-zero, build
1444 a RETURN_EXPR around the assignment of RET_VAL to RESULT_DECL.
1445 If RESULT_DECL is zero, build a bare RETURN_EXPR. */
1447 tree
1448 build_return_expr (tree result_decl, tree ret_val)
1450 tree result_expr;
1452 if (result_decl)
1454 /* The gimplifier explicitly enforces the following invariant:
1456 RETURN_EXPR
1458 MODIFY_EXPR
1461 RESULT_DECL ...
1463 As a consequence, type-homogeneity dictates that we use the type
1464 of the RESULT_DECL as the operation type. */
1466 tree operation_type = TREE_TYPE (result_decl);
1468 /* Convert the right operand to the operation type. Note that
1469 it's the same transformation as in the MODIFY_EXPR case of
1470 build_binary_op with the additional guarantee that the type
1471 cannot involve a placeholder, since otherwise the function
1472 would use the "target pointer" return mechanism. */
1474 if (operation_type != TREE_TYPE (ret_val))
1475 ret_val = convert (operation_type, ret_val);
1477 result_expr
1478 = build2 (MODIFY_EXPR, operation_type, result_decl, ret_val);
1480 else
1481 result_expr = NULL_TREE;
1483 return build1 (RETURN_EXPR, void_type_node, result_expr);
1486 /* Build a CALL_EXPR to call FUNDECL with one argument, ARG. Return
1487 the CALL_EXPR. */
1489 tree
1490 build_call_1_expr (tree fundecl, tree arg)
1492 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (fundecl)),
1493 build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
1494 1, arg);
1495 TREE_SIDE_EFFECTS (call) = 1;
1496 return call;
1499 /* Build a CALL_EXPR to call FUNDECL with two arguments, ARG1 & ARG2. Return
1500 the CALL_EXPR. */
1502 tree
1503 build_call_2_expr (tree fundecl, tree arg1, tree arg2)
1505 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (fundecl)),
1506 build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
1507 2, arg1, arg2);
1508 TREE_SIDE_EFFECTS (call) = 1;
1509 return call;
1512 /* Likewise to call FUNDECL with no arguments. */
1514 tree
1515 build_call_0_expr (tree fundecl)
1517 /* We rely on build_call_nary to compute TREE_SIDE_EFFECTS. This makes
1518 it possible to propagate DECL_IS_PURE on parameterless functions. */
1519 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (fundecl)),
1520 build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
1522 return call;
1525 /* Call a function that raises an exception and pass the line number and file
1526 name, if requested. MSG says which exception function to call.
1528 GNAT_NODE is the gnat node conveying the source location for which the
1529 error should be signaled, or Empty in which case the error is signaled on
1530 the current ref_file_name/input_line.
1532 KIND says which kind of exception this is for
1533 (N_Raise_{Constraint,Storage,Program}_Error). */
1535 tree
1536 build_call_raise (int msg, Node_Id gnat_node, char kind)
1538 tree fndecl = gnat_raise_decls[msg];
1539 tree label = get_exception_label (kind);
1540 tree filename;
1541 int line_number;
1542 const char *str;
1543 int len;
1545 /* If this is to be done as a goto, handle that case. */
1546 if (label)
1548 Entity_Id local_raise = Get_Local_Raise_Call_Entity ();
1549 tree gnu_result = build1 (GOTO_EXPR, void_type_node, label);
1551 /* If Local_Raise is present, generate
1552 Local_Raise (exception'Identity); */
1553 if (Present (local_raise))
1555 tree gnu_local_raise
1556 = gnat_to_gnu_entity (local_raise, NULL_TREE, 0);
1557 tree gnu_exception_entity
1558 = gnat_to_gnu_entity (Get_RT_Exception_Entity (msg), NULL_TREE, 0);
1559 tree gnu_call
1560 = build_call_1_expr (gnu_local_raise,
1561 build_unary_op (ADDR_EXPR, NULL_TREE,
1562 gnu_exception_entity));
1564 gnu_result = build2 (COMPOUND_EXPR, void_type_node,
1565 gnu_call, gnu_result);}
1567 return gnu_result;
1571 = (Debug_Flag_NN || Exception_Locations_Suppressed)
1572 ? ""
1573 : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1574 ? IDENTIFIER_POINTER
1575 (get_identifier (Get_Name_String
1576 (Debug_Source_Name
1577 (Get_Source_File_Index (Sloc (gnat_node))))))
1578 : ref_filename;
1580 len = strlen (str) + 1;
1581 filename = build_string (len, str);
1582 line_number
1583 = (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1584 ? Get_Logical_Line_Number (Sloc(gnat_node)) : input_line;
1586 TREE_TYPE (filename)
1587 = build_array_type (char_type_node,
1588 build_index_type (build_int_cst (NULL_TREE, len)));
1590 return
1591 build_call_2_expr (fndecl,
1592 build1 (ADDR_EXPR, build_pointer_type (char_type_node),
1593 filename),
1594 build_int_cst (NULL_TREE, line_number));
1597 /* qsort comparer for the bit positions of two constructor elements
1598 for record components. */
1600 static int
1601 compare_elmt_bitpos (const PTR rt1, const PTR rt2)
1603 const_tree const elmt1 = * (const_tree const *) rt1;
1604 const_tree const elmt2 = * (const_tree const *) rt2;
1605 const_tree const field1 = TREE_PURPOSE (elmt1);
1606 const_tree const field2 = TREE_PURPOSE (elmt2);
1607 const int ret
1608 = tree_int_cst_compare (bit_position (field1), bit_position (field2));
1610 return ret ? ret : (int) (DECL_UID (field1) - DECL_UID (field2));
1613 /* Return a CONSTRUCTOR of TYPE whose list is LIST. */
1615 tree
1616 gnat_build_constructor (tree type, tree list)
1618 tree elmt;
1619 int n_elmts;
1620 bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
1621 bool side_effects = false;
1622 tree result;
1624 /* Scan the elements to see if they are all constant or if any has side
1625 effects, to let us set global flags on the resulting constructor. Count
1626 the elements along the way for possible sorting purposes below. */
1627 for (n_elmts = 0, elmt = list; elmt; elmt = TREE_CHAIN (elmt), n_elmts ++)
1629 if (!TREE_CONSTANT (TREE_VALUE (elmt))
1630 || (TREE_CODE (type) == RECORD_TYPE
1631 && DECL_BIT_FIELD (TREE_PURPOSE (elmt))
1632 && TREE_CODE (TREE_VALUE (elmt)) != INTEGER_CST)
1633 || !initializer_constant_valid_p (TREE_VALUE (elmt),
1634 TREE_TYPE (TREE_VALUE (elmt))))
1635 allconstant = false;
1637 if (TREE_SIDE_EFFECTS (TREE_VALUE (elmt)))
1638 side_effects = true;
1640 /* Propagate an NULL_EXPR from the size of the type. We won't ever
1641 be executing the code we generate here in that case, but handle it
1642 specially to avoid the compiler blowing up. */
1643 if (TREE_CODE (type) == RECORD_TYPE
1644 && (0 != (result
1645 = contains_null_expr (DECL_SIZE (TREE_PURPOSE (elmt))))))
1646 return build1 (NULL_EXPR, type, TREE_OPERAND (result, 0));
1649 /* For record types with constant components only, sort field list
1650 by increasing bit position. This is necessary to ensure the
1651 constructor can be output as static data. */
1652 if (allconstant && TREE_CODE (type) == RECORD_TYPE && n_elmts > 1)
1654 /* Fill an array with an element tree per index, and ask qsort to order
1655 them according to what a bitpos comparison function says. */
1656 tree *gnu_arr = (tree *) alloca (sizeof (tree) * n_elmts);
1657 int i;
1659 for (i = 0, elmt = list; elmt; elmt = TREE_CHAIN (elmt), i++)
1660 gnu_arr[i] = elmt;
1662 qsort (gnu_arr, n_elmts, sizeof (tree), compare_elmt_bitpos);
1664 /* Then reconstruct the list from the sorted array contents. */
1665 list = NULL_TREE;
1666 for (i = n_elmts - 1; i >= 0; i--)
1668 TREE_CHAIN (gnu_arr[i]) = list;
1669 list = gnu_arr[i];
1673 result = build_constructor_from_list (type, list);
1674 TREE_CONSTANT (result) = TREE_STATIC (result) = allconstant;
1675 TREE_SIDE_EFFECTS (result) = side_effects;
1676 TREE_READONLY (result) = TYPE_READONLY (type) || allconstant;
1677 return result;
1680 /* Return a COMPONENT_REF to access a field that is given by COMPONENT,
1681 an IDENTIFIER_NODE giving the name of the field, or FIELD, a FIELD_DECL,
1682 for the field. Don't fold the result if NO_FOLD_P is true.
1684 We also handle the fact that we might have been passed a pointer to the
1685 actual record and know how to look for fields in variant parts. */
1687 static tree
1688 build_simple_component_ref (tree record_variable, tree component,
1689 tree field, bool no_fold_p)
1691 tree record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_variable));
1692 tree ref, inner_variable;
1694 gcc_assert ((TREE_CODE (record_type) == RECORD_TYPE
1695 || TREE_CODE (record_type) == UNION_TYPE
1696 || TREE_CODE (record_type) == QUAL_UNION_TYPE)
1697 && TYPE_SIZE (record_type)
1698 && (component != 0) != (field != 0));
1700 /* If no field was specified, look for a field with the specified name
1701 in the current record only. */
1702 if (!field)
1703 for (field = TYPE_FIELDS (record_type); field;
1704 field = TREE_CHAIN (field))
1705 if (DECL_NAME (field) == component)
1706 break;
1708 if (!field)
1709 return NULL_TREE;
1711 /* If this field is not in the specified record, see if we can find
1712 something in the record whose original field is the same as this one. */
1713 if (DECL_CONTEXT (field) != record_type)
1714 /* Check if there is a field with name COMPONENT in the record. */
1716 tree new_field;
1718 /* First loop thru normal components. */
1720 for (new_field = TYPE_FIELDS (record_type); new_field;
1721 new_field = TREE_CHAIN (new_field))
1722 if (field == new_field
1723 || DECL_ORIGINAL_FIELD (new_field) == field
1724 || new_field == DECL_ORIGINAL_FIELD (field)
1725 || (DECL_ORIGINAL_FIELD (field)
1726 && (DECL_ORIGINAL_FIELD (field)
1727 == DECL_ORIGINAL_FIELD (new_field))))
1728 break;
1730 /* Next, loop thru DECL_INTERNAL_P components if we haven't found
1731 the component in the first search. Doing this search in 2 steps
1732 is required to avoiding hidden homonymous fields in the
1733 _Parent field. */
1735 if (!new_field)
1736 for (new_field = TYPE_FIELDS (record_type); new_field;
1737 new_field = TREE_CHAIN (new_field))
1738 if (DECL_INTERNAL_P (new_field))
1740 tree field_ref
1741 = build_simple_component_ref (record_variable,
1742 NULL_TREE, new_field, no_fold_p);
1743 ref = build_simple_component_ref (field_ref, NULL_TREE, field,
1744 no_fold_p);
1746 if (ref)
1747 return ref;
1750 field = new_field;
1753 if (!field)
1754 return NULL_TREE;
1756 /* If the field's offset has overflowed, do not attempt to access it
1757 as doing so may trigger sanity checks deeper in the back-end.
1758 Note that we don't need to warn since this will be done on trying
1759 to declare the object. */
1760 if (TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST
1761 && TREE_OVERFLOW (DECL_FIELD_OFFSET (field)))
1762 return NULL_TREE;
1764 /* Look through conversion between type variants. Note that this
1765 is transparent as far as the field is concerned. */
1766 if (TREE_CODE (record_variable) == VIEW_CONVERT_EXPR
1767 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (record_variable, 0)))
1768 == record_type)
1769 inner_variable = TREE_OPERAND (record_variable, 0);
1770 else
1771 inner_variable = record_variable;
1773 ref = build3 (COMPONENT_REF, TREE_TYPE (field), inner_variable, field,
1774 NULL_TREE);
1776 if (TREE_READONLY (record_variable) || TREE_READONLY (field))
1777 TREE_READONLY (ref) = 1;
1778 if (TREE_THIS_VOLATILE (record_variable) || TREE_THIS_VOLATILE (field)
1779 || TYPE_VOLATILE (record_type))
1780 TREE_THIS_VOLATILE (ref) = 1;
1782 if (no_fold_p)
1783 return ref;
1785 /* The generic folder may punt in this case because the inner array type
1786 can be self-referential, but folding is in fact not problematic. */
1787 else if (TREE_CODE (record_variable) == CONSTRUCTOR
1788 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (record_variable)))
1790 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (record_variable);
1791 unsigned HOST_WIDE_INT idx;
1792 tree index, value;
1793 FOR_EACH_CONSTRUCTOR_ELT (elts, idx, index, value)
1794 if (index == field)
1795 return value;
1796 return ref;
1799 else
1800 return fold (ref);
1803 /* Like build_simple_component_ref, except that we give an error if the
1804 reference could not be found. */
1806 tree
1807 build_component_ref (tree record_variable, tree component,
1808 tree field, bool no_fold_p)
1810 tree ref = build_simple_component_ref (record_variable, component, field,
1811 no_fold_p);
1813 if (ref)
1814 return ref;
1816 /* If FIELD was specified, assume this is an invalid user field so
1817 raise constraint error. Otherwise, we can't find the type to return, so
1818 abort. */
1819 gcc_assert (field);
1820 return build1 (NULL_EXPR, TREE_TYPE (field),
1821 build_call_raise (CE_Discriminant_Check_Failed, Empty,
1822 N_Raise_Constraint_Error));
1825 /* Build a GCC tree to call an allocation or deallocation function.
1826 If GNU_OBJ is nonzero, it is an object to deallocate. Otherwise,
1827 generate an allocator.
1829 GNU_SIZE is the size of the object in bytes and ALIGN is the alignment in
1830 bits. GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the
1831 storage pool to use. If not preset, malloc and free will be used except
1832 if GNAT_PROC is the "fake" value of -1, in which case we allocate the
1833 object dynamically on the stack frame. */
1835 tree
1836 build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, unsigned align,
1837 Entity_Id gnat_proc, Entity_Id gnat_pool,
1838 Node_Id gnat_node)
1840 tree gnu_align = size_int (align / BITS_PER_UNIT);
1842 gnu_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (gnu_size, gnu_obj);
1844 if (Present (gnat_proc))
1846 /* The storage pools are obviously always tagged types, but the
1847 secondary stack uses the same mechanism and is not tagged */
1848 if (Is_Tagged_Type (Etype (gnat_pool)))
1850 /* The size is the third parameter; the alignment is the
1851 same type. */
1852 Entity_Id gnat_size_type
1853 = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
1854 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
1855 tree gnu_proc = gnat_to_gnu (gnat_proc);
1856 tree gnu_proc_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_proc);
1857 tree gnu_pool = gnat_to_gnu (gnat_pool);
1858 tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
1859 tree gnu_call;
1861 gnu_size = convert (gnu_size_type, gnu_size);
1862 gnu_align = convert (gnu_size_type, gnu_align);
1864 /* The first arg is always the address of the storage pool; next
1865 comes the address of the object, for a deallocator, then the
1866 size and alignment. */
1867 if (gnu_obj)
1868 gnu_call = build_call_nary (TREE_TYPE (TREE_TYPE (gnu_proc)),
1869 gnu_proc_addr, 4, gnu_pool_addr,
1870 gnu_obj, gnu_size, gnu_align);
1871 else
1872 gnu_call = build_call_nary (TREE_TYPE (TREE_TYPE (gnu_proc)),
1873 gnu_proc_addr, 3, gnu_pool_addr,
1874 gnu_size, gnu_align);
1875 TREE_SIDE_EFFECTS (gnu_call) = 1;
1876 return gnu_call;
1879 /* Secondary stack case. */
1880 else
1882 /* The size is the second parameter */
1883 Entity_Id gnat_size_type
1884 = Etype (Next_Formal (First_Formal (gnat_proc)));
1885 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
1886 tree gnu_proc = gnat_to_gnu (gnat_proc);
1887 tree gnu_proc_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_proc);
1888 tree gnu_call;
1890 gnu_size = convert (gnu_size_type, gnu_size);
1892 /* The first arg is the address of the object, for a
1893 deallocator, then the size */
1894 if (gnu_obj)
1895 gnu_call = build_call_nary (TREE_TYPE (TREE_TYPE (gnu_proc)),
1896 gnu_proc_addr, 2, gnu_obj, gnu_size);
1897 else
1898 gnu_call = build_call_nary (TREE_TYPE (TREE_TYPE (gnu_proc)),
1899 gnu_proc_addr, 1, gnu_size);
1900 TREE_SIDE_EFFECTS (gnu_call) = 1;
1901 return gnu_call;
1905 else if (gnu_obj)
1906 return build_call_1_expr (free_decl, gnu_obj);
1908 /* ??? For now, disable variable-sized allocators in the stack since
1909 we can't yet gimplify an ALLOCATE_EXPR. */
1910 else if (gnat_pool == -1
1911 && TREE_CODE (gnu_size) == INTEGER_CST && !flag_stack_check)
1913 /* If the size is a constant, we can put it in the fixed portion of
1914 the stack frame to avoid the need to adjust the stack pointer. */
1915 if (TREE_CODE (gnu_size) == INTEGER_CST && !flag_stack_check)
1917 tree gnu_range
1918 = build_range_type (NULL_TREE, size_one_node, gnu_size);
1919 tree gnu_array_type = build_array_type (char_type_node, gnu_range);
1920 tree gnu_decl
1921 = create_var_decl (get_identifier ("RETVAL"), NULL_TREE,
1922 gnu_array_type, NULL_TREE, false, false, false,
1923 false, NULL, gnat_node);
1925 return convert (ptr_void_type_node,
1926 build_unary_op (ADDR_EXPR, NULL_TREE, gnu_decl));
1928 else
1929 gcc_unreachable ();
1930 #if 0
1931 return build2 (ALLOCATE_EXPR, ptr_void_type_node, gnu_size, gnu_align);
1932 #endif
1934 else
1936 if (Nkind (gnat_node) != N_Allocator || !Comes_From_Source (gnat_node))
1937 Check_No_Implicit_Heap_Alloc (gnat_node);
1939 /* If the allocator size is 32bits but the pointer size is 64bits then
1940 allocate 32bit memory (sometimes necessary on 64bit VMS). Otherwise
1941 default to standard malloc. */
1942 if (UI_To_Int (Esize (Etype (gnat_node))) == 32 && POINTER_SIZE == 64)
1943 return build_call_1_expr (malloc32_decl, gnu_size);
1944 else
1945 return build_call_1_expr (malloc_decl, gnu_size);
1949 /* Build a GCC tree to correspond to allocating an object of TYPE whose
1950 initial value is INIT, if INIT is nonzero. Convert the expression to
1951 RESULT_TYPE, which must be some type of pointer. Return the tree.
1952 GNAT_PROC and GNAT_POOL optionally give the procedure to call and
1953 the storage pool to use. GNAT_NODE is used to provide an error
1954 location for restriction violation messages. If IGNORE_INIT_TYPE is
1955 true, ignore the type of INIT for the purpose of determining the size;
1956 this will cause the maximum size to be allocated if TYPE is of
1957 self-referential size. */
1959 tree
1960 build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
1961 Entity_Id gnat_pool, Node_Id gnat_node, bool ignore_init_type)
1963 tree size = TYPE_SIZE_UNIT (type);
1964 tree result;
1965 unsigned int default_allocator_alignment
1966 = get_target_default_allocator_alignment () * BITS_PER_UNIT;
1968 /* If the initializer, if present, is a NULL_EXPR, just return a new one. */
1969 if (init && TREE_CODE (init) == NULL_EXPR)
1970 return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));
1972 /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
1973 sizes of the object and its template. Allocate the whole thing and
1974 fill in the parts that are known. */
1975 else if (TYPE_FAT_OR_THIN_POINTER_P (result_type))
1977 tree storage_type
1978 = build_unc_object_type_from_ptr (result_type, type,
1979 get_identifier ("ALLOC"));
1980 tree template_type = TREE_TYPE (TYPE_FIELDS (storage_type));
1981 tree storage_ptr_type = build_pointer_type (storage_type);
1982 tree storage;
1983 tree template_cons = NULL_TREE;
/* The storage object covers both the template and the data; compute its
   size with INIT substituted for any self-referential placeholder.  */
1985 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
1986 init);
1988 /* If the size overflows, pass -1 so the allocator will raise
1989 storage error. */
1990 if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
1991 size = ssize_int (-1);
1993 storage = build_call_alloc_dealloc (NULL_TREE, size,
1994 TYPE_ALIGN (storage_type),
1995 gnat_proc, gnat_pool, gnat_node);
/* STORAGE is referenced several times below, so guard it against being
   evaluated (and thus allocated) more than once.  */
1996 storage = convert (storage_ptr_type, protect_multiple_eval (storage));
/* Look through a padding record to the actual field type so that INIT
   can be converted to it.  */
1998 if (TREE_CODE (type) == RECORD_TYPE && TYPE_IS_PADDING_P (type))
2000 type = TREE_TYPE (TYPE_FIELDS (type));
2002 if (init)
2003 init = convert (type, init);
2006 /* If there is an initializing expression, make a constructor for
2007 the entire object including the bounds and copy it into the
2008 object. If there is no initializing expression, just set the
2009 bounds. */
2010 if (init)
2012 template_cons = tree_cons (TREE_CHAIN (TYPE_FIELDS (storage_type)),
2013 init, NULL_TREE)
2014 template_cons = tree_cons (TYPE_FIELDS (storage_type),
2015 build_template (template_type, type,
2016 init),
2017 template_cons);
2019 return convert
2020 (result_type,
2021 build2 (COMPOUND_EXPR, storage_ptr_type,
2022 build_binary_op
2023 (MODIFY_EXPR, storage_type,
2024 build_unary_op (INDIRECT_REF, NULL_TREE,
2025 convert (storage_ptr_type, storage)),
2026 gnat_build_constructor (storage_type, template_cons)),
2027 convert (storage_ptr_type, storage)));
2029 else
2030 return build2
2031 (COMPOUND_EXPR, result_type,
2032 build_binary_op
2033 (MODIFY_EXPR, template_type,
2034 build_component_ref
2035 (build_unary_op (INDIRECT_REF, NULL_TREE,
2036 convert (storage_ptr_type, storage)),
2037 NULL_TREE, TYPE_FIELDS (storage_type), 0),
2038 build_template (template_type, type, NULL_TREE)),
2039 convert (result_type, convert (storage_ptr_type, storage)));
2042 /* If we have an initializing expression, see if its size is simpler
2043 than the size from the type. */
2044 if (!ignore_init_type && init && TYPE_SIZE_UNIT (TREE_TYPE (init))
2045 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
2046 || CONTAINS_PLACEHOLDER_P (size)))
2047 size = TYPE_SIZE_UNIT (TREE_TYPE (init));
2049 /* If the size is still self-referential, reference the initializing
2050 expression, if it is present. If not, this must have been a
2051 call to allocate a library-level object, in which case we use
2052 the maximum size. */
2053 if (CONTAINS_PLACEHOLDER_P (size))
2055 if (!ignore_init_type && init)
2056 size = substitute_placeholder_in_expr (size, init);
2057 else
2058 size = max_size (size, true);
2061 /* If the size overflows, pass -1 so the allocator will raise
2062 storage error. */
2063 if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
2064 size = ssize_int (-1);
2066 /* If this is in the default storage pool and the type alignment is larger
2067 than what the default allocator supports, make an "aligning" record type
2068 with room to store a pointer before the field, allocate an object of that
2069 type, store the system's allocator return value just in front of the
2070 field and return the field's address. */
2072 if (No (gnat_proc) && TYPE_ALIGN (type) > default_allocator_alignment)
2074 /* Construct the aligning type with enough room for a pointer ahead
2075 of the field, then allocate. */
2076 tree record_type
2077 = make_aligning_type (type, TYPE_ALIGN (type), size,
2078 default_allocator_alignment,
2079 POINTER_SIZE / BITS_PER_UNIT);
2081 tree record, record_addr;
2083 record_addr
2084 = build_call_alloc_dealloc (NULL_TREE, TYPE_SIZE_UNIT (record_type),
2085 default_allocator_alignment, Empty, Empty,
2086 gnat_node);
/* RECORD_ADDR is used twice below (to derive the result and to be stored
   in front of the field), so stabilize it with a SAVE_EXPR.  */
2088 record_addr
2089 = convert (build_pointer_type (record_type),
2090 save_expr (record_addr));
2092 record = build_unary_op (INDIRECT_REF, NULL_TREE, record_addr);
2094 /* Our RESULT (the Ada allocator's value) is the super-aligned address
2095 of the internal record field ... */
2096 result
2097 = build_unary_op (ADDR_EXPR, NULL_TREE,
2098 build_component_ref
2099 (record, NULL_TREE, TYPE_FIELDS (record_type), 0));
2100 result = convert (result_type, result);
2102 /* ... with the system allocator's return value stored just in
2103 front. */
/* The slot lives one pointer-width before the aligned field, hence the
   negative POINTER_PLUS_EXPR offset from RESULT.  */
2105 tree ptr_addr
2106 = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
2107 convert (ptr_void_type_node, result),
2108 size_int (-POINTER_SIZE/BITS_PER_UNIT));
2110 tree ptr_ref
2111 = convert (build_pointer_type (ptr_void_type_node), ptr_addr);
2113 result
2114 = build2 (COMPOUND_EXPR, TREE_TYPE (result),
2115 build_binary_op (MODIFY_EXPR, NULL_TREE,
2116 build_unary_op (INDIRECT_REF, NULL_TREE,
2117 ptr_ref),
2118 convert (ptr_void_type_node,
2119 record_addr)),
2120 result);
2123 else
2124 result = convert (result_type,
2125 build_call_alloc_dealloc (NULL_TREE, size,
2126 TYPE_ALIGN (type),
2127 gnat_proc,
2128 gnat_pool,
2129 gnat_node));
2131 /* If we have an initial value, put the new address into a SAVE_EXPR, assign
2132 the value, and return the address. Do this with a COMPOUND_EXPR. */
2134 if (init)
2136 result = save_expr (result);
2137 result
2138 = build2 (COMPOUND_EXPR, TREE_TYPE (result),
2139 build_binary_op
2140 (MODIFY_EXPR, NULL_TREE,
2141 build_unary_op (INDIRECT_REF,
2142 TREE_TYPE (TREE_TYPE (result)), result),
2143 init),
2144 result);
2147 return convert (result_type, result);
2150 /* Fill in a VMS descriptor for EXPR and return a constructor for it.
2151 GNAT_FORMAL is how we find the descriptor record. */
2153 tree
2154 fill_vms_descriptor (tree expr, Entity_Id gnat_formal)
2156 tree record_type = TREE_TYPE (TREE_TYPE (get_gnu_tree (gnat_formal)));
2157 tree field;
2158 tree const_list = NULL_TREE;
2160 expr = maybe_unconstrained_array (expr);
2161 gnat_mark_addressable (expr);
2163 for (field = TYPE_FIELDS (record_type); field; field = TREE_CHAIN (field))
2164 const_list
2165 = tree_cons (field,
2166 convert (TREE_TYPE (field),
2167 SUBSTITUTE_PLACEHOLDER_IN_EXPR
2168 (DECL_INITIAL (field), expr)),
2169 const_list);
2171 return gnat_build_constructor (record_type, nreverse (const_list));
2174 /* Indicate that we need to make the address of EXPR_NODE and it therefore
2175 should not be allocated in a register. Returns true if successful. */
2177 bool
2178 gnat_mark_addressable (tree expr_node)
2180 while (1)
2181 switch (TREE_CODE (expr_node))
2183 case ADDR_EXPR:
2184 case COMPONENT_REF:
2185 case ARRAY_REF:
2186 case ARRAY_RANGE_REF:
2187 case REALPART_EXPR:
2188 case IMAGPART_EXPR:
2189 case VIEW_CONVERT_EXPR:
2190 case NON_LVALUE_EXPR:
2191 CASE_CONVERT:
2192 expr_node = TREE_OPERAND (expr_node, 0);
2193 break;
2195 case CONSTRUCTOR:
2196 TREE_ADDRESSABLE (expr_node) = 1;
2197 return true;
2199 case VAR_DECL:
2200 case PARM_DECL:
2201 case RESULT_DECL:
2202 TREE_ADDRESSABLE (expr_node) = 1;
2203 return true;
2205 case FUNCTION_DECL:
2206 TREE_ADDRESSABLE (expr_node) = 1;
2207 return true;
2209 case CONST_DECL:
2210 return (DECL_CONST_CORRESPONDING_VAR (expr_node)
2211 && (gnat_mark_addressable
2212 (DECL_CONST_CORRESPONDING_VAR (expr_node))));
2213 default:
2214 return true;