/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                               U T I L S 2                                *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2008, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software;  you can redistribute it and/or modify it under   *
 * terms of the GNU General Public License as published by the Free Soft-   *
 * ware Foundation;  either version 3,  or (at your option) any later ver-  *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY;  without even the implied warranty of MERCHANTABILITY  *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for more details.  You should have received a copy of the GNU General    *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed by the GNAT team at New York University.   *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "ggc.h"
#include "flags.h"
#include "output.h"
#include "ada.h"
#include "types.h"
#include "atree.h"
#include "stringt.h"
#include "namet.h"
#include "uintp.h"
#include "fe.h"
#include "elists.h"
#include "nlists.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"

static tree find_common_type (tree, tree);
static bool contains_save_expr_p (tree);
static tree contains_null_expr (tree);
static tree compare_arrays (tree, tree, tree);
static tree nonbinary_modular_operation (enum tree_code, tree, tree, tree);
static tree build_simple_component_ref (tree, tree, tree, bool);

/* Prepare EXPR to be an argument of a TRUTH_NOT_EXPR or other logical
   operation.

   This preparation consists of taking the ordinary representation of
   an expression EXPR and producing a valid tree boolean expression
   describing whether EXPR is nonzero.  We could simply always do

      build_binary_op (NE_EXPR, type, expr,
                       fold_convert (type, integer_zero_node)),

   but we optimize comparisons, &&, ||, and !.

   The resulting type should always be the same as the input type.
   This function is simpler than the corresponding C version since
   the only possible operands will be things of Boolean type.  */

tree
gnat_truthvalue_conversion (tree expr)
{
  tree type = TREE_TYPE (expr);

  switch (TREE_CODE (expr))
    {
    case EQ_EXPR:  case NE_EXPR:  case LE_EXPR:  case GE_EXPR:
    case LT_EXPR:  case GT_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case ERROR_MARK:
      return expr;

    case INTEGER_CST:
      return (integer_zerop (expr)
              ? build_int_cst (type, 0)
              : build_int_cst (type, 1));

    case REAL_CST:
      return (real_zerop (expr)
              ? fold_convert (type, integer_zero_node)
              : fold_convert (type, integer_one_node));

    case COND_EXPR:
      /* Distribute the conversion into the arms of a COND_EXPR.  */
      {
        tree arg1 = gnat_truthvalue_conversion (TREE_OPERAND (expr, 1));
        tree arg2 = gnat_truthvalue_conversion (TREE_OPERAND (expr, 2));
        return fold_build3 (COND_EXPR, type, TREE_OPERAND (expr, 0),
                            arg1, arg2);
      }

    default:
      return build_binary_op (NE_EXPR, type, expr,
                              fold_convert (type, integer_zero_node));
    }
}

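/* A typical use is the TRUTH_NOT_EXPR case of build_unary_op later in
   this file, which normalizes its operand before negating it:

     result = invert_truthvalue (gnat_truthvalue_conversion (operand));

   Any Boolean-valued tree is thus put into a canonical truth-value form
   first, so the negation never sees a raw non-comparison expression.  */
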
/* Return the base type of TYPE.  */

tree
get_base_type (tree type)
{
  if (TREE_CODE (type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (type))
    type = TREE_TYPE (TYPE_FIELDS (type));

  while (TREE_TYPE (type)
         && (TREE_CODE (type) == INTEGER_TYPE
             || TREE_CODE (type) == REAL_TYPE))
    type = TREE_TYPE (type);

  return type;
}

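/* For instance, given a scalar subtype chain such as an INTEGER_TYPE for
   Positive whose TREE_TYPE is the GNU type for Integer, the loop above
   walks up and returns the type for Integer; a justified modular record
   wrapper is looked through first.  */
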
/* EXP is a GCC tree representing an address.  See if we can find how
   strictly the object at that address is aligned.  Return that alignment
   in bits.  If we don't know anything about the alignment, return 0.  */

unsigned int
known_alignment (tree exp)
{
  unsigned int this_alignment;
  unsigned int lhs, rhs;
  unsigned int type_alignment;

  /* For pointer expressions, we know that the designated object is always at
     least as strictly aligned as the designated subtype, so we account for
     both type and expression information in this case.

     Beware that we can still get a dummy designated subtype here (e.g. Taft
     Amendment types), in which case the alignment information is meaningless
     and should be ignored.

     We always compute a type_alignment value and return the MAX of it
     compared with what we get from the expression tree.  Just set the
     type_alignment value to 0 when the type information is to be ignored.  */
  type_alignment
    = ((POINTER_TYPE_P (TREE_TYPE (exp))
        && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp))))
       ? TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))) : 0);

  switch (TREE_CODE (exp))
    {
    case CONVERT_EXPR:
    case VIEW_CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:
      /* Conversions between pointers and integers don't change the alignment
         of the underlying object.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 0));
      break;

    case COMPOUND_EXPR:
      /* The value of a COMPOUND_EXPR is that of its second operand.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
    case MINUS_EXPR:
      /* If two addresses are added, the alignment of the result is the
         minimum of the two alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));
      this_alignment = MIN (lhs, rhs);
      break;

    case COND_EXPR:
      /* If there is a choice between two values, use the smaller one.  */
      lhs = known_alignment (TREE_OPERAND (exp, 1));
      rhs = known_alignment (TREE_OPERAND (exp, 2));
      this_alignment = MIN (lhs, rhs);
      break;

    case INTEGER_CST:
      /* The first part of this represents the lowest bit in the constant,
         but it is in bytes, not bits.  */
      this_alignment
        = MIN (BITS_PER_UNIT
               * (TREE_INT_CST_LOW (exp) & - TREE_INT_CST_LOW (exp)),
               BIGGEST_ALIGNMENT);
      break;
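
      /* E.g. for EXP the constant 24 = 0b11000, the lowest set bit gives
         24 & -24 = 8 bytes, so on a target with 8-bit units this yields
         MIN (8 * 8, BIGGEST_ALIGNMENT), i.e. a known alignment of 64 bits
         (or BIGGEST_ALIGNMENT if that is smaller).  */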
    case MULT_EXPR:
      /* If we know the alignment of just one side, use it.  Otherwise,
         use the product of the alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));

      if (lhs == 0 || rhs == 0)
        this_alignment = MIN (BIGGEST_ALIGNMENT, MAX (lhs, rhs));
      else
        this_alignment = MIN (BIGGEST_ALIGNMENT, lhs * rhs);
      break;

    case BIT_AND_EXPR:
      /* A bit-and expression is as aligned as the maximum alignment of the
         operands.  We typically get here for a complex lhs and a constant
         negative power of two on the rhs to force an explicit alignment, so
         don't bother looking at the lhs.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case ADDR_EXPR:
      this_alignment = expr_align (TREE_OPERAND (exp, 0));
      break;

    default:
      this_alignment = 0;
      break;
    }

  return MAX (type_alignment, this_alignment);
}

/* We have a comparison or assignment operation on two types, T1 and T2, which
   are either both array types or both record types.  T1 is assumed to be for
   the left hand side operand, and T2 for the right hand side.  Return the
   type that both operands should be converted to for the operation, if any.
   Otherwise return zero.  */

static tree
find_common_type (tree t1, tree t2)
{
  /* ??? As of today, various constructs lead here with types of different
     sizes even when both constants (e.g. tagged types, packable vs regular
     component types, padded vs unpadded types, ...).  While some of these
     would better be handled upstream (types should be made consistent before
     calling into build_binary_op), some others are really expected and we
     have to be careful.  */

  /* We must prevent writing more than what the target may hold if this is for
     an assignment, and the case of tagged types is handled in build_binary_op,
     so use the lhs type if it is known to be smaller, or of constant size and
     the rhs type is not, whatever the modes.  We also force t1 in case of
     constant size equality to minimize occurrences of view conversions on the
     lhs of assignments.  */
  if (TREE_CONSTANT (TYPE_SIZE (t1))
      && (!TREE_CONSTANT (TYPE_SIZE (t2))
          || !tree_int_cst_lt (TYPE_SIZE (t2), TYPE_SIZE (t1))))
    return t1;

  /* Otherwise, if the lhs type is non-BLKmode, use it.  Note that we know
     that we will not have any alignment problems since, if we did, the
     non-BLKmode type could not have been used.  */
  if (TYPE_MODE (t1) != BLKmode)
    return t1;

  /* If the rhs type is of constant size, use it whatever the modes.  At
     this point it is known to be smaller, or of constant size and the
     lhs type is not.  */
  if (TREE_CONSTANT (TYPE_SIZE (t2)))
    return t2;

  /* Otherwise, if the rhs type is non-BLKmode, use it.  */
  if (TYPE_MODE (t2) != BLKmode)
    return t2;

  /* In this case, both types have variable size and BLKmode.  It's
     probably best to leave the "type mismatch" because changing it
     could cause a bad self-referential reference.  */
  return NULL_TREE;
}

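/* For instance, for an assignment whose lhs record type has a
   compile-time-constant TYPE_SIZE while the rhs type's size is
   self-referential, the first test above selects the lhs type, so no more
   bytes than the target object can hold are ever written.  */
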
/* See if EXP contains a SAVE_EXPR in a position where we would
   normally put it.

   ??? This is a real kludge, but is probably the best approach short
   of some very general solution.  */

static bool
contains_save_expr_p (tree exp)
{
  switch (TREE_CODE (exp))
    {
    case SAVE_EXPR:
      return true;

    case ADDR_EXPR:  case INDIRECT_REF:
    case COMPONENT_REF:
    case NOP_EXPR:  case CONVERT_EXPR:  case VIEW_CONVERT_EXPR:
      return contains_save_expr_p (TREE_OPERAND (exp, 0));

    case CONSTRUCTOR:
      {
        tree value;
        unsigned HOST_WIDE_INT ix;

        FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), ix, value)
          if (contains_save_expr_p (value))
            return true;
        return false;
      }

    default:
      return false;
    }
}

/* See if EXP contains a NULL_EXPR in an expression we use for sizes.  Return
   it if so.  This is used to detect types whose sizes involve computations
   that are known to raise Constraint_Error.  */

static tree
contains_null_expr (tree exp)
{
  tree tem;

  if (TREE_CODE (exp) == NULL_EXPR)
    return exp;

  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case tcc_unary:
      return contains_null_expr (TREE_OPERAND (exp, 0));

    case tcc_comparison:
    case tcc_binary:
      tem = contains_null_expr (TREE_OPERAND (exp, 0));
      if (tem)
        return tem;

      return contains_null_expr (TREE_OPERAND (exp, 1));

    case tcc_expression:
      switch (TREE_CODE (exp))
        {
        case SAVE_EXPR:
          return contains_null_expr (TREE_OPERAND (exp, 0));

        case COND_EXPR:
          tem = contains_null_expr (TREE_OPERAND (exp, 0));
          if (tem)
            return tem;

          tem = contains_null_expr (TREE_OPERAND (exp, 1));
          if (tem)
            return tem;

          return contains_null_expr (TREE_OPERAND (exp, 2));

        default:
          return 0;
        }

    default:
      return 0;
    }
}

/* Return an expression tree representing an equality comparison of
   A1 and A2, two objects of ARRAY_TYPE.  The returned expression should
   be of type RESULT_TYPE.

   Two arrays are equal in one of two ways: (1) if both have zero length
   in some dimension (not necessarily the same dimension) or (2) if the
   lengths in each dimension are equal and the data is equal.  We perform the
   length tests in as efficient a manner as possible.  */

static tree
compare_arrays (tree result_type, tree a1, tree a2)
{
  tree t1 = TREE_TYPE (a1);
  tree t2 = TREE_TYPE (a2);
  tree result = convert (result_type, integer_one_node);
  tree a1_is_null = convert (result_type, integer_zero_node);
  tree a2_is_null = convert (result_type, integer_zero_node);
  bool length_zero_p = false;

  /* Process each dimension separately and compare the lengths.  If any
     dimension has a length known to be zero, set LENGTH_ZERO_P to true
     to suppress the comparison of the data.  */
  while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
    {
      tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
      tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
      tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
      tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
      tree bt = get_base_type (TREE_TYPE (lb1));
      tree length1 = fold_build2 (MINUS_EXPR, bt, ub1, lb1);
      tree length2 = fold_build2 (MINUS_EXPR, bt, ub2, lb2);
      tree nbt;
      tree tem;
      tree comparison, this_a1_is_null, this_a2_is_null;

      /* If the length of the first array is a constant, swap our operands
         unless the length of the second array is the constant zero.
         Note that we have set the `length' values to the length - 1.  */
      if (TREE_CODE (length1) == INTEGER_CST
          && !integer_zerop (fold_build2 (PLUS_EXPR, bt, length2,
                                          convert (bt, integer_one_node))))
        {
          tem = a1, a1 = a2, a2 = tem;
          tem = t1, t1 = t2, t2 = tem;
          tem = lb1, lb1 = lb2, lb2 = tem;
          tem = ub1, ub1 = ub2, ub2 = tem;
          tem = length1, length1 = length2, length2 = tem;
          tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
        }

      /* If the length of this dimension in the second array is the constant
         zero, we can just go inside the original bounds for the first
         array and see if last < first.  */
      if (integer_zerop (fold_build2 (PLUS_EXPR, bt, length2,
                                      convert (bt, integer_one_node))))
        {
          tree ub = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
          tree lb = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));

          comparison = build_binary_op (LT_EXPR, result_type, ub, lb);
          comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
          length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);

          length_zero_p = true;
          this_a1_is_null = comparison;
          this_a2_is_null = convert (result_type, integer_one_node);
        }

      /* If the length is some other constant value, we know that this
         dimension in the first array cannot be superflat, so we can just
         use its length from the actual stored bounds.  */
      else if (TREE_CODE (length2) == INTEGER_CST)
        {
          ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
          lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
          ub2 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
          lb2 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
          nbt = get_base_type (TREE_TYPE (ub1));

          comparison
            = build_binary_op (EQ_EXPR, result_type,
                               build_binary_op (MINUS_EXPR, nbt, ub1, lb1),
                               build_binary_op (MINUS_EXPR, nbt, ub2, lb2));

          /* Note that we know that UB2 and LB2 are constant and hence
             cannot contain a PLACEHOLDER_EXPR.  */
          comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
          length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);

          this_a1_is_null = build_binary_op (LT_EXPR, result_type, ub1, lb1);
          this_a2_is_null = convert (result_type, integer_zero_node);
        }

      /* Otherwise compare the computed lengths.  */
      else
        {
          length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
          length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);

          comparison
            = build_binary_op (EQ_EXPR, result_type, length1, length2);

          this_a1_is_null
            = build_binary_op (LT_EXPR, result_type, length1,
                               convert (bt, integer_zero_node));
          this_a2_is_null
            = build_binary_op (LT_EXPR, result_type, length2,
                               convert (bt, integer_zero_node));
        }

      result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
                                result, comparison);

      a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                    this_a1_is_null, a1_is_null);
      a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                    this_a2_is_null, a2_is_null);

      t1 = TREE_TYPE (t1);
      t2 = TREE_TYPE (t2);
    }

  /* Unless the length of some dimension is known to be zero, compare the
     data in the array.  */
  if (!length_zero_p)
    {
      tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));

      if (type)
        a1 = convert (type, a1), a2 = convert (type, a2);

      result = build_binary_op (TRUTH_ANDIF_EXPR, result_type, result,
                                fold_build2 (EQ_EXPR, result_type, a1, a2));
    }

  /* The result is also true if both sizes are zero.  */
  result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                            build_binary_op (TRUTH_ANDIF_EXPR, result_type,
                                             a1_is_null, a2_is_null),
                            result);

  /* If either operand contains SAVE_EXPRs, they have to be evaluated before
     starting the comparison above since the place it would be otherwise
     evaluated would be wrong.  */
  if (contains_save_expr_p (a1))
    result = build2 (COMPOUND_EXPR, result_type, a1, result);

  if (contains_save_expr_p (a2))
    result = build2 (COMPOUND_EXPR, result_type, a2, result);

  return result;
}

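/* At the Ada level this implements, e.g., the fact that two null arrays
   with different bounds compare equal:

     S1 : String (1 .. 0);
     S2 : String (5 .. 2);
     pragma Assert (S1 = S2);   --  holds: both lengths are zero

   whereas arrays with equal, nonzero lengths are compared element-wise.  */
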
/* Compute the result of applying OP_CODE to LHS and RHS, where both are of
   type TYPE.  We know that TYPE is a modular type with a nonbinary
   modulus.  */

static tree
nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
                             tree rhs)
{
  tree modulus = TYPE_MODULUS (type);
  unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
  unsigned int precision;
  bool unsignedp = true;
  tree op_type = type;
  tree result;

  /* If this is an addition of a constant, convert it to a subtraction
     of a constant since we can do that faster.  */
  if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
    {
      rhs = fold_build2 (MINUS_EXPR, type, modulus, rhs);
      op_code = MINUS_EXPR;
    }

  /* For the logical operations, we only need PRECISION bits.  For
     addition and subtraction, we need one more and for multiplication we
     need twice as many.  But we never want to make a size smaller than
     our size.  */
  if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
    needed_precision += 1;
  else if (op_code == MULT_EXPR)
    needed_precision *= 2;

  precision = MAX (needed_precision, TYPE_PRECISION (op_type));

  /* Unsigned will do for everything but subtraction.  */
  if (op_code == MINUS_EXPR)
    unsignedp = false;

  /* If our type is the wrong signedness or isn't wide enough, make a new
     type and convert both our operands to it.  */
  if (TYPE_PRECISION (op_type) < precision
      || TYPE_UNSIGNED (op_type) != unsignedp)
    {
      /* Copy the node so we ensure it can be modified to make it modular.  */
      op_type = copy_node (gnat_type_for_size (precision, unsignedp));
      modulus = convert (op_type, modulus);
      SET_TYPE_MODULUS (op_type, modulus);
      TYPE_MODULAR_P (op_type) = 1;
      lhs = convert (op_type, lhs);
      rhs = convert (op_type, rhs);
    }

  /* Do the operation, then we'll fix it up.  */
  result = fold_build2 (op_code, op_type, lhs, rhs);

  /* For multiplication, we have no choice but to do a full modulus
     operation.  However, we want to do this in the narrowest
     possible size.  */
  if (op_code == MULT_EXPR)
    {
      tree div_type = copy_node (gnat_type_for_size (needed_precision, 1));
      modulus = convert (div_type, modulus);
      SET_TYPE_MODULUS (div_type, modulus);
      TYPE_MODULAR_P (div_type) = 1;
      result = convert (op_type,
                        fold_build2 (TRUNC_MOD_EXPR, div_type,
                                     convert (div_type, result), modulus));
    }

  /* For subtraction, add the modulus back if we are negative.  */
  else if (op_code == MINUS_EXPR)
    {
      result = save_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
                            fold_build2 (LT_EXPR, integer_type_node, result,
                                         convert (op_type, integer_zero_node)),
                            fold_build2 (PLUS_EXPR, op_type, result, modulus),
                            result);
    }

  /* For the other operations, subtract the modulus if we are >= it.  */
  else
    {
      result = save_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
                            fold_build2 (GE_EXPR, integer_type_node,
                                         result, modulus),
                            fold_build2 (MINUS_EXPR, op_type,
                                         result, modulus),
                            result);
    }

  return convert (type, result);
}

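/* As a worked example, take an Ada type declared as "type M7 is mod 7" and
   the addition X + 3: the constant is folded into a subtraction
   X - (7 - 3) = X - 4, carried out in a signed type at least one bit wider
   than the 3 bits the modulus needs, with the modulus added back whenever
   the difference is negative.  For X = 2, 2 - 4 = -2 < 0, so the result is
   -2 + 7 = 5, which indeed equals (2 + 3) mod 7.  */
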
/* Make a binary operation of kind OP_CODE.  RESULT_TYPE is the type
   desired for the result.  Usually the operation is to be performed
   in that type.  For MODIFY_EXPR and ARRAY_REF, RESULT_TYPE may be 0
   in which case the type to be used will be derived from the operands.

   This function is very much unlike the ones for C and C++ since we
   have already done any type conversion and matching required.  All we
   have to do here is validate the work done by SEM and handle subtypes.  */

tree
build_binary_op (enum tree_code op_code, tree result_type,
                 tree left_operand, tree right_operand)
{
  tree left_type = TREE_TYPE (left_operand);
  tree right_type = TREE_TYPE (right_operand);
  tree left_base_type = get_base_type (left_type);
  tree right_base_type = get_base_type (right_type);
  tree operation_type = result_type;
  tree best_type = NULL_TREE;
  tree modulus, result;
  bool has_side_effects = false;

  if (operation_type
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  if (operation_type
      && !AGGREGATE_TYPE_P (operation_type)
      && TYPE_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  modulus = (operation_type
             && TREE_CODE (operation_type) == INTEGER_TYPE
             && TYPE_MODULAR_P (operation_type)
             ? TYPE_MODULUS (operation_type) : NULL_TREE);

  switch (op_code)
    {
    case MODIFY_EXPR:
      /* If there were integral or pointer conversions on the LHS, remove
         them; we'll be putting them back below if needed.  Likewise for
         conversions between array and record types, except for justified
         modular types.  But don't do this if the right operand is not
         BLKmode (for packed arrays) unless we are not changing the mode.  */
      while ((TREE_CODE (left_operand) == CONVERT_EXPR
              || TREE_CODE (left_operand) == NOP_EXPR
              || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
             && (((INTEGRAL_TYPE_P (left_type)
                   || POINTER_TYPE_P (left_type))
                  && (INTEGRAL_TYPE_P (TREE_TYPE
                                       (TREE_OPERAND (left_operand, 0)))
                      || POINTER_TYPE_P (TREE_TYPE
                                         (TREE_OPERAND (left_operand, 0)))))
                 || (((TREE_CODE (left_type) == RECORD_TYPE
                       && !TYPE_JUSTIFIED_MODULAR_P (left_type))
                      || TREE_CODE (left_type) == ARRAY_TYPE)
                     && ((TREE_CODE (TREE_TYPE
                                     (TREE_OPERAND (left_operand, 0)))
                          == RECORD_TYPE)
                         || (TREE_CODE (TREE_TYPE
                                        (TREE_OPERAND (left_operand, 0)))
                             == ARRAY_TYPE))
                     && (TYPE_MODE (right_type) == BLKmode
                         || (TYPE_MODE (left_type)
                             == TYPE_MODE (TREE_TYPE
                                           (TREE_OPERAND
                                            (left_operand, 0))))))))
        {
          left_operand = TREE_OPERAND (left_operand, 0);
          left_type = TREE_TYPE (left_operand);
        }

      /* If a class-wide type may be involved, force use of the RHS type.  */
      if ((TREE_CODE (right_type) == RECORD_TYPE
           || TREE_CODE (right_type) == UNION_TYPE)
          && TYPE_ALIGN_OK (right_type))
        operation_type = right_type;

      /* If we are copying between padded objects with compatible types, use
         the padded view of the objects: this is very likely more efficient.
         Likewise for a padded object that is assigned a constructor, in
         order to avoid putting a VIEW_CONVERT_EXPR on the LHS.  But don't
         do this if we wouldn't have actually copied anything.  */
      else if (TREE_CODE (left_type) == RECORD_TYPE
               && TYPE_IS_PADDING_P (left_type)
               && TREE_CONSTANT (TYPE_SIZE (left_type))
               && ((TREE_CODE (right_operand) == COMPONENT_REF
                    && TREE_CODE (TREE_TYPE (TREE_OPERAND (right_operand, 0)))
                       == RECORD_TYPE
                    && TYPE_IS_PADDING_P
                       (TREE_TYPE (TREE_OPERAND (right_operand, 0)))
                    && gnat_types_compatible_p
                       (left_type,
                        TREE_TYPE (TREE_OPERAND (right_operand, 0))))
                   || TREE_CODE (right_operand) == CONSTRUCTOR)
               && !integer_zerop (TYPE_SIZE (right_type)))
        operation_type = left_type;

      /* Find the best type to use for copying between aggregate types.  */
      else if (((TREE_CODE (left_type) == ARRAY_TYPE
                 && TREE_CODE (right_type) == ARRAY_TYPE)
                || (TREE_CODE (left_type) == RECORD_TYPE
                    && TREE_CODE (right_type) == RECORD_TYPE))
               && (best_type = find_common_type (left_type, right_type)))
        operation_type = best_type;

      /* Otherwise use the LHS type.  */
      else if (!operation_type)
        operation_type = left_type;

      /* Ensure everything on the LHS is valid.  If we have a field reference,
         strip anything that get_inner_reference can handle.  Then remove any
         conversions between types having the same code and mode.  And mark
         VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE.  When done, we must have
         either an INDIRECT_REF, a NULL_EXPR or a DECL node.  */
      result = left_operand;
      while (true)
        {
          tree restype = TREE_TYPE (result);

          if (TREE_CODE (result) == COMPONENT_REF
              || TREE_CODE (result) == ARRAY_REF
              || TREE_CODE (result) == ARRAY_RANGE_REF)
            while (handled_component_p (result))
              result = TREE_OPERAND (result, 0);
          else if (TREE_CODE (result) == REALPART_EXPR
                   || TREE_CODE (result) == IMAGPART_EXPR
                   || ((TREE_CODE (result) == NOP_EXPR
                        || TREE_CODE (result) == CONVERT_EXPR)
                       && (((TREE_CODE (restype)
                             == TREE_CODE (TREE_TYPE
                                           (TREE_OPERAND (result, 0))))
                            && (TYPE_MODE (TREE_TYPE
                                           (TREE_OPERAND (result, 0)))
                                == TYPE_MODE (restype)))
                           || TYPE_ALIGN_OK (restype))))
            result = TREE_OPERAND (result, 0);
          else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
            {
              TREE_ADDRESSABLE (result) = 1;
              result = TREE_OPERAND (result, 0);
            }
          else
            break;
        }

      gcc_assert (TREE_CODE (result) == INDIRECT_REF
                  || TREE_CODE (result) == NULL_EXPR
                  || DECL_P (result));

      /* Convert the right operand to the operation type unless it is
         either already of the correct type or if the type involves a
         placeholder, since the RHS may not have the same record type.  */
      if (operation_type != right_type
          && !CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type)))
        {
          right_operand = convert (operation_type, right_operand);
          right_type = operation_type;
        }

      /* If the left operand is not of the same type as the operation
         type, wrap it up in a VIEW_CONVERT_EXPR.  */
      if (left_type != operation_type)
        left_operand = unchecked_convert (operation_type, left_operand, false);

      has_side_effects = true;
      modulus = NULL_TREE;
      break;

    case ARRAY_REF:
      if (!operation_type)
        operation_type = TREE_TYPE (left_type);

      /* ... fall through ... */

    case ARRAY_RANGE_REF:
      /* First look through conversion between type variants.  Note that
         this changes neither the operation type nor the type domain.  */
      if (TREE_CODE (left_operand) == VIEW_CONVERT_EXPR
          && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (left_operand, 0)))
             == TYPE_MAIN_VARIANT (left_type))
        {
          left_operand = TREE_OPERAND (left_operand, 0);
          left_type = TREE_TYPE (left_operand);
        }

      /* Then convert the right operand to its base type.  This will
         prevent unneeded signedness conversions when sizetype is wider than
         integer.  */
      right_operand = convert (right_base_type, right_operand);
      right_operand = convert (TYPE_DOMAIN (left_type), right_operand);

      if (!TREE_CONSTANT (right_operand)
          || !TREE_CONSTANT (TYPE_MIN_VALUE (right_type)))
        gnat_mark_addressable (left_operand);

      modulus = NULL_TREE;
      break;

    case GE_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
      gcc_assert (!POINTER_TYPE_P (left_type));

      /* ... fall through ... */

    case EQ_EXPR:
    case NE_EXPR:
      /* If either operand is a NULL_EXPR, just return a new one.  */
      if (TREE_CODE (left_operand) == NULL_EXPR)
        return build2 (op_code, result_type,
                       build1 (NULL_EXPR, integer_type_node,
                               TREE_OPERAND (left_operand, 0)),
                       integer_zero_node);

      else if (TREE_CODE (right_operand) == NULL_EXPR)
        return build2 (op_code, result_type,
                       build1 (NULL_EXPR, integer_type_node,
                               TREE_OPERAND (right_operand, 0)),
                       integer_zero_node);

      /* If either object is of a justified modular type, get the fields
         from within.  */
      if (TREE_CODE (left_type) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (left_type))
        {
          left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
                                  left_operand);
          left_type = TREE_TYPE (left_operand);
          left_base_type = get_base_type (left_type);
        }

      if (TREE_CODE (right_type) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (right_type))
        {
          right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
                                   right_operand);
          right_type = TREE_TYPE (right_operand);
          right_base_type = get_base_type (right_type);
        }

      /* If both objects are arrays, compare them specially.  */
      if ((TREE_CODE (left_type) == ARRAY_TYPE
           || (TREE_CODE (left_type) == INTEGER_TYPE
               && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
          && (TREE_CODE (right_type) == ARRAY_TYPE
              || (TREE_CODE (right_type) == INTEGER_TYPE
                  && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
        {
          result = compare_arrays (result_type, left_operand, right_operand);

          if (op_code == NE_EXPR)
            result = invert_truthvalue (result);
          else
            gcc_assert (op_code == EQ_EXPR);

          return result;
        }

      /* Otherwise, the base types must be the same unless the objects are
         fat pointers or records.  If we have records, use the best type and
         convert both operands to that type.  */
      if (left_base_type != right_base_type)
        {
          if (TYPE_FAT_POINTER_P (left_base_type)
              && TYPE_FAT_POINTER_P (right_base_type)
              && TYPE_MAIN_VARIANT (left_base_type)
                 == TYPE_MAIN_VARIANT (right_base_type))
            best_type = left_base_type;
          else if (TREE_CODE (left_base_type) == RECORD_TYPE
                   && TREE_CODE (right_base_type) == RECORD_TYPE)
            {
              /* The only way these are permitted to be the same is if both
                 types have the same name.  In that case, one of them must
                 not be self-referential.  Use that one as the best type.
                 Even better is if one is of fixed size.  */
              gcc_assert (TYPE_NAME (left_base_type)
                          && (TYPE_NAME (left_base_type)
                              == TYPE_NAME (right_base_type)));

              if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
                best_type = left_base_type;
              else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
                best_type = right_base_type;
              else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
                best_type = left_base_type;
              else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
                best_type = right_base_type;
              else
                gcc_unreachable ();
            }
          else
            gcc_unreachable ();

          left_operand = convert (best_type, left_operand);
          right_operand = convert (best_type, right_operand);
        }

      /* If we are comparing a fat pointer against zero, we need to
         just compare the data pointer.  */
      else if (TYPE_FAT_POINTER_P (left_base_type)
               && TREE_CODE (right_operand) == CONSTRUCTOR
               && integer_zerop (VEC_index (constructor_elt,
                                            CONSTRUCTOR_ELTS (right_operand),
                                            0)
                                 ->value))
        {
          right_operand = build_component_ref (left_operand, NULL_TREE,
                                               TYPE_FIELDS (left_base_type),
                                               false);
          left_operand = convert (TREE_TYPE (right_operand),
                                  integer_zero_node);
        }
      else
        {
          left_operand = convert (left_base_type, left_operand);
          right_operand = convert (right_base_type, right_operand);
        }

      modulus = NULL_TREE;
      break;

    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      /* In these, the result type and the left operand type should be the
         same.  Do the operation in the base type of those and convert the
         right operand (which is an integer) to that type.

         Note that these operations are only used in loop control where
         we guarantee that no overflow can occur.  So nothing special need
         be done for modular types.  */
      gcc_assert (left_type == result_type);
      operation_type = get_base_type (result_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);
      has_side_effects = true;
      modulus = NULL_TREE;
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* The RHS of a shift can be any type.  Also, ignore any modulus
         (we used to abort, but this is needed for unchecked conversion
         to modular types).  Otherwise, processing is the same as normal.  */
      gcc_assert (operation_type == left_base_type);
      modulus = NULL_TREE;
      left_operand = convert (operation_type, left_operand);
      break;

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      left_operand = gnat_truthvalue_conversion (left_operand);
      right_operand = gnat_truthvalue_conversion (right_operand);
      goto common;

    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      /* For binary modulus, if the inputs are in range, so are the
         outputs.  */
      if (modulus && integer_pow2p (modulus))
        modulus = NULL_TREE;
      goto common;

    case COMPLEX_EXPR:
      gcc_assert (TREE_TYPE (result_type) == left_base_type
                  && TREE_TYPE (result_type) == right_base_type);
      left_operand = convert (left_base_type, left_operand);
      right_operand = convert (right_base_type, right_operand);
      break;

    case TRUNC_DIV_EXPR:   case TRUNC_MOD_EXPR:
    case CEIL_DIV_EXPR:    case CEIL_MOD_EXPR:
    case FLOOR_DIV_EXPR:   case FLOOR_MOD_EXPR:
    case ROUND_DIV_EXPR:   case ROUND_MOD_EXPR:
      /* These always produce results lower than either operand.  */
      modulus = NULL_TREE;
      goto common;

    case POINTER_PLUS_EXPR:
      gcc_assert (operation_type == left_base_type
                  && sizetype == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (sizetype, right_operand);
      break;

    default:
    common:
      /* The result type should be the same as the base types of both
         operands (and they should be the same).  Convert everything to
         the result type.  */
      gcc_assert (operation_type == left_base_type
                  && left_base_type == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);
    }

  if (modulus && !integer_pow2p (modulus))
    {
      result = nonbinary_modular_operation (op_code, operation_type,
                                            left_operand, right_operand);
      modulus = NULL_TREE;
    }
  /* If either operand is a NULL_EXPR, just return a new one.  */
  else if (TREE_CODE (left_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
  else if (TREE_CODE (right_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
  else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
    result = fold (build4 (op_code, operation_type, left_operand,
                           right_operand, NULL_TREE, NULL_TREE));
  else
    result
      = fold_build2 (op_code, operation_type, left_operand, right_operand);

  TREE_SIDE_EFFECTS (result) |= has_side_effects;
  TREE_CONSTANT (result)
    |= (TREE_CONSTANT (left_operand) & TREE_CONSTANT (right_operand)
        && op_code != ARRAY_REF && op_code != ARRAY_RANGE_REF);

  if ((op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
      && TYPE_VOLATILE (operation_type))
    TREE_THIS_VOLATILE (result) = 1;

  /* If we are working with modular types, perform the MOD operation
     if something above hasn't eliminated the need for it.  */
  if (modulus)
    result = fold_build2 (FLOOR_MOD_EXPR, operation_type, result,
                          convert (operation_type, modulus));

  if (result_type && result_type != operation_type)
    result = convert (result_type, result);

  return result;
}

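/* As an illustration, gnat_truthvalue_conversion above falls back on

     build_binary_op (NE_EXPR, type, expr,
                      fold_convert (type, integer_zero_node));

   for operands that are not already comparisons: both operands share the
   Boolean base type, so control reaches the common case and a simple
   NE_EXPR is folded and returned.  */
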
/* Similar, but for unary operations.  */

tree
build_unary_op (enum tree_code op_code, tree result_type, tree operand)
{
  tree type = TREE_TYPE (operand);
  tree base_type = get_base_type (type);
  tree operation_type = result_type;
  tree result;
  bool side_effects = false;

  if (operation_type
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  if (operation_type
      && !AGGREGATE_TYPE_P (operation_type)
      && TYPE_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  switch (op_code)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      if (!operation_type)
        result_type = operation_type = TREE_TYPE (type);
      else
        gcc_assert (result_type == TREE_TYPE (type));

      result = fold_build1 (op_code, operation_type, operand);
      break;

    case TRUTH_NOT_EXPR:
      gcc_assert (result_type == base_type);
      result = invert_truthvalue (gnat_truthvalue_conversion (operand));
      break;

    case ATTR_ADDR_EXPR:
    case ADDR_EXPR:
      switch (TREE_CODE (operand))
        {
        case INDIRECT_REF:
        case UNCONSTRAINED_ARRAY_REF:
          result = TREE_OPERAND (operand, 0);

          /* Make sure the type here is a pointer, not a reference.
             GCC wants pointer types for function addresses.  */
          if (!result_type)
            result_type = build_pointer_type (type);

          /* If the underlying object can alias everything, propagate the
             property since we are effectively retrieving the object.  */
          if (POINTER_TYPE_P (TREE_TYPE (result))
              && TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (result)))
            {
              if (TREE_CODE (result_type) == POINTER_TYPE
                  && !TYPE_REF_CAN_ALIAS_ALL (result_type))
                result_type
                  = build_pointer_type_for_mode (TREE_TYPE (result_type),
                                                 TYPE_MODE (result_type),
                                                 true);
              else if (TREE_CODE (result_type) == REFERENCE_TYPE
                       && !TYPE_REF_CAN_ALIAS_ALL (result_type))
                result_type
                  = build_reference_type_for_mode (TREE_TYPE (result_type),
                                                   TYPE_MODE (result_type),
                                                   true);
            }
          break;

        case NULL_EXPR:
          result = operand;
          TREE_TYPE (result) = type = build_pointer_type (type);
          break;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
        case COMPONENT_REF:
        case BIT_FIELD_REF:
          /* If this is for 'Address, find the address of the prefix and
             add the offset to the field.  Otherwise, do this the normal
             way.  */
          if (op_code == ATTR_ADDR_EXPR)
            {
              HOST_WIDE_INT bitsize;
              HOST_WIDE_INT bitpos;
              tree offset, inner;
              enum machine_mode mode;
              int unsignedp, volatilep;

              inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
                                           &mode, &unsignedp, &volatilep,
                                           false);

              /* If INNER is a padding type whose field has a self-referential
                 size, convert to that inner type.  We know the offset is zero
                 and we need to have that type visible.  */
              if (TREE_CODE (TREE_TYPE (inner)) == RECORD_TYPE
                  && TYPE_IS_PADDING_P (TREE_TYPE (inner))
                  && (CONTAINS_PLACEHOLDER_P
                      (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS
                                             (TREE_TYPE (inner)))))))
                inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
                                 inner);

              /* Compute the offset as a byte offset from INNER.  */
              if (!offset)
                offset = size_zero_node;

              if (bitpos % BITS_PER_UNIT != 0)
                post_error
                  ("taking address of object not aligned on storage unit?",
                   error_gnat_node);

              offset = size_binop (PLUS_EXPR, offset,
                                   size_int (bitpos / BITS_PER_UNIT));

              /* Take the address of INNER, convert the offset to void *, and
                 add them.  It will later be converted to the desired result
                 type, if any.  */
              inner = build_unary_op (ADDR_EXPR, NULL_TREE, inner);
              inner = convert (ptr_void_type_node, inner);
              result = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
                                        inner, offset);
              result = convert (build_pointer_type (TREE_TYPE (operand)),
                                result);
              break;
            }
          goto common;

        case CONSTRUCTOR:
          /* If this is just a constructor for a padded record, we can
             just take the address of the single field and convert it to
             a pointer to our type.  */
          if (TREE_CODE (type) == RECORD_TYPE && TYPE_IS_PADDING_P (type))
            {
              result = (VEC_index (constructor_elt,
                                   CONSTRUCTOR_ELTS (operand),
                                   0)
                        ->value);
              result = convert (build_pointer_type (TREE_TYPE (operand)),
                                build_unary_op (ADDR_EXPR, NULL_TREE, result));
              break;
            }
          goto common;

        case NOP_EXPR:
          if (AGGREGATE_TYPE_P (type)
              && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
            return build_unary_op (ADDR_EXPR, result_type,
                                   TREE_OPERAND (operand, 0));

          /* ... fallthru ... */

        case VIEW_CONVERT_EXPR:
          /* If this is just a variant conversion or if the conversion doesn't
             change the mode, get the result type from this type and go down.
             This is needed for conversions of CONST_DECLs, to eventually get
             to the address of their CORRESPONDING_VARs.  */
          if ((TYPE_MAIN_VARIANT (type)
               == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (operand, 0))))
              || (TYPE_MODE (type) != BLKmode
                  && (TYPE_MODE (type)
                      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0))))))
            return build_unary_op (ADDR_EXPR,
                                   (result_type ? result_type
                                    : build_pointer_type (type)),
                                   TREE_OPERAND (operand, 0));
          goto common;

        case CONST_DECL:
          operand = DECL_CONST_CORRESPONDING_VAR (operand);

          /* ... fall through ... */

        default:
        common:

          /* If we are taking the address of a padded record whose field
             contains a template, take the address of the template.  */
          if (TREE_CODE (type) == RECORD_TYPE
              && TYPE_IS_PADDING_P (type)
              && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
              && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
            {
              type = TREE_TYPE (TYPE_FIELDS (type));
              operand = convert (type, operand);
            }

          if (type != error_mark_node)
            operation_type = build_pointer_type (type);

          gnat_mark_addressable (operand);
          result = fold_build1 (ADDR_EXPR, operation_type, operand);
        }

      TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
      break;

    case INDIRECT_REF:
      /* If we want to refer to an entire unconstrained array,
         make up an expression to do so.  This will never survive to
         the backend.  If TYPE is a thin pointer, first convert the
         operand to a fat pointer.  */
      if (TYPE_THIN_POINTER_P (type)
          && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
        {
          operand
            = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))),
                       operand);
          type = TREE_TYPE (operand);
        }

      if (TYPE_FAT_POINTER_P (type))
        {
          result = build1 (UNCONSTRAINED_ARRAY_REF,
                           TYPE_UNCONSTRAINED_ARRAY (type), operand);
          TREE_READONLY (result) = TREE_STATIC (result)
            = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
        }
      else if (TREE_CODE (operand) == ADDR_EXPR)
        result = TREE_OPERAND (operand, 0);
      else
        {
          result = fold_build1 (op_code, TREE_TYPE (type), operand);
          TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
        }

      side_effects
        = (!TYPE_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)));
      break;

    case NEGATE_EXPR:
    case BIT_NOT_EXPR:
      {
        tree modulus = ((operation_type
                         && TREE_CODE (operation_type) == INTEGER_TYPE
                         && TYPE_MODULAR_P (operation_type))
                        ? TYPE_MODULUS (operation_type) : NULL_TREE);
        int mod_pow2 = modulus && integer_pow2p (modulus);

        /* If this is a modular type, there are various possibilities
           depending on the operation and whether the modulus is a
           power of two or not.  */
        if (modulus)
          {
            gcc_assert (operation_type == base_type);
            operand = convert (operation_type, operand);

            /* The fastest in the negate case for binary modulus is
               the straightforward code; the TRUNC_MOD_EXPR below
               is an AND operation.  */
            if (op_code == NEGATE_EXPR && mod_pow2)
              result = fold_build2 (TRUNC_MOD_EXPR, operation_type,
                                    fold_build1 (NEGATE_EXPR, operation_type,
                                                 operand),
                                    modulus);

            /* For the nonbinary negate case, return zero for a zero operand,
               else return the modulus minus the operand.  If the modulus
               is a power of two minus one, we can do the subtraction
               as an XOR since it is equivalent and faster on most
               machines.  */
            else if (op_code == NEGATE_EXPR && !mod_pow2)
              {
                if (integer_pow2p (fold_build2 (PLUS_EXPR, operation_type,
                                                modulus,
                                                convert (operation_type,
                                                         integer_one_node))))
                  result = fold_build2 (BIT_XOR_EXPR, operation_type,
                                        operand, modulus);
                else
                  result = fold_build2 (MINUS_EXPR, operation_type,
                                        modulus, operand);

                result = fold_build3 (COND_EXPR, operation_type,
                                      fold_build2 (NE_EXPR,
                                                   integer_type_node,
                                                   operand,
                                                   convert
                                                   (operation_type,
                                                    integer_zero_node)),
                                      result, operand);
              }
            else
              {
                /* For the NOT cases, we need a constant equal to
                   the modulus minus one.  For a binary modulus, we
                   XOR against the constant and subtract the operand from
                   that constant for nonbinary modulus.  */

                tree cnst = fold_build2 (MINUS_EXPR, operation_type, modulus,
                                         convert (operation_type,
                                                  integer_one_node));

                if (mod_pow2)
                  result = fold_build2 (BIT_XOR_EXPR, operation_type,
                                        operand, cnst);
                else
                  result = fold_build2 (MINUS_EXPR, operation_type,
                                        cnst, operand);
              }

            break;
          }
      }

      /* ... fall through ... */

    default:
      gcc_assert (operation_type == base_type);
      result = fold_build1 (op_code, operation_type,
                            convert (operation_type, operand));
    }

  if (side_effects)
    {
      TREE_SIDE_EFFECTS (result) = 1;
      if (TREE_CODE (result) == INDIRECT_REF)
        TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
    }

  if (result_type && TREE_TYPE (result) != result_type)
    result = convert (result_type, result);

  return result;
}

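/* A common use appears in the call builders below, which take the address
   of a FUNCTION_DECL with

     build_unary_op (ADDR_EXPR, NULL_TREE, fundecl);

   letting this routine construct the pointer type itself since no result
   type is specified.  */
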
/* Similar, but for COND_EXPR.  */

tree
build_cond_expr (tree result_type, tree condition_operand,
                 tree true_operand, tree false_operand)
{
  tree result;
  bool addr_p = false;

  /* The front-end verifies that result, true and false operands have the
     same base type.  Convert everything to the result type.  */
  true_operand = convert (result_type, true_operand);
  false_operand = convert (result_type, false_operand);

  /* If the result type is unconstrained, take the address of
     the operands and then dereference our result.  */
  if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
      || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
    {
      addr_p = true;
      result_type = build_pointer_type (result_type);
      true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
      false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
    }

  result = fold_build3 (COND_EXPR, result_type, condition_operand,
                        true_operand, false_operand);

  /* If either operand is a SAVE_EXPR (possibly surrounded by
     arithmetic), make sure it gets done.  */
  true_operand = skip_simple_arithmetic (true_operand);
  false_operand = skip_simple_arithmetic (false_operand);

  if (TREE_CODE (true_operand) == SAVE_EXPR)
    result = build2 (COMPOUND_EXPR, result_type, true_operand, result);

  if (TREE_CODE (false_operand) == SAVE_EXPR)
    result = build2 (COMPOUND_EXPR, result_type, false_operand, result);

  /* ??? Seems the code above is wrong, as it may move ahead of the COND
     SAVE_EXPRs with side effects and not shared by both arms.  */

  if (addr_p)
    result = build_unary_op (INDIRECT_REF, NULL_TREE, result);

  return result;
}

/* Similar, but for RETURN_EXPR.  If RESULT_DECL is non-zero, build
   a RETURN_EXPR around the assignment of RET_VAL to RESULT_DECL.
   If RESULT_DECL is zero, build a bare RETURN_EXPR.  */

tree
build_return_expr (tree result_decl, tree ret_val)
{
  tree result_expr;

  if (result_decl)
    {
      /* The gimplifier explicitly enforces the following invariant:

              RETURN_EXPR
                  |
              MODIFY_EXPR
              /        \
             /          \
         RESULT_DECL    ...

         As a consequence, type-homogeneity dictates that we use the type
         of the RESULT_DECL as the operation type.  */
      tree operation_type = TREE_TYPE (result_decl);

      /* Convert the right operand to the operation type.  Note that
         it's the same transformation as in the MODIFY_EXPR case of
         build_binary_op with the additional guarantee that the type
         cannot involve a placeholder, since otherwise the function
         would use the "target pointer" return mechanism.  */
      if (operation_type != TREE_TYPE (ret_val))
        ret_val = convert (operation_type, ret_val);

      result_expr
        = build2 (MODIFY_EXPR, operation_type, result_decl, ret_val);
    }
  else
    result_expr = NULL_TREE;

  return build1 (RETURN_EXPR, void_type_node, result_expr);
}

/* Build a CALL_EXPR to call FUNDECL with one argument, ARG.  Return
   the CALL_EXPR.  */

tree
build_call_1_expr (tree fundecl, tree arg)
{
  tree call = build_call_nary (TREE_TYPE (TREE_TYPE (fundecl)),
                               build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
                               1, arg);
  TREE_SIDE_EFFECTS (call) = 1;
  return call;
}

/* Build a CALL_EXPR to call FUNDECL with two arguments, ARG1 & ARG2.  Return
   the CALL_EXPR.  */

tree
build_call_2_expr (tree fundecl, tree arg1, tree arg2)
{
  tree call = build_call_nary (TREE_TYPE (TREE_TYPE (fundecl)),
                               build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
                               2, arg1, arg2);
  TREE_SIDE_EFFECTS (call) = 1;
  return call;
}

/* Likewise to call FUNDECL with no arguments.  */

tree
build_call_0_expr (tree fundecl)
{
  /* We rely on build_call_nary to compute TREE_SIDE_EFFECTS.  This makes
     it possible to propagate DECL_IS_PURE on parameterless functions.  */
  tree call = build_call_nary (TREE_TYPE (TREE_TYPE (fundecl)),
                               build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
                               0);
  return call;
}

/* Call a function that raises an exception and pass the line number and file
   name, if requested.  MSG says which exception function to call.

   GNAT_NODE is the gnat node conveying the source location for which the
   error should be signaled, or Empty in which case the error is signaled on
   the current ref_file_name/input_line.

   KIND says which kind of exception this is for
   (N_Raise_{Constraint,Storage,Program}_Error).  */

tree
build_call_raise (int msg, Node_Id gnat_node, char kind)
{
  tree fndecl = gnat_raise_decls[msg];
  tree label = get_exception_label (kind);
  tree filename;
  int line_number;
  const char *str;
  int len;

  /* If this is to be done as a goto, handle that case.  */
  if (label)
    {
      Entity_Id local_raise = Get_Local_Raise_Call_Entity ();
      tree gnu_result = build1 (GOTO_EXPR, void_type_node, label);

      /* If Local_Raise is present, generate
         Local_Raise (exception'Identity);  */
      if (Present (local_raise))
        {
          tree gnu_local_raise
            = gnat_to_gnu_entity (local_raise, NULL_TREE, 0);
          tree gnu_exception_entity
            = gnat_to_gnu_entity (Get_RT_Exception_Entity (msg), NULL_TREE, 0);
          tree gnu_call
            = build_call_1_expr (gnu_local_raise,
                                 build_unary_op (ADDR_EXPR, NULL_TREE,
                                                 gnu_exception_entity));

          gnu_result = build2 (COMPOUND_EXPR, void_type_node,
                               gnu_call, gnu_result);
        }

      return gnu_result;
    }

  str
    = (Debug_Flag_NN || Exception_Locations_Suppressed)
      ? ""
      : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
        ? IDENTIFIER_POINTER
          (get_identifier (Get_Name_String
                           (Debug_Source_Name
                            (Get_Source_File_Index (Sloc (gnat_node))))))
        : ref_filename;

  len = strlen (str) + 1;
  filename = build_string (len, str);
  line_number
    = (gnat_node != Empty && Sloc (gnat_node) != No_Location)
      ? Get_Logical_Line_Number (Sloc (gnat_node)) : input_line;

  TREE_TYPE (filename)
    = build_array_type (char_type_node,
                        build_index_type (build_int_cst (NULL_TREE, len)));

  return
    build_call_2_expr (fndecl,
                       build1 (ADDR_EXPR, build_pointer_type (char_type_node),
                               filename),
                       build_int_cst (NULL_TREE, line_number));
}

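/* For example, build_component_ref below emits a discriminant check
   failure this way:

     build_call_raise (CE_Discriminant_Check_Failed, Empty,
                       N_Raise_Constraint_Error);

   Since GNAT_NODE is Empty there, the raise is reported against the
   current ref_filename/input_line.  */
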
/* qsort comparer for the bit positions of two constructor elements
   for record components.  */

static int
compare_elmt_bitpos (const PTR rt1, const PTR rt2)
{
  const_tree const elmt1 = * (const_tree const *) rt1;
  const_tree const elmt2 = * (const_tree const *) rt2;
  const_tree const field1 = TREE_PURPOSE (elmt1);
  const_tree const field2 = TREE_PURPOSE (elmt2);
  const int ret
    = tree_int_cst_compare (bit_position (field1), bit_position (field2));

  return ret ? ret : (int) (DECL_UID (field1) - DECL_UID (field2));
}

/* Return a CONSTRUCTOR of TYPE whose list is LIST.  */

tree
gnat_build_constructor (tree type, tree list)
{
  tree elmt;
  int n_elmts;
  bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
  bool side_effects = false;
  tree result;

  /* Scan the elements to see if they are all constant or if any has side
     effects, to let us set global flags on the resulting constructor.  Count
     the elements along the way for possible sorting purposes below.  */
  for (n_elmts = 0, elmt = list; elmt; elmt = TREE_CHAIN (elmt), n_elmts++)
    {
      if (!TREE_CONSTANT (TREE_VALUE (elmt))
          || (TREE_CODE (type) == RECORD_TYPE
              && DECL_BIT_FIELD (TREE_PURPOSE (elmt))
              && TREE_CODE (TREE_VALUE (elmt)) != INTEGER_CST)
          || !initializer_constant_valid_p (TREE_VALUE (elmt),
                                            TREE_TYPE (TREE_VALUE (elmt))))
        allconstant = false;

      if (TREE_SIDE_EFFECTS (TREE_VALUE (elmt)))
        side_effects = true;

      /* Propagate a NULL_EXPR from the size of the type.  We won't ever
         be executing the code we generate here in that case, but handle it
         specially to avoid the compiler blowing up.  */
      if (TREE_CODE (type) == RECORD_TYPE
          && (0 != (result
                    = contains_null_expr (DECL_SIZE (TREE_PURPOSE (elmt))))))
        return build1 (NULL_EXPR, type, TREE_OPERAND (result, 0));
    }

  /* For record types with constant components only, sort the field list
     by increasing bit position.  This is necessary to ensure the
     constructor can be output as static data.  */
  if (allconstant && TREE_CODE (type) == RECORD_TYPE && n_elmts > 1)
    {
      /* Fill an array with an element tree per index, and ask qsort to order
         them according to what a bitpos comparison function says.  */
      tree *gnu_arr = (tree *) alloca (sizeof (tree) * n_elmts);
      int i;

      for (i = 0, elmt = list; elmt; elmt = TREE_CHAIN (elmt), i++)
        gnu_arr[i] = elmt;

      qsort (gnu_arr, n_elmts, sizeof (tree), compare_elmt_bitpos);

      /* Then reconstruct the list from the sorted array contents.  */
      list = NULL_TREE;
      for (i = n_elmts - 1; i >= 0; i--)
        {
          TREE_CHAIN (gnu_arr[i]) = list;
          list = gnu_arr[i];
        }
    }

  result = build_constructor_from_list (type, list);
  TREE_CONSTANT (result) = TREE_STATIC (result) = allconstant;
  TREE_SIDE_EFFECTS (result) = side_effects;
  TREE_READONLY (result) = TYPE_READONLY (type) || allconstant;
  return result;
}

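/* For instance, if LIST pairs a field at bit position 64 with its value
   followed by a field at bit position 0, and both values are constants,
   the qsort above reorders the list to positions (0, 64) so that the
   constructor can be emitted as static data in layout order.  */
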
/* Return a COMPONENT_REF to access a field that is given by COMPONENT,
   an IDENTIFIER_NODE giving the name of the field, or FIELD, a FIELD_DECL,
   for the field.  Don't fold the result if NO_FOLD_P is true.

   We also handle the fact that we might have been passed a pointer to the
   actual record and know how to look for fields in variant parts.  */

static tree
build_simple_component_ref (tree record_variable, tree component,
                            tree field, bool no_fold_p)
{
  tree record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_variable));
  tree ref, inner_variable;

  gcc_assert ((TREE_CODE (record_type) == RECORD_TYPE
               || TREE_CODE (record_type) == UNION_TYPE
               || TREE_CODE (record_type) == QUAL_UNION_TYPE)
              && TYPE_SIZE (record_type)
              && (component != 0) != (field != 0));

  /* If no field was specified, look for a field with the specified name
     in the current record only.  */
  if (!field)
    for (field = TYPE_FIELDS (record_type); field;
         field = TREE_CHAIN (field))
      if (DECL_NAME (field) == component)
        break;

  if (!field)
    return NULL_TREE;

  /* If this field is not in the specified record, see if we can find
     something in the record whose original field is the same as this one.  */
  if (DECL_CONTEXT (field) != record_type)
    /* Check if there is a field with name COMPONENT in the record.  */
    {
      tree new_field;

      /* First loop through normal components.  */
      for (new_field = TYPE_FIELDS (record_type); new_field;
           new_field = TREE_CHAIN (new_field))
        if (field == new_field
            || DECL_ORIGINAL_FIELD (new_field) == field
            || new_field == DECL_ORIGINAL_FIELD (field)
            || (DECL_ORIGINAL_FIELD (field)
                && (DECL_ORIGINAL_FIELD (field)
                    == DECL_ORIGINAL_FIELD (new_field))))
          break;

      /* Next, loop through DECL_INTERNAL_P components if we haven't found
         the component in the first search.  Doing this search in two steps
         is required to avoid hidden homonymous fields in the
         _Parent field.  */
      if (!new_field)
        for (new_field = TYPE_FIELDS (record_type); new_field;
             new_field = TREE_CHAIN (new_field))
          if (DECL_INTERNAL_P (new_field))
            {
              tree field_ref
                = build_simple_component_ref (record_variable,
                                              NULL_TREE, new_field, no_fold_p);
              ref = build_simple_component_ref (field_ref, NULL_TREE, field,
                                                no_fold_p);

              if (ref)
                return ref;
            }

      field = new_field;
    }

  if (!field)
    return NULL_TREE;

  /* If the field's offset has overflowed, do not attempt to access it
     as doing so may trigger sanity checks deeper in the back-end.
     Note that we don't need to warn since this will be done on trying
     to declare the object.  */
  if (TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST
      && TREE_OVERFLOW (DECL_FIELD_OFFSET (field)))
    return NULL_TREE;

  /* Look through conversions between type variants.  Note that this
     is transparent as far as the field is concerned.  */
  if (TREE_CODE (record_variable) == VIEW_CONVERT_EXPR
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (record_variable, 0)))
         == record_type)
    inner_variable = TREE_OPERAND (record_variable, 0);
  else
    inner_variable = record_variable;

  ref = build3 (COMPONENT_REF, TREE_TYPE (field), inner_variable, field,
                NULL_TREE);

  if (TREE_READONLY (record_variable) || TREE_READONLY (field))
    TREE_READONLY (ref) = 1;
  if (TREE_THIS_VOLATILE (record_variable) || TREE_THIS_VOLATILE (field)
      || TYPE_VOLATILE (record_type))
    TREE_THIS_VOLATILE (ref) = 1;

  if (no_fold_p)
    return ref;

  /* The generic folder may punt in this case because the inner array type
     can be self-referential, but folding is in fact not problematic.  */
  else if (TREE_CODE (record_variable) == CONSTRUCTOR
           && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (record_variable)))
    {
      VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (record_variable);
      unsigned HOST_WIDE_INT idx;
      tree index, value;
      FOR_EACH_CONSTRUCTOR_ELT (elts, idx, index, value)
        if (index == field)
          return value;
      return ref;
    }

  else
    return fold (ref);
}
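
/* Illustrative sketch only (not compiled): the same field can be reached
   either by name or by FIELD_DECL; the identifiers below are hypothetical.  */
#if 0
  /* Lookup by name: COMPONENT is an IDENTIFIER_NODE, FIELD is NULL_TREE.  */
  tree by_name = build_simple_component_ref (gnu_record, get_identifier ("f"),
                                             NULL_TREE, false);

  /* Lookup by declaration: COMPONENT is NULL_TREE, FIELD is a FIELD_DECL.
     If GNU_FIELD sits behind a DECL_INTERNAL_P field such as a variant
     part, the search above recurses through it to build a nested
     COMPONENT_REF.  */
  tree by_decl = build_simple_component_ref (gnu_record, NULL_TREE,
                                             gnu_field, false);
#endif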
/* Like build_simple_component_ref, except that we give an error if the
   reference could not be found.  */

tree
build_component_ref (tree record_variable, tree component,
                     tree field, bool no_fold_p)
{
  tree ref = build_simple_component_ref (record_variable, component, field,
                                         no_fold_p);

  if (ref)
    return ref;

  /* If FIELD was specified, assume this is an invalid user field so we
     raise Constraint_Error.  Otherwise, we can't find the type to return,
     so abort.  */
  gcc_assert (field);
  return build1 (NULL_EXPR, TREE_TYPE (field),
                 build_call_raise (CE_Discriminant_Check_Failed, Empty,
                                   N_Raise_Constraint_Error));
}
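
/* Illustrative sketch only (not compiled): when the FIELD_DECL cannot be
   located in the record type, the call below yields a NULL_EXPR wrapping a
   Constraint_Error raise rather than a COMPONENT_REF.  The names are
   hypothetical.  */
#if 0
  tree comp = build_component_ref (gnu_variant_record, NULL_TREE,
                                   gnu_variant_field, false);
#endif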
/* Build a GCC tree to call an allocation or deallocation function.
   If GNU_OBJ is nonzero, it is an object to deallocate.  Otherwise,
   generate an allocator.

   GNU_SIZE is the size of the object in bytes and ALIGN is the alignment in
   bits.  GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the
   storage pool to use.  If not present, malloc and free are used except
   if GNAT_PROC is the "fake" value of -1, in which case we allocate the
   object dynamically on the stack frame.  */

tree
build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, unsigned align,
                          Entity_Id gnat_proc, Entity_Id gnat_pool,
                          Node_Id gnat_node)
{
  tree gnu_align = size_int (align / BITS_PER_UNIT);

  gnu_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (gnu_size, gnu_obj);

  if (Present (gnat_proc))
    {
      /* The storage pools are obviously always tagged types, but the
         secondary stack uses the same mechanism and is not tagged.  */
      if (Is_Tagged_Type (Etype (gnat_pool)))
        {
          /* The size is the third parameter; the alignment is the fourth
             parameter and has the same type.  */
          Entity_Id gnat_size_type
            = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
          tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
          tree gnu_proc = gnat_to_gnu (gnat_proc);
          tree gnu_proc_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_proc);
          tree gnu_pool = gnat_to_gnu (gnat_pool);
          tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
          tree gnu_call;

          gnu_size = convert (gnu_size_type, gnu_size);
          gnu_align = convert (gnu_size_type, gnu_align);

          /* The first argument is always the address of the storage pool;
             next comes the address of the object, for a deallocator, then
             the size and alignment.  */
          if (gnu_obj)
            gnu_call = build_call_nary (TREE_TYPE (TREE_TYPE (gnu_proc)),
                                        gnu_proc_addr, 4, gnu_pool_addr,
                                        gnu_obj, gnu_size, gnu_align);
          else
            gnu_call = build_call_nary (TREE_TYPE (TREE_TYPE (gnu_proc)),
                                        gnu_proc_addr, 3, gnu_pool_addr,
                                        gnu_size, gnu_align);
          TREE_SIDE_EFFECTS (gnu_call) = 1;
          return gnu_call;
        }
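
      /* A sketch of the calls built just above, with hypothetical trees
         standing in for the operands:

           allocating:    gnu_proc (gnu_pool_addr, gnu_size, gnu_align)
           deallocating:  gnu_proc (gnu_pool_addr, gnu_obj, gnu_size,
                                    gnu_align)

         Both GNU_SIZE and GNU_ALIGN have been converted beforehand to the
         type of the procedure's third formal.  */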
      /* Secondary stack case.  */
      else
        {
          /* The size is the second parameter.  */
          Entity_Id gnat_size_type
            = Etype (Next_Formal (First_Formal (gnat_proc)));
          tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
          tree gnu_proc = gnat_to_gnu (gnat_proc);
          tree gnu_proc_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_proc);
          tree gnu_call;

          gnu_size = convert (gnu_size_type, gnu_size);

          /* The first argument is the address of the object, for a
             deallocator, then the size.  */
          if (gnu_obj)
            gnu_call = build_call_nary (TREE_TYPE (TREE_TYPE (gnu_proc)),
                                        gnu_proc_addr, 2, gnu_obj, gnu_size);
          else
            gnu_call = build_call_nary (TREE_TYPE (TREE_TYPE (gnu_proc)),
                                        gnu_proc_addr, 1, gnu_size);
          TREE_SIDE_EFFECTS (gnu_call) = 1;
          return gnu_call;
        }
    }

  else if (gnu_obj)
    return build_call_1_expr (free_decl, gnu_obj);

  /* ??? For now, disable variable-sized allocators in the stack since
     we can't yet gimplify an ALLOCATE_EXPR.  */
  else if (gnat_pool == -1
           && TREE_CODE (gnu_size) == INTEGER_CST && !flag_stack_check)
    {
      /* If the size is a constant, we can put it in the fixed portion of
         the stack frame to avoid the need to adjust the stack pointer.  */
      if (TREE_CODE (gnu_size) == INTEGER_CST && !flag_stack_check)
        {
          tree gnu_range
            = build_range_type (NULL_TREE, size_one_node, gnu_size);
          tree gnu_array_type = build_array_type (char_type_node, gnu_range);
          tree gnu_decl
            = create_var_decl (get_identifier ("RETVAL"), NULL_TREE,
                               gnu_array_type, NULL_TREE, false, false, false,
                               false, NULL, gnat_node);

          return convert (ptr_void_type_node,
                          build_unary_op (ADDR_EXPR, NULL_TREE, gnu_decl));
        }
      else
        gcc_unreachable ();
#if 0
      return build2 (ALLOCATE_EXPR, ptr_void_type_node, gnu_size, gnu_align);
#endif
    }
  else
    {
      if (Nkind (gnat_node) != N_Allocator || !Comes_From_Source (gnat_node))
        Check_No_Implicit_Heap_Alloc (gnat_node);

      /* If the allocator size is 32 bits but the pointer size is 64 bits,
         allocate 32-bit memory (sometimes necessary on 64-bit VMS).
         Otherwise default to the standard malloc.  */
      if (UI_To_Int (Esize (Etype (gnat_node))) == 32 && POINTER_SIZE == 64)
        return build_call_1_expr (malloc32_decl, gnu_size);
      else
        return build_call_1_expr (malloc_decl, gnu_size);
    }
}
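
/* Illustrative sketch only (not compiled): the three default-pool shapes
   this routine can produce.  GNU_PTR, GNU_SIZE, ALIGN and GNAT_NODE are
   hypothetical placeholders.  */
#if 0
  /* Heap allocation through malloc: no object, no procedure, no pool.  */
  tree p = build_call_alloc_dealloc (NULL_TREE, gnu_size, align,
                                     Empty, Empty, gnat_node);

  /* Matching deallocation through free.  */
  tree f = build_call_alloc_dealloc (gnu_ptr, gnu_size, align,
                                     Empty, Empty, gnat_node);

  /* Constant-size allocation in the current stack frame, requested with
     the "fake" pool value of -1.  */
  tree s = build_call_alloc_dealloc (NULL_TREE, size_int (64), align,
                                     Empty, -1, gnat_node);
#endif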
/* Build a GCC tree to correspond to allocating an object of TYPE whose
   initial value is INIT, if INIT is nonzero.  Convert the expression to
   RESULT_TYPE, which must be some type of pointer.  Return the tree.
   GNAT_PROC and GNAT_POOL optionally give the procedure to call and
   the storage pool to use.  GNAT_NODE is used to provide an error
   location for restriction violation messages.  If IGNORE_INIT_TYPE is
   true, ignore the type of INIT for the purpose of determining the size;
   this will cause the maximum size to be allocated if TYPE is of
   self-referential size.  */

tree
build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
                 Entity_Id gnat_pool, Node_Id gnat_node, bool ignore_init_type)
{
  tree size = TYPE_SIZE_UNIT (type);
  tree result;
  unsigned int default_allocator_alignment
    = get_target_default_allocator_alignment () * BITS_PER_UNIT;

  /* If the initializer, if any, is a NULL_EXPR, just return a new one.  */
  if (init && TREE_CODE (init) == NULL_EXPR)
    return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));

  /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
     sizes of the object and its template.  Allocate the whole thing and
     fill in the parts that are known.  */
  else if (TYPE_FAT_OR_THIN_POINTER_P (result_type))
    {
      tree storage_type
        = build_unc_object_type_from_ptr (result_type, type,
                                          get_identifier ("ALLOC"));
      tree template_type = TREE_TYPE (TYPE_FIELDS (storage_type));
      tree storage_ptr_type = build_pointer_type (storage_type);
      tree storage;
      tree template_cons = NULL_TREE;

      size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
                                             init);

      /* If the size overflows, pass -1 so the allocator will raise
         Storage_Error.  */
      if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
        size = ssize_int (-1);

      storage = build_call_alloc_dealloc (NULL_TREE, size,
                                          TYPE_ALIGN (storage_type),
                                          gnat_proc, gnat_pool, gnat_node);
      storage = convert (storage_ptr_type, protect_multiple_eval (storage));

      if (TREE_CODE (type) == RECORD_TYPE && TYPE_IS_PADDING_P (type))
        {
          type = TREE_TYPE (TYPE_FIELDS (type));

          if (init)
            init = convert (type, init);
        }

      /* If there is an initializing expression, make a constructor for
         the entire object including the bounds and copy it into the
         object.  If there is no initializing expression, just set the
         bounds.  */
      if (init)
        {
          template_cons = tree_cons (TREE_CHAIN (TYPE_FIELDS (storage_type)),
                                     init, NULL_TREE);
          template_cons = tree_cons (TYPE_FIELDS (storage_type),
                                     build_template (template_type, type,
                                                     init),
                                     template_cons);

          return convert
            (result_type,
             build2 (COMPOUND_EXPR, storage_ptr_type,
                     build_binary_op
                     (MODIFY_EXPR, storage_type,
                      build_unary_op (INDIRECT_REF, NULL_TREE,
                                      convert (storage_ptr_type, storage)),
                      gnat_build_constructor (storage_type, template_cons)),
                     convert (storage_ptr_type, storage)));
        }
      else
        return build2
          (COMPOUND_EXPR, result_type,
           build_binary_op
           (MODIFY_EXPR, template_type,
            build_component_ref
            (build_unary_op (INDIRECT_REF, NULL_TREE,
                             convert (storage_ptr_type, storage)),
             NULL_TREE, TYPE_FIELDS (storage_type), 0),
            build_template (template_type, type, NULL_TREE)),
           convert (result_type, convert (storage_ptr_type, storage)));
    }
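
  /* A sketch of the storage object built by the branch above for an
     unconstrained array allocation (layout only, names hypothetical):

       STORAGE_TYPE
         +-----------------------+----------------------+
         | template (the bounds) | array data           |
         +-----------------------+----------------------+

     With an initializer, both parts are assigned at once through a
     constructor; without one, only the template part is assigned.  */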
  /* If we have an initializing expression, see if its size is simpler
     than the size from the type.  */
  if (!ignore_init_type && init && TYPE_SIZE_UNIT (TREE_TYPE (init))
      && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
          || CONTAINS_PLACEHOLDER_P (size)))
    size = TYPE_SIZE_UNIT (TREE_TYPE (init));

  /* If the size is still self-referential, reference the initializing
     expression, if it is present.  If not, this must have been a
     call to allocate a library-level object, in which case we use
     the maximum size.  */
  if (CONTAINS_PLACEHOLDER_P (size))
    {
      if (!ignore_init_type && init)
        size = substitute_placeholder_in_expr (size, init);
      else
        size = max_size (size, true);
    }

  /* If the size overflows, pass -1 so the allocator will raise
     Storage_Error.  */
  if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
    size = ssize_int (-1);

  /* If this is in the default storage pool and the type alignment is larger
     than what the default allocator supports, make an "aligning" record type
     with room to store a pointer before the field, allocate an object of that
     type, store the system allocator's return value just in front of the
     field and return the field's address.  */

  if (No (gnat_proc) && TYPE_ALIGN (type) > default_allocator_alignment)
    {
      /* Construct the aligning type with enough room for a pointer ahead
         of the field, then allocate.  */
      tree record_type
        = make_aligning_type (type, TYPE_ALIGN (type), size,
                              default_allocator_alignment,
                              POINTER_SIZE / BITS_PER_UNIT);

      tree record, record_addr;

      record_addr
        = build_call_alloc_dealloc (NULL_TREE, TYPE_SIZE_UNIT (record_type),
                                    default_allocator_alignment, Empty, Empty,
                                    gnat_node);

      record_addr
        = convert (build_pointer_type (record_type),
                   save_expr (record_addr));

      record = build_unary_op (INDIRECT_REF, NULL_TREE, record_addr);

      /* Our RESULT (the Ada allocator's value) is the super-aligned address
         of the internal record field ...  */
      result
        = build_unary_op (ADDR_EXPR, NULL_TREE,
                          build_component_ref
                          (record, NULL_TREE, TYPE_FIELDS (record_type), 0));
      result = convert (result_type, result);

      /* ... with the system allocator's return value stored just in
         front.  */
      {
        tree ptr_addr
          = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
                             convert (ptr_void_type_node, result),
                             size_int (-POINTER_SIZE / BITS_PER_UNIT));

        tree ptr_ref
          = convert (build_pointer_type (ptr_void_type_node), ptr_addr);

        result
          = build2 (COMPOUND_EXPR, TREE_TYPE (result),
                    build_binary_op (MODIFY_EXPR, NULL_TREE,
                                     build_unary_op (INDIRECT_REF, NULL_TREE,
                                                     ptr_ref),
                                     convert (ptr_void_type_node,
                                              record_addr)),
                    result);
      }
    }
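
  /* Layout sketch of the aligning record allocated by the branch above,
     with addresses growing to the right; on a 64-bit target, for
     instance, the saved pointer occupies the 8 bytes just below RESULT:

       record_addr                     result (TYPE_ALIGN-aligned)
       v                               v
       +--------- ... -----+----------+------------------ ... ------+
       | padding           | ptr      | field of type TYPE          |
       +--------- ... -----+----------+------------------ ... ------+

     so that deallocation can retrieve the system allocator's original
     pointer from just in front of the field.  */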
  else
    result = convert (result_type,
                      build_call_alloc_dealloc (NULL_TREE, size,
                                                TYPE_ALIGN (type),
                                                gnat_proc,
                                                gnat_pool,
                                                gnat_node));

  /* If we have an initial value, put the new address into a SAVE_EXPR, assign
     the value, and return the address.  Do this with a COMPOUND_EXPR.  */

  if (init)
    {
      result = save_expr (result);
      result
        = build2 (COMPOUND_EXPR, TREE_TYPE (result),
                  build_binary_op
                  (MODIFY_EXPR, NULL_TREE,
                   build_unary_op (INDIRECT_REF,
                                   TREE_TYPE (TREE_TYPE (result)), result),
                   init),
                  result);
    }

  return convert (result_type, result);
}
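
/* Illustrative sketch only (not compiled): allocating an initialized
   object from the default pool, for hypothetical GNU_TYPE, GNU_INIT and
   GNU_PTR_TYPE trees.  The result is a GNU_PTR_TYPE expression that
   evaluates to the address of the new, initialized object.  */
#if 0
  tree gnu_new = build_allocator (gnu_type, gnu_init, gnu_ptr_type,
                                  Empty, Empty, gnat_node, false);
#endif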
/* Fill in a VMS descriptor for EXPR and return a constructor for it.
   GNAT_FORMAL is how we find the descriptor record.  */

tree
fill_vms_descriptor (tree expr, Entity_Id gnat_formal)
{
  tree record_type = TREE_TYPE (TREE_TYPE (get_gnu_tree (gnat_formal)));
  tree field;
  tree const_list = NULL_TREE;

  expr = maybe_unconstrained_array (expr);
  gnat_mark_addressable (expr);

  for (field = TYPE_FIELDS (record_type); field; field = TREE_CHAIN (field))
    const_list
      = tree_cons (field,
                   convert (TREE_TYPE (field),
                            SUBSTITUTE_PLACEHOLDER_IN_EXPR
                            (DECL_INITIAL (field), expr)),
                   const_list);

  return gnat_build_constructor (record_type, nreverse (const_list));
}
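
/* Note: each field of the descriptor record carries, in its DECL_INITIAL,
   a template expression containing a placeholder for the described object;
   substituting EXPR into every template and wrapping the results in a
   constructor is what "fills" the descriptor above.  */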
/* Indicate that we need to take the address of EXPR_NODE and that it
   therefore should not be allocated in a register.  Return true if
   successful.  */

bool
gnat_mark_addressable (tree expr_node)
{
  while (1)
    switch (TREE_CODE (expr_node))
      {
      case ADDR_EXPR:
      case COMPONENT_REF:
      case ARRAY_REF:
      case ARRAY_RANGE_REF:
      case REALPART_EXPR:
      case IMAGPART_EXPR:
      case VIEW_CONVERT_EXPR:
      case CONVERT_EXPR:
      case NON_LVALUE_EXPR:
      case NOP_EXPR:
        expr_node = TREE_OPERAND (expr_node, 0);
        break;

      case CONSTRUCTOR:
        TREE_ADDRESSABLE (expr_node) = 1;
        return true;

      case VAR_DECL:
      case PARM_DECL:
      case RESULT_DECL:
        TREE_ADDRESSABLE (expr_node) = 1;
        return true;

      case FUNCTION_DECL:
        TREE_ADDRESSABLE (expr_node) = 1;
        return true;

      case CONST_DECL:
        return (DECL_CONST_CORRESPONDING_VAR (expr_node)
                && (gnat_mark_addressable
                    (DECL_CONST_CORRESPONDING_VAR (expr_node))));
      default:
        return true;
      }
}
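
/* Illustrative sketch only (not compiled): marking a nested component
   reference addressable drills down through the chain of handled
   expression codes to the underlying declaration, so for a hypothetical
   GNU_COMP of the form R.F (a COMPONENT_REF of a VAR_DECL), the loop
   above ends up setting TREE_ADDRESSABLE on the VAR_DECL for R.  */
#if 0
  gnat_mark_addressable (gnu_comp);
#endif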