/****************************************************************************
 *                                                                          *
 *                        GNAT COMPILER COMPONENTS                          *
 *                                                                          *
 *                              U T I L S 2                                 *
 *                                                                          *
 *                         C Implementation File                           *
 *                                                                          *
 *         Copyright (C) 1992-2010, Free Software Foundation, Inc.          *
 *                                                                          *
 * GNAT is free software; you can redistribute it and/or modify it under    *
 * terms of the GNU General Public License as published by the Free Soft-   *
 * ware Foundation; either version 3, or (at your option) any later ver-    *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY   *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for more details.  You should have received a copy of the GNU General    *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed by the GNAT team at New York University.   *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "ggc.h"
#include "flags.h"
#include "output.h"
#include "tree-inline.h"

#include "ada.h"
#include "types.h"
#include "atree.h"
#include "elists.h"
#include "namet.h"
#include "nlists.h"
#include "snames.h"
#include "stringt.h"
#include "uintp.h"
#include "fe.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"

static tree find_common_type (tree, tree);
static tree compare_arrays (tree, tree, tree);
static tree nonbinary_modular_operation (enum tree_code, tree, tree, tree);
static tree build_simple_component_ref (tree, tree, tree, bool);

/* Return the base type of TYPE.  */

tree
get_base_type (tree type)
{
  if (TREE_CODE (type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (type))
    type = TREE_TYPE (TYPE_FIELDS (type));

  while (TREE_TYPE (type)
         && (TREE_CODE (type) == INTEGER_TYPE
             || TREE_CODE (type) == REAL_TYPE))
    type = TREE_TYPE (type);

  return type;
}

/* EXP is a GCC tree representing an address.  See if we can find how
   strictly the object at that address is aligned.  Return that alignment
   in bits.  If we don't know anything about the alignment, return 0.  */

unsigned int
known_alignment (tree exp)
{
  unsigned int this_alignment;
  unsigned int lhs, rhs;

  switch (TREE_CODE (exp))
    {
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Conversions between pointers and integers don't change the alignment
         of the underlying object.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 0));
      break;

    case COMPOUND_EXPR:
      /* The value of a COMPOUND_EXPR is that of its second operand.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* If two addresses are added, the alignment of the result is the
         minimum of the two alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));
      this_alignment = MIN (lhs, rhs);
      break;

    case POINTER_PLUS_EXPR:
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));
      /* If we don't know the alignment of the offset, we assume that
         of the base.  */
      if (rhs == 0)
        this_alignment = lhs;
      else
        this_alignment = MIN (lhs, rhs);
      break;

    case COND_EXPR:
      /* If there is a choice between two values, use the smaller one.  */
      lhs = known_alignment (TREE_OPERAND (exp, 1));
      rhs = known_alignment (TREE_OPERAND (exp, 2));
      this_alignment = MIN (lhs, rhs);
      break;

    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT c = TREE_INT_CST_LOW (exp);
        /* The first part of this represents the lowest bit in the constant,
           but it is originally in bytes, not bits.  */
        this_alignment = MIN (BITS_PER_UNIT * (c & -c), BIGGEST_ALIGNMENT);
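        /* For instance, c == 24 gives c & -c == 8, i.e. the address is
           known to be at least 8-byte (64-bit) aligned.  */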
      }
      break;

    case MULT_EXPR:
      /* If we know the alignment of just one side, use it.  Otherwise,
         use the product of the alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));

      if (lhs == 0)
        this_alignment = rhs;
      else if (rhs == 0)
        this_alignment = lhs;
      else
        this_alignment = MIN (lhs * rhs, BIGGEST_ALIGNMENT);
      break;

    case BIT_AND_EXPR:
      /* A bit-and expression is as aligned as the maximum alignment of the
         operands.  We typically get here for a complex lhs and a constant
         negative power of two on the rhs to force an explicit alignment, so
         don't bother looking at the lhs.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case ADDR_EXPR:
      this_alignment = expr_align (TREE_OPERAND (exp, 0));
      break;

    case CALL_EXPR:
      {
        tree t = maybe_inline_call_in_expr (exp);
        if (t)
          return known_alignment (t);
      }

      /* Fall through...  */

    default:
      /* For other pointer expressions, we assume that the pointed-to object
         is at least as aligned as the pointed-to type.  Beware that we can
         have a dummy type here (e.g. a Taft Amendment type), for which the
         alignment is meaningless and should be ignored.  */
      if (POINTER_TYPE_P (TREE_TYPE (exp))
          && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp))))
        this_alignment = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
      else
        this_alignment = 0;
      break;
    }

  return this_alignment;
}

/* We have a comparison or assignment operation on two types, T1 and T2, which
   are either both array types or both record types.  T1 is assumed to be for
   the left hand side operand, and T2 for the right hand side.  Return the
   type that both operands should be converted to for the operation, if any.
   Otherwise return zero.  */

static tree
find_common_type (tree t1, tree t2)
{
  /* ??? As of today, various constructs lead here with types of different
     sizes even when both constants (e.g. tagged types, packable vs regular
     component types, padded vs unpadded types, ...).  While some of these
     would better be handled upstream (types should be made consistent before
     calling into build_binary_op), some others are really expected and we
     have to be careful.  */

  /* We must prevent writing more than what the target may hold if this is
     for an assignment, and the case of tagged types is handled in
     build_binary_op, so use the lhs type if it is known to be smaller, or
     of constant size and the rhs type is not, whatever the modes.  We also
     force t1 in case of constant size equality to minimize occurrences of
     view conversions on the lhs of assignments.  */
  if (TREE_CONSTANT (TYPE_SIZE (t1))
      && (!TREE_CONSTANT (TYPE_SIZE (t2))
          || !tree_int_cst_lt (TYPE_SIZE (t2), TYPE_SIZE (t1))))
    return t1;

  /* Otherwise, if the lhs type is non-BLKmode, use it.  Note that we know
     that we will not have any alignment problems since, if we did, the
     non-BLKmode type could not have been used.  */
  if (TYPE_MODE (t1) != BLKmode)
    return t1;

  /* If the rhs type is of constant size, use it whatever the modes.  At
     this point it is known to be smaller, or of constant size and the
     lhs type is not.  */
  if (TREE_CONSTANT (TYPE_SIZE (t2)))
    return t2;

  /* Otherwise, if the rhs type is non-BLKmode, use it.  */
  if (TYPE_MODE (t2) != BLKmode)
    return t2;

  /* In this case, both types have variable size and BLKmode.  It's
     probably best to leave the "type mismatch" because changing it
     could cause a bad self-referential reference.  */
  return NULL_TREE;
}

/* Return an expression tree representing an equality comparison of A1 and A2,
   two objects of type ARRAY_TYPE.  The result should be of type RESULT_TYPE.

   Two arrays are equal in one of two ways: (1) if both have zero length in
   some dimension (not necessarily the same dimension) or (2) if the lengths
   in each dimension are equal and the data is equal.  We perform the length
   tests in as efficient a manner as possible.  */

static tree
compare_arrays (tree result_type, tree a1, tree a2)
{
  tree result = convert (result_type, boolean_true_node);
  tree a1_is_null = convert (result_type, boolean_false_node);
  tree a2_is_null = convert (result_type, boolean_false_node);
  tree t1 = TREE_TYPE (a1);
  tree t2 = TREE_TYPE (a2);
  bool a1_side_effects_p = TREE_SIDE_EFFECTS (a1);
  bool a2_side_effects_p = TREE_SIDE_EFFECTS (a2);
  bool length_zero_p = false;

  /* If either operand has side-effects, it has to be evaluated only once
     in spite of the multiple references to it in the comparison.  */
  if (a1_side_effects_p)
    a1 = gnat_protect_expr (a1);

  if (a2_side_effects_p)
    a2 = gnat_protect_expr (a2);

  /* Process each dimension separately and compare the lengths.  If any
     dimension has a length known to be zero, set LENGTH_ZERO_P to true
     in order to suppress the comparison of the data at the end.  */
  while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
    {
      tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
      tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
      tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
      tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
      tree length1 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub1, lb1),
                                 size_one_node);
      tree length2 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub2, lb2),
                                 size_one_node);
      tree comparison, this_a1_is_null, this_a2_is_null;

      /* If the length of the first array is a constant, swap our operands
         unless the length of the second array is the constant zero.  */
      if (TREE_CODE (length1) == INTEGER_CST && !integer_zerop (length2))
        {
          tree tem;
          bool btem;

          tem = a1, a1 = a2, a2 = tem;
          tem = t1, t1 = t2, t2 = tem;
          tem = lb1, lb1 = lb2, lb2 = tem;
          tem = ub1, ub1 = ub2, ub2 = tem;
          tem = length1, length1 = length2, length2 = tem;
          tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
          btem = a1_side_effects_p, a1_side_effects_p = a2_side_effects_p,
          a2_side_effects_p = btem;
        }
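
      /* From here on, if either length is known at compile time, it is
         LENGTH2, which the two constant cases below rely on.  */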

      /* If the length of the second array is the constant zero, we can just
         use the original stored bounds for the first array and see whether
         last < first holds.  */
      if (integer_zerop (length2))
        {
          length_zero_p = true;

          ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
          lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));

          comparison = build_binary_op (LT_EXPR, result_type, ub1, lb1);
          comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
          if (EXPR_P (comparison))
            SET_EXPR_LOCATION (comparison, input_location);

          this_a1_is_null = comparison;
          this_a2_is_null = convert (result_type, boolean_true_node);
        }

      /* Otherwise, if the length is some other constant value, we know that
         this dimension in the second array cannot be superflat, so we can
         just use its length computed from the actual stored bounds.  */
      else if (TREE_CODE (length2) == INTEGER_CST)
        {
          tree bt;

          ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
          lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
          /* Note that we know that UB2 and LB2 are constant and hence
             cannot contain a PLACEHOLDER_EXPR.  */
          ub2 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
          lb2 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
          bt = get_base_type (TREE_TYPE (ub1));

          comparison
            = build_binary_op (EQ_EXPR, result_type,
                               build_binary_op (MINUS_EXPR, bt, ub1, lb1),
                               build_binary_op (MINUS_EXPR, bt, ub2, lb2));
          comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
          if (EXPR_P (comparison))
            SET_EXPR_LOCATION (comparison, input_location);

          this_a1_is_null = build_binary_op (LT_EXPR, result_type, ub1, lb1);
          if (EXPR_P (this_a1_is_null))
            SET_EXPR_LOCATION (this_a1_is_null, input_location);

          this_a2_is_null = convert (result_type, boolean_false_node);
        }

      /* Otherwise, compare the computed lengths.  */
      else
        {
          length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
          length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);

          comparison
            = build_binary_op (EQ_EXPR, result_type, length1, length2);
          if (EXPR_P (comparison))
            SET_EXPR_LOCATION (comparison, input_location);

          /* If the length expression is of the form (cond ? val : 0), assume
             that cond is equivalent to (length != 0).  That's guaranteed by
             construction of the array types in gnat_to_gnu_entity.  */
          if (TREE_CODE (length1) == COND_EXPR
              && integer_zerop (TREE_OPERAND (length1, 2)))
            this_a1_is_null = invert_truthvalue (TREE_OPERAND (length1, 0));
          else
            this_a1_is_null = build_binary_op (EQ_EXPR, result_type, length1,
                                               size_zero_node);
          if (EXPR_P (this_a1_is_null))
            SET_EXPR_LOCATION (this_a1_is_null, input_location);

          /* Likewise for the second array.  */
          if (TREE_CODE (length2) == COND_EXPR
              && integer_zerop (TREE_OPERAND (length2, 2)))
            this_a2_is_null = invert_truthvalue (TREE_OPERAND (length2, 0));
          else
            this_a2_is_null = build_binary_op (EQ_EXPR, result_type, length2,
                                               size_zero_node);
          if (EXPR_P (this_a2_is_null))
            SET_EXPR_LOCATION (this_a2_is_null, input_location);
        }

      /* Append expressions for this dimension to the final expressions.  */
      result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
                                result, comparison);

      a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                    this_a1_is_null, a1_is_null);

      a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                    this_a2_is_null, a2_is_null);

      t1 = TREE_TYPE (t1);
      t2 = TREE_TYPE (t2);
    }

  /* Unless the length of some dimension is known to be zero, compare the
     data in the array.  */
  if (!length_zero_p)
    {
      tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));
      tree comparison;

      if (type)
        {
          a1 = convert (type, a1),
          a2 = convert (type, a2);
        }

      comparison = fold_build2 (EQ_EXPR, result_type, a1, a2);
      if (EXPR_P (comparison))
        SET_EXPR_LOCATION (comparison, input_location);

      result
        = build_binary_op (TRUTH_ANDIF_EXPR, result_type, result, comparison);
    }
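
  /* The TRUTH_ANDIF_EXPR above guarantees that the comparison of the data
     is only evaluated once all the length checks have succeeded.  */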

  /* The result is also true if both sizes are zero.  */
  result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                            build_binary_op (TRUTH_ANDIF_EXPR, result_type,
                                             a1_is_null, a2_is_null),
                            result);

  /* If either operand has side-effects, they have to be evaluated before
     starting the comparison above since the place they would be otherwise
     evaluated could be wrong.  */
  if (a1_side_effects_p)
    result = build2 (COMPOUND_EXPR, result_type, a1, result);

  if (a2_side_effects_p)
    result = build2 (COMPOUND_EXPR, result_type, a2, result);

  return result;
}

/* Compute the result of applying OP_CODE to LHS and RHS, where both are of
   type TYPE.  We know that TYPE is a modular type with a nonbinary
   modulus.  */

static tree
nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
                             tree rhs)
{
  tree modulus = TYPE_MODULUS (type);
  unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
  unsigned int precision;
  bool unsignedp = true;
  tree op_type = type;
  tree result;

  /* If this is an addition of a constant, convert it to a subtraction
     of a constant since we can do that faster.  */
  if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
    {
      rhs = fold_build2 (MINUS_EXPR, type, modulus, rhs);
      op_code = MINUS_EXPR;
    }
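
  /* This relies on the identity lhs + rhs == lhs - (modulus - rhs)
     (mod modulus): the subtraction path below fixes up the result with a
     conditional addition of the modulus instead of a full MOD operation.  */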

  /* For the logical operations, we only need PRECISION bits.  For
     addition and subtraction, we need one more and for multiplication we
     need twice as many.  But we never want to make a size smaller than
     our size.  */
  if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
    needed_precision += 1;
  else if (op_code == MULT_EXPR)
    needed_precision *= 2;
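
  /* For instance, with a modulus of 250, NEEDED_PRECISION starts at 8:
     sums can reach 498, which needs 9 bits, and products can reach 62001,
     which needs 16 bits.  */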

  precision = MAX (needed_precision, TYPE_PRECISION (op_type));

  /* Unsigned will do for everything but subtraction.  */
  if (op_code == MINUS_EXPR)
    unsignedp = false;

  /* If our type is the wrong signedness or isn't wide enough, make a new
     type and convert both our operands to it.  */
  if (TYPE_PRECISION (op_type) < precision
      || TYPE_UNSIGNED (op_type) != unsignedp)
    {
      /* Copy the node so we ensure it can be modified to make it modular.  */
      op_type = copy_node (gnat_type_for_size (precision, unsignedp));
      modulus = convert (op_type, modulus);
      SET_TYPE_MODULUS (op_type, modulus);
      TYPE_MODULAR_P (op_type) = 1;
      lhs = convert (op_type, lhs);
      rhs = convert (op_type, rhs);
    }

  /* Do the operation, then we'll fix it up.  */
  result = fold_build2 (op_code, op_type, lhs, rhs);

  /* For multiplication, we have no choice but to do a full modulus
     operation.  However, we want to do this in the narrowest
     possible size.  */
  if (op_code == MULT_EXPR)
    {
      tree div_type = copy_node (gnat_type_for_size (needed_precision, 1));
      modulus = convert (div_type, modulus);
      SET_TYPE_MODULUS (div_type, modulus);
      TYPE_MODULAR_P (div_type) = 1;
      result = convert (op_type,
                        fold_build2 (TRUNC_MOD_EXPR, div_type,
                                     convert (div_type, result), modulus));
    }

  /* For subtraction, add the modulus back if we are negative.  */
  else if (op_code == MINUS_EXPR)
    {
      result = gnat_protect_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
                            fold_build2 (LT_EXPR, boolean_type_node, result,
                                         convert (op_type, integer_zero_node)),
                            fold_build2 (PLUS_EXPR, op_type, result, modulus),
                            result);
    }

  /* For the other operations, subtract the modulus if we are >= it.  */
  else
    {
      result = gnat_protect_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
                            fold_build2 (GE_EXPR, boolean_type_node,
                                         result, modulus),
                            fold_build2 (MINUS_EXPR, op_type,
                                         result, modulus),
                            result);
    }

  return convert (type, result);
}

/* Make a binary operation of kind OP_CODE.  RESULT_TYPE is the type
   desired for the result.  Usually the operation is to be performed
   in that type.  For MODIFY_EXPR and ARRAY_REF, RESULT_TYPE may be 0
   in which case the type to be used will be derived from the operands.

   This function is very much unlike the ones for C and C++ since we
   have already done any type conversion and matching required.  All we
   have to do here is validate the work done by SEM and handle subtypes.  */

tree
build_binary_op (enum tree_code op_code, tree result_type,
                 tree left_operand, tree right_operand)
{
  tree left_type = TREE_TYPE (left_operand);
  tree right_type = TREE_TYPE (right_operand);
  tree left_base_type = get_base_type (left_type);
  tree right_base_type = get_base_type (right_type);
  tree operation_type = result_type;
  tree best_type = NULL_TREE;
  tree modulus, result;
  bool has_side_effects = false;

  if (operation_type
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  if (operation_type
      && !AGGREGATE_TYPE_P (operation_type)
      && TYPE_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  modulus = (operation_type
             && TREE_CODE (operation_type) == INTEGER_TYPE
             && TYPE_MODULAR_P (operation_type)
             ? TYPE_MODULUS (operation_type) : NULL_TREE);
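
  /* From now on, a non-null MODULUS means the result must eventually be
     reduced modulo it; the cases below clear MODULUS whenever that
     reduction is unnecessary or handled specially.  */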

  switch (op_code)
    {
    case INIT_EXPR:
    case MODIFY_EXPR:
      /* If there were integral or pointer conversions on the LHS, remove
         them; we'll be putting them back below if needed.  Likewise for
         conversions between array and record types, except for justified
         modular types.  But don't do this if the right operand is not
         BLKmode (for packed arrays) unless we are not changing the mode.  */
      while ((CONVERT_EXPR_P (left_operand)
              || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
             && (((INTEGRAL_TYPE_P (left_type)
                   || POINTER_TYPE_P (left_type))
                  && (INTEGRAL_TYPE_P (TREE_TYPE
                                       (TREE_OPERAND (left_operand, 0)))
                      || POINTER_TYPE_P (TREE_TYPE
                                         (TREE_OPERAND (left_operand, 0)))))
                 || (((TREE_CODE (left_type) == RECORD_TYPE
                       && !TYPE_JUSTIFIED_MODULAR_P (left_type))
                      || TREE_CODE (left_type) == ARRAY_TYPE)
                     && ((TREE_CODE (TREE_TYPE
                                     (TREE_OPERAND (left_operand, 0)))
                          == RECORD_TYPE)
                         || (TREE_CODE (TREE_TYPE
                                        (TREE_OPERAND (left_operand, 0)))
                             == ARRAY_TYPE))
                     && (TYPE_MODE (right_type) == BLKmode
                         || (TYPE_MODE (left_type)
                             == TYPE_MODE (TREE_TYPE
                                           (TREE_OPERAND
                                            (left_operand, 0))))))))
        {
          left_operand = TREE_OPERAND (left_operand, 0);
          left_type = TREE_TYPE (left_operand);
        }

      /* If a class-wide type may be involved, force use of the RHS type.  */
      if ((TREE_CODE (right_type) == RECORD_TYPE
           || TREE_CODE (right_type) == UNION_TYPE)
          && TYPE_ALIGN_OK (right_type))
        operation_type = right_type;

      /* If we are copying between padded objects with compatible types, use
         the padded view of the objects, this is very likely more efficient.
         Likewise for a padded object that is assigned a constructor, if we
         can convert the constructor to the inner type, to avoid putting a
         VIEW_CONVERT_EXPR on the LHS.  But don't do so if we wouldn't have
         actually copied anything.  */
      else if (TYPE_IS_PADDING_P (left_type)
               && TREE_CONSTANT (TYPE_SIZE (left_type))
               && ((TREE_CODE (right_operand) == COMPONENT_REF
                    && TYPE_IS_PADDING_P
                       (TREE_TYPE (TREE_OPERAND (right_operand, 0)))
                    && gnat_types_compatible_p
                       (left_type,
                        TREE_TYPE (TREE_OPERAND (right_operand, 0))))
                   || (TREE_CODE (right_operand) == CONSTRUCTOR
                       && !CONTAINS_PLACEHOLDER_P
                           (DECL_SIZE (TYPE_FIELDS (left_type)))))
               && !integer_zerop (TYPE_SIZE (right_type)))
        operation_type = left_type;

      /* Find the best type to use for copying between aggregate types.  */
      else if (((TREE_CODE (left_type) == ARRAY_TYPE
                 && TREE_CODE (right_type) == ARRAY_TYPE)
                || (TREE_CODE (left_type) == RECORD_TYPE
                    && TREE_CODE (right_type) == RECORD_TYPE))
               && (best_type = find_common_type (left_type, right_type)))
        operation_type = best_type;

      /* Otherwise use the LHS type.  */
      else if (!operation_type)
        operation_type = left_type;

      /* Ensure everything on the LHS is valid.  If we have a field reference,
         strip anything that get_inner_reference can handle.  Then remove any
         conversions between types having the same code and mode.  And mark
         VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE.  When done, we must have
         either an INDIRECT_REF, a NULL_EXPR or a DECL node.  */
      result = left_operand;
      while (true)
        {
          tree restype = TREE_TYPE (result);

          if (TREE_CODE (result) == COMPONENT_REF
              || TREE_CODE (result) == ARRAY_REF
              || TREE_CODE (result) == ARRAY_RANGE_REF)
            while (handled_component_p (result))
              result = TREE_OPERAND (result, 0);
          else if (TREE_CODE (result) == REALPART_EXPR
                   || TREE_CODE (result) == IMAGPART_EXPR
                   || (CONVERT_EXPR_P (result)
                       && (((TREE_CODE (restype)
                             == TREE_CODE (TREE_TYPE
                                           (TREE_OPERAND (result, 0))))
                            && (TYPE_MODE (TREE_TYPE
                                           (TREE_OPERAND (result, 0)))
                                == TYPE_MODE (restype)))
                           || TYPE_ALIGN_OK (restype))))
            result = TREE_OPERAND (result, 0);
          else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
            {
              TREE_ADDRESSABLE (result) = 1;
              result = TREE_OPERAND (result, 0);
            }
          else
            break;
        }

      gcc_assert (TREE_CODE (result) == INDIRECT_REF
                  || TREE_CODE (result) == NULL_EXPR
                  || DECL_P (result));

      /* Convert the right operand to the operation type unless it is
         either already of the correct type or if the type involves a
         placeholder, since the RHS may not have the same record type.  */
      if (operation_type != right_type
          && !CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type)))
        {
          right_operand = convert (operation_type, right_operand);
          right_type = operation_type;
        }

      /* If the left operand is not of the same type as the operation
         type, wrap it up in a VIEW_CONVERT_EXPR.  */
      if (left_type != operation_type)
        left_operand = unchecked_convert (operation_type, left_operand, false);

      has_side_effects = true;
      modulus = NULL_TREE;
      break;

    case ARRAY_REF:
      if (!operation_type)
        operation_type = TREE_TYPE (left_type);

      /* ... fall through ... */

    case ARRAY_RANGE_REF:
      /* First look through conversion between type variants.  Note that
         this changes neither the operation type nor the type domain.  */
      if (TREE_CODE (left_operand) == VIEW_CONVERT_EXPR
          && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (left_operand, 0)))
             == TYPE_MAIN_VARIANT (left_type))
        {
          left_operand = TREE_OPERAND (left_operand, 0);
          left_type = TREE_TYPE (left_operand);
        }

      /* For a range, make sure the element type is consistent.  */
      if (op_code == ARRAY_RANGE_REF
          && TREE_TYPE (operation_type) != TREE_TYPE (left_type))
        operation_type = build_array_type (TREE_TYPE (left_type),
                                           TYPE_DOMAIN (operation_type));

      /* Then convert the right operand to its base type.  This will prevent
         unneeded sign conversions when sizetype is wider than integer.  */
      right_operand = convert (right_base_type, right_operand);
      right_operand = convert (sizetype, right_operand);

      if (!TREE_CONSTANT (right_operand)
          || !TREE_CONSTANT (TYPE_MIN_VALUE (right_type)))
        gnat_mark_addressable (left_operand);

      modulus = NULL_TREE;
      break;

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
#endif
      operation_type = left_base_type;
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);
      break;

    case GE_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
#endif
      /* If either operand is a NULL_EXPR, just return a new one.  */
      if (TREE_CODE (left_operand) == NULL_EXPR)
        return build2 (op_code, result_type,
                       build1 (NULL_EXPR, integer_type_node,
                               TREE_OPERAND (left_operand, 0)),
                       integer_zero_node);

      else if (TREE_CODE (right_operand) == NULL_EXPR)
        return build2 (op_code, result_type,
                       build1 (NULL_EXPR, integer_type_node,
                               TREE_OPERAND (right_operand, 0)),
                       integer_zero_node);

      /* If either object is a justified modular type, get the
         fields from within.  */
      if (TREE_CODE (left_type) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (left_type))
        {
          left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
                                  left_operand);
          left_type = TREE_TYPE (left_operand);
          left_base_type = get_base_type (left_type);
        }

      if (TREE_CODE (right_type) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (right_type))
        {
          right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
                                   right_operand);
          right_type = TREE_TYPE (right_operand);
          right_base_type = get_base_type (right_type);
        }

      /* If both objects are arrays, compare them specially.  */
      if ((TREE_CODE (left_type) == ARRAY_TYPE
           || (TREE_CODE (left_type) == INTEGER_TYPE
               && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
          && (TREE_CODE (right_type) == ARRAY_TYPE
              || (TREE_CODE (right_type) == INTEGER_TYPE
                  && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
        {
          result = compare_arrays (result_type, left_operand, right_operand);

          if (op_code == NE_EXPR)
            result = invert_truthvalue (result);
          else
            gcc_assert (op_code == EQ_EXPR);

          return result;
        }

      /* Otherwise, the base types must be the same, unless they are both fat
         pointer types or record types.  In the latter case, use the best type
         and convert both operands to that type.  */
      if (left_base_type != right_base_type)
        {
          if (TYPE_IS_FAT_POINTER_P (left_base_type)
              && TYPE_IS_FAT_POINTER_P (right_base_type))
            {
              gcc_assert (TYPE_MAIN_VARIANT (left_base_type)
                          == TYPE_MAIN_VARIANT (right_base_type));
              best_type = left_base_type;
            }

          else if (TREE_CODE (left_base_type) == RECORD_TYPE
                   && TREE_CODE (right_base_type) == RECORD_TYPE)
            {
              /* The only way this is permitted is if both types have the same
                 name.  In that case, one of them must not be self-referential.
                 Use it as the best type.  Even better with a fixed size.  */
              gcc_assert (TYPE_NAME (left_base_type)
                          && TYPE_NAME (left_base_type)
                             == TYPE_NAME (right_base_type));

              if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
                best_type = left_base_type;
              else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
                best_type = right_base_type;
              else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
                best_type = left_base_type;
              else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
                best_type = right_base_type;
              else
                gcc_unreachable ();
            }

          else
            gcc_unreachable ();

          left_operand = convert (best_type, left_operand);
          right_operand = convert (best_type, right_operand);
        }
      else
        {
          left_operand = convert (left_base_type, left_operand);
          right_operand = convert (right_base_type, right_operand);
        }

      /* If we are comparing a fat pointer against zero, we just need to
         compare the data pointer.  */
      if (TYPE_IS_FAT_POINTER_P (left_base_type)
          && TREE_CODE (right_operand) == CONSTRUCTOR
          && integer_zerop (VEC_index (constructor_elt,
                                       CONSTRUCTOR_ELTS (right_operand),
                                       0)->value))
        {
          left_operand
            = build_component_ref (left_operand, NULL_TREE,
                                   TYPE_FIELDS (left_base_type), false);
          right_operand
            = convert (TREE_TYPE (left_operand), integer_zero_node);
        }

      modulus = NULL_TREE;
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* The RHS of a shift can be any type.  Also, ignore any modulus
         (we used to abort, but this is needed for unchecked conversion
         to modular types).  Otherwise, processing is the same as normal.  */
      gcc_assert (operation_type == left_base_type);
      modulus = NULL_TREE;
      left_operand = convert (operation_type, left_operand);
      break;

    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      /* For binary modulus, if the inputs are in range, so are the
         outputs.  */
      if (modulus && integer_pow2p (modulus))
        modulus = NULL_TREE;
      goto common;

    case COMPLEX_EXPR:
      gcc_assert (TREE_TYPE (result_type) == left_base_type
                  && TREE_TYPE (result_type) == right_base_type);
      left_operand = convert (left_base_type, left_operand);
      right_operand = convert (right_base_type, right_operand);
      break;

    case TRUNC_DIV_EXPR:   case TRUNC_MOD_EXPR:
    case CEIL_DIV_EXPR:    case CEIL_MOD_EXPR:
    case FLOOR_DIV_EXPR:   case FLOOR_MOD_EXPR:
    case ROUND_DIV_EXPR:   case ROUND_MOD_EXPR:
      /* These always produce results lower than either operand.  */
      modulus = NULL_TREE;
      goto common;

    case POINTER_PLUS_EXPR:
      gcc_assert (operation_type == left_base_type
                  && sizetype == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (sizetype, right_operand);
      break;

    case PLUS_NOMOD_EXPR:
    case MINUS_NOMOD_EXPR:
      if (op_code == PLUS_NOMOD_EXPR)
        op_code = PLUS_EXPR;
      else
        op_code = MINUS_EXPR;
      modulus = NULL_TREE;

      /* ... fall through ... */

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* Avoid doing arithmetic in ENUMERAL_TYPE or BOOLEAN_TYPE like the
         other compilers.  Contrary to C, Ada doesn't allow arithmetic in
         these types but can generate addition/subtraction for Succ/Pred.  */
      if (operation_type
          && (TREE_CODE (operation_type) == ENUMERAL_TYPE
              || TREE_CODE (operation_type) == BOOLEAN_TYPE))
        operation_type = left_base_type = right_base_type
          = gnat_type_for_mode (TYPE_MODE (operation_type),
                                TYPE_UNSIGNED (operation_type));

      /* ... fall through ... */

    default:
    common:
      /* The result type should be the same as the base types of both
         operands (and they should be the same).  Convert everything to
         the result type.  */
      gcc_assert (operation_type == left_base_type
                  && left_base_type == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);
    }

  if (modulus && !integer_pow2p (modulus))
    {
      result = nonbinary_modular_operation (op_code, operation_type,
                                            left_operand, right_operand);
      modulus = NULL_TREE;
    }
  /* If either operand is a NULL_EXPR, just return a new one.  */
  else if (TREE_CODE (left_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
  else if (TREE_CODE (right_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
  else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
    result = fold (build4 (op_code, operation_type, left_operand,
                           right_operand, NULL_TREE, NULL_TREE));
  else
    result
      = fold_build2 (op_code, operation_type, left_operand, right_operand);

  TREE_SIDE_EFFECTS (result) |= has_side_effects;
  TREE_CONSTANT (result)
    |= (TREE_CONSTANT (left_operand) & TREE_CONSTANT (right_operand)
        && op_code != ARRAY_REF && op_code != ARRAY_RANGE_REF);

  if ((op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
      && TYPE_VOLATILE (operation_type))
    TREE_THIS_VOLATILE (result) = 1;

  /* If we are working with modular types, perform the MOD operation
     if something above hasn't eliminated the need for it.  */
  if (modulus)
    result = fold_build2 (FLOOR_MOD_EXPR, operation_type, result,
                          convert (operation_type, modulus));

  if (result_type && result_type != operation_type)
    result = convert (result_type, result);

  return result;
}

/* Similar, but for unary operations.  */

tree
build_unary_op (enum tree_code op_code, tree result_type, tree operand)
{
  tree type = TREE_TYPE (operand);
  tree base_type = get_base_type (type);
  tree operation_type = result_type;
  tree result;
  bool side_effects = false;

  if (operation_type
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  if (operation_type
      && !AGGREGATE_TYPE_P (operation_type)
      && TYPE_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  switch (op_code)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      if (!operation_type)
        result_type = operation_type = TREE_TYPE (type);
      else
        gcc_assert (result_type == TREE_TYPE (type));

      result = fold_build1 (op_code, operation_type, operand);
      break;

    case TRUTH_NOT_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
#endif
      result = invert_truthvalue (operand);
      break;

    case ATTR_ADDR_EXPR:
    case ADDR_EXPR:
      switch (TREE_CODE (operand))
        {
        case INDIRECT_REF:
        case UNCONSTRAINED_ARRAY_REF:
          result = TREE_OPERAND (operand, 0);

          /* Make sure the type here is a pointer, not a reference.
             GCC wants pointer types for function addresses.  */
          if (!result_type)
            result_type = build_pointer_type (type);

          /* If the underlying object can alias everything, propagate the
             property since we are effectively retrieving the object.  */
          if (POINTER_TYPE_P (TREE_TYPE (result))
              && TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (result)))
            {
              if (TREE_CODE (result_type) == POINTER_TYPE
                  && !TYPE_REF_CAN_ALIAS_ALL (result_type))
                result_type
                  = build_pointer_type_for_mode (TREE_TYPE (result_type),
                                                 TYPE_MODE (result_type),
                                                 true);
              else if (TREE_CODE (result_type) == REFERENCE_TYPE
                       && !TYPE_REF_CAN_ALIAS_ALL (result_type))
                result_type
                  = build_reference_type_for_mode (TREE_TYPE (result_type),
                                                   TYPE_MODE (result_type),
                                                   true);
            }
          break;

        case NULL_EXPR:
          result = operand;
          TREE_TYPE (result) = type = build_pointer_type (type);
          break;

        case COMPOUND_EXPR:
          /* Fold a compound expression if it has unconstrained array type
             since the middle-end cannot handle it.  But we don't do it in
             the general case because it may introduce aliasing issues if the
             first operand is an indirect assignment and the second operand
             the corresponding address, e.g. for an allocator.  */
          if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
            {
              result = build_unary_op (ADDR_EXPR, result_type,
                                       TREE_OPERAND (operand, 1));
              result = build2 (COMPOUND_EXPR, TREE_TYPE (result),
                               TREE_OPERAND (operand, 0), result);
              break;
            }

          goto common;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
        case COMPONENT_REF:
        case BIT_FIELD_REF:
          /* If this is for 'Address, find the address of the prefix and add
             the offset to the field.  Otherwise, do this the normal way.  */
          if (op_code == ATTR_ADDR_EXPR)
            {
              HOST_WIDE_INT bitsize;
              HOST_WIDE_INT bitpos;
              tree offset, inner;
              enum machine_mode mode;
              int unsignedp, volatilep;

              inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
                                           &mode, &unsignedp, &volatilep,
                                           false);

              /* If INNER is a padding type whose field has a self-referential
                 size, convert to that inner type.  We know the offset is zero
                 and we need to have that type visible.  */
              if (TYPE_IS_PADDING_P (TREE_TYPE (inner))
                  && CONTAINS_PLACEHOLDER_P
                     (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS
                                            (TREE_TYPE (inner))))))
                inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
                                 inner);

              /* Compute the offset as a byte offset from INNER.  */
              if (!offset)
                offset = size_zero_node;

              offset = size_binop (PLUS_EXPR, offset,
                                   size_int (bitpos / BITS_PER_UNIT));

              /* Take the address of INNER, convert the offset to void *, and
                 add them.  It will later be converted to the desired result
                 type, if any.  */
              inner = build_unary_op (ADDR_EXPR, NULL_TREE, inner);
              inner = convert (ptr_void_type_node, inner);
              result = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
                                        inner, offset);
              result = convert (build_pointer_type (TREE_TYPE (operand)),
                                result);
              break;
            }

          goto common;

        case CONSTRUCTOR:
          /* If this is just a constructor for a padded record, we can
             just take the address of the single field and convert it to
             a pointer to our type.  */
          if (TYPE_IS_PADDING_P (type))
            {
              result = VEC_index (constructor_elt,
                                  CONSTRUCTOR_ELTS (operand),
                                  0)->value;
              result = convert (build_pointer_type (TREE_TYPE (operand)),
                                build_unary_op (ADDR_EXPR, NULL_TREE, result));
              break;
            }

          goto common;

        case NOP_EXPR:
          if (AGGREGATE_TYPE_P (type)
              && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
            return build_unary_op (ADDR_EXPR, result_type,
                                   TREE_OPERAND (operand, 0));

          /* ... fallthru ... */

        case VIEW_CONVERT_EXPR:
          /* If this is just a variant conversion or if the conversion doesn't
             change the mode, get the result type from this type and go down.
             This is needed for conversions of CONST_DECLs, to eventually get
             to the address of their CORRESPONDING_VARs.  */
          if ((TYPE_MAIN_VARIANT (type)
               == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (operand, 0))))
              || (TYPE_MODE (type) != BLKmode
                  && (TYPE_MODE (type)
                      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0))))))
            return build_unary_op (ADDR_EXPR,
                                   (result_type ? result_type
                                    : build_pointer_type (type)),
                                   TREE_OPERAND (operand, 0));
          goto common;

        case CONST_DECL:
          operand = DECL_CONST_CORRESPONDING_VAR (operand);

          /* ... fall through ... */

        default:
        common:

          /* If we are taking the address of a padded record whose field
             contains a template, take the address of the template.  */
          if (TYPE_IS_PADDING_P (type)
              && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
              && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
            {
              type = TREE_TYPE (TYPE_FIELDS (type));
              operand = convert (type, operand);
            }

          gnat_mark_addressable (operand);
          result = build_fold_addr_expr (operand);
        }

      TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
      break;

    case INDIRECT_REF:
      /* If we want to refer to an unconstrained array, use the appropriate
         expression to do so.  This will never survive down to the back-end.
         But if TYPE is a thin pointer, first convert to a fat pointer.  */
      if (TYPE_IS_THIN_POINTER_P (type)
          && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
        {
          operand
            = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))),
                       operand);
          type = TREE_TYPE (operand);
        }

      if (TYPE_IS_FAT_POINTER_P (type))
        {
          result = build1 (UNCONSTRAINED_ARRAY_REF,
                           TYPE_UNCONSTRAINED_ARRAY (type), operand);
          TREE_READONLY (result)
            = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
        }

      /* If we are dereferencing an ADDR_EXPR, return its operand.  */
      else if (TREE_CODE (operand) == ADDR_EXPR)
        result = TREE_OPERAND (operand, 0);

      /* Otherwise, build and fold the indirect reference.  */
      else
        {
          result = build_fold_indirect_ref (operand);
          TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
        }

      side_effects
        = (!TYPE_IS_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)));
      break;

    case NEGATE_EXPR:
    case BIT_NOT_EXPR:
      {
        tree modulus = ((operation_type
                         && TREE_CODE (operation_type) == INTEGER_TYPE
                         && TYPE_MODULAR_P (operation_type))
                        ? TYPE_MODULUS (operation_type) : NULL_TREE);
        int mod_pow2 = modulus && integer_pow2p (modulus);

        /* If this is a modular type, there are various possibilities
           depending on the operation and whether the modulus is a
           power of two or not.  */

        if (modulus)
          {
            gcc_assert (operation_type == base_type);
            operand = convert (operation_type, operand);

            /* The fastest in the negate case for binary modulus is
               the straightforward code; the TRUNC_MOD_EXPR below
               is an AND operation.  */
            if (op_code == NEGATE_EXPR && mod_pow2)
              result = fold_build2 (TRUNC_MOD_EXPR, operation_type,
                                    fold_build1 (NEGATE_EXPR, operation_type,
                                                 operand),
                                    modulus);

            /* For the nonbinary negate case, return zero for a zero operand,
               else return the modulus minus the operand.  If the modulus
               is a power of two minus one, we can do the subtraction as an
               XOR since it is equivalent and faster on most machines.  */
            else if (op_code == NEGATE_EXPR && !mod_pow2)
              {
                if (integer_pow2p (fold_build2 (PLUS_EXPR, operation_type,
                                                modulus,
                                                convert (operation_type,
                                                         integer_one_node))))
                  result = fold_build2 (BIT_XOR_EXPR, operation_type,
                                        operand, modulus);
                else
                  result = fold_build2 (MINUS_EXPR, operation_type,
                                        modulus, operand);
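
                /* For instance, with modulus 255, 255 - x and x ^ 0xFF
                   coincide on 1 .. 255, and the COND_EXPR below maps the
                   x == 0 case to 0.  */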

                result = fold_build3 (COND_EXPR, operation_type,
                                      fold_build2 (NE_EXPR,
                                                   boolean_type_node,
                                                   operand,
                                                   convert
                                                   (operation_type,
                                                    integer_zero_node)),
                                      result, operand);
              }
            else
              {
                /* For the NOT cases, we need a constant equal to
                   the modulus minus one.  For a binary modulus, we
                   XOR against the constant and subtract the operand from
                   that constant for nonbinary modulus.  */

                tree cnst = fold_build2 (MINUS_EXPR, operation_type, modulus,
                                         convert (operation_type,
                                                  integer_one_node));

                if (mod_pow2)
                  result = fold_build2 (BIT_XOR_EXPR, operation_type,
                                        operand, cnst);
                else
                  result = fold_build2 (MINUS_EXPR, operation_type,
                                        cnst, operand);
              }

            break;
          }
      }

      /* ... fall through ... */

    default:
      gcc_assert (operation_type == base_type);
      result = fold_build1 (op_code, operation_type,
                            convert (operation_type, operand));
    }

  if (side_effects)
    {
      TREE_SIDE_EFFECTS (result) = 1;
      if (TREE_CODE (result) == INDIRECT_REF)
        TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
    }

  if (result_type && TREE_TYPE (result) != result_type)
    result = convert (result_type, result);

  return result;
}

/* Similar, but for COND_EXPR.  */

tree
build_cond_expr (tree result_type, tree condition_operand,
                 tree true_operand, tree false_operand)
{
  bool addr_p = false;
  tree result;

  /* The front-end verified that the result, true and false operands have
     the same base type.  Convert everything to the result type.  */
  true_operand = convert (result_type, true_operand);
  false_operand = convert (result_type, false_operand);

  /* If the result type is unconstrained, take the address of the operands and
     then dereference the result.  Likewise if the result type is passed by
     reference, but this is natively handled in the gimplifier.  */
  if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
      || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
    {
      result_type = build_pointer_type (result_type);
      true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
      false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
      addr_p = true;
    }

  result = fold_build3 (COND_EXPR, result_type, condition_operand,
                        true_operand, false_operand);

  /* If we have a common SAVE_EXPR (possibly surrounded by arithmetics)
     in both arms, make sure it gets evaluated by moving it ahead of the
     conditional expression.  This is necessary because it is evaluated
     in only one place at run time and would otherwise be uninitialized
     in one of the arms.  */
  true_operand = skip_simple_arithmetic (true_operand);
  false_operand = skip_simple_arithmetic (false_operand);

  if (true_operand == false_operand && TREE_CODE (true_operand) == SAVE_EXPR)
    result = build2 (COMPOUND_EXPR, result_type, true_operand, result);

  if (addr_p)
    result = build_unary_op (INDIRECT_REF, NULL_TREE, result);

  return result;
}

/* Similar, but for RETURN_EXPR.  If RET_VAL is non-null, build a RETURN_EXPR
   around the assignment of RET_VAL to RET_OBJ.  Otherwise just build a bare
   RETURN_EXPR around RESULT_OBJ, which may be null in this case.  */

tree
build_return_expr (tree ret_obj, tree ret_val)
{
  tree result_expr;

  if (ret_val)
    {
      /* The gimplifier explicitly enforces the following invariant:

              RETURN_EXPR
                  |
              MODIFY_EXPR
              /        \
             /          \
         RET_OBJ        ...

         As a consequence, type consistency dictates that we use the type
         of the RET_OBJ as the operation type.  */
      tree operation_type = TREE_TYPE (ret_obj);

      /* Convert the right operand to the operation type.  Note that it's the
         same transformation as in the MODIFY_EXPR case of build_binary_op,
         with the assumption that the type cannot involve a placeholder.  */
      if (operation_type != TREE_TYPE (ret_val))
        ret_val = convert (operation_type, ret_val);

      result_expr = build2 (MODIFY_EXPR, operation_type, ret_obj, ret_val);
    }
  else
    result_expr = ret_obj;

  return build1 (RETURN_EXPR, void_type_node, result_expr);
}

/* Build a CALL_EXPR to call FUNDECL with one argument, ARG.  Return
   the CALL_EXPR.  */

tree
build_call_1_expr (tree fundecl, tree arg)
{
  tree call = build_call_nary (TREE_TYPE (TREE_TYPE (fundecl)),
                               build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
                               1, arg);
  TREE_SIDE_EFFECTS (call) = 1;
  return call;
}

/* Build a CALL_EXPR to call FUNDECL with two arguments, ARG1 & ARG2.  Return
   the CALL_EXPR.  */

tree
build_call_2_expr (tree fundecl, tree arg1, tree arg2)
{
  tree call = build_call_nary (TREE_TYPE (TREE_TYPE (fundecl)),
                               build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
                               2, arg1, arg2);
  TREE_SIDE_EFFECTS (call) = 1;
  return call;
}

/* Likewise to call FUNDECL with no arguments.  */

tree
build_call_0_expr (tree fundecl)
{
  /* We rely on build_call_nary to compute TREE_SIDE_EFFECTS.  This makes
     it possible to propagate DECL_IS_PURE on parameterless functions.  */
  tree call = build_call_nary (TREE_TYPE (TREE_TYPE (fundecl)),
                               build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
                               0);
  return call;
}

/* Call a function that raises an exception and pass the line number and file
   name, if requested.  MSG says which exception function to call.

   GNAT_NODE is the gnat node conveying the source location for which the
   error should be signaled, or Empty in which case the error is signaled on
   the current ref_file_name/input_line.

   KIND says which kind of exception this is for
   (N_Raise_{Constraint,Storage,Program}_Error).  */

tree
build_call_raise (int msg, Node_Id gnat_node, char kind)
{
  tree fndecl = gnat_raise_decls[msg];
  tree label = get_exception_label (kind);
  tree filename;
  int line_number;
  const char *str;
  int len;

  /* If this is to be done as a goto, handle that case.  */
  if (label)
    {
      Entity_Id local_raise = Get_Local_Raise_Call_Entity ();
      tree gnu_result = build1 (GOTO_EXPR, void_type_node, label);

      /* If Local_Raise is present, generate
         Local_Raise (exception'Identity);  */
      if (Present (local_raise))
        {
          tree gnu_local_raise
            = gnat_to_gnu_entity (local_raise, NULL_TREE, 0);
          tree gnu_exception_entity
            = gnat_to_gnu_entity (Get_RT_Exception_Entity (msg), NULL_TREE, 0);
          tree gnu_call
            = build_call_1_expr (gnu_local_raise,
                                 build_unary_op (ADDR_EXPR, NULL_TREE,
                                                 gnu_exception_entity));

          gnu_result = build2 (COMPOUND_EXPR, void_type_node,
                               gnu_call, gnu_result);
        }

      return gnu_result;
    }

  str
    = (Debug_Flag_NN || Exception_Locations_Suppressed)
      ? ""
      : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
        ? IDENTIFIER_POINTER
          (get_identifier (Get_Name_String
                           (Debug_Source_Name
                            (Get_Source_File_Index (Sloc (gnat_node))))))
        : ref_filename;

  len = strlen (str);
  filename = build_string (len, str);
  line_number
    = (gnat_node != Empty && Sloc (gnat_node) != No_Location)
      ? Get_Logical_Line_Number (Sloc (gnat_node)) : input_line;

  TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
                                           build_index_type (size_int (len)));

  return
    build_call_2_expr (fndecl,
                       build1 (ADDR_EXPR,
                               build_pointer_type (unsigned_char_type_node),
                               filename),
                       build_int_cst (NULL_TREE, line_number));
}

/* qsort comparer for the bit positions of two constructor elements
   for record components.  */

static int
compare_elmt_bitpos (const PTR rt1, const PTR rt2)
{
  const_tree const elmt1 = * (const_tree const *) rt1;
  const_tree const elmt2 = * (const_tree const *) rt2;
  const_tree const field1 = TREE_PURPOSE (elmt1);
  const_tree const field2 = TREE_PURPOSE (elmt2);
  const int ret
    = tree_int_cst_compare (bit_position (field1), bit_position (field2));
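
  /* Ties on the bit position are broken on DECL_UID so the outcome is
     deterministic: qsort is not guaranteed to be a stable sort.  */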
  return ret ? ret : (int) (DECL_UID (field1) - DECL_UID (field2));
}

/* Return a CONSTRUCTOR of TYPE whose list is LIST.  */

tree
gnat_build_constructor (tree type, tree list)
{
  bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
  bool side_effects = false;
  tree elmt, result;
  int n_elmts;

  /* Scan the elements to see if they are all constant or if any has side
     effects, to let us set global flags on the resulting constructor.  Count
     the elements along the way for possible sorting purposes below.  */
  for (n_elmts = 0, elmt = list; elmt; elmt = TREE_CHAIN (elmt), n_elmts++)
    {
      tree obj = TREE_PURPOSE (elmt);
      tree val = TREE_VALUE (elmt);

      /* The predicate must be in keeping with output_constructor.  */
      if (!TREE_CONSTANT (val)
          || (TREE_CODE (type) == RECORD_TYPE
              && CONSTRUCTOR_BITFIELD_P (obj)
              && !initializer_constant_valid_for_bitfield_p (val))
          || !initializer_constant_valid_p (val, TREE_TYPE (val)))
        allconstant = false;

      if (TREE_SIDE_EFFECTS (val))
        side_effects = true;
    }

  /* For record types with constant components only, sort the field list
     by increasing bit position.  This is necessary to ensure the
     constructor can be output as static data.  */
  if (allconstant && TREE_CODE (type) == RECORD_TYPE && n_elmts > 1)
    {
      /* Fill an array with an element tree per index, and ask qsort to order
         them according to what a bitpos comparison function says.  */
      tree *gnu_arr = (tree *) alloca (sizeof (tree) * n_elmts);
      int i;

      for (i = 0, elmt = list; elmt; elmt = TREE_CHAIN (elmt), i++)
        gnu_arr[i] = elmt;

      qsort (gnu_arr, n_elmts, sizeof (tree), compare_elmt_bitpos);

      /* Then reconstruct the list from the sorted array contents.  */
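      /* Walking the array backwards and prepending each element yields
         the list in ascending bit-position order in a single pass.  */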
      list = NULL_TREE;
      for (i = n_elmts - 1; i >= 0; i--)
        {
          TREE_CHAIN (gnu_arr[i]) = list;
          list = gnu_arr[i];
        }
    }

  result = build_constructor_from_list (type, list);
  TREE_CONSTANT (result) = TREE_STATIC (result) = allconstant;
  TREE_SIDE_EFFECTS (result) = side_effects;
  TREE_READONLY (result) = TYPE_READONLY (type) || allconstant;
  return result;
}

/* Return a COMPONENT_REF to access a field that is given by COMPONENT,
   an IDENTIFIER_NODE giving the name of the field, or FIELD, a FIELD_DECL,
   for the field.  Don't fold the result if NO_FOLD_P is true.

   We also handle the fact that we might have been passed a pointer to the
   actual record and know how to look for fields in variant parts.  */

static tree
build_simple_component_ref (tree record_variable, tree component,
                            tree field, bool no_fold_p)
{
  tree record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_variable));
  tree ref, inner_variable;

  gcc_assert ((TREE_CODE (record_type) == RECORD_TYPE
               || TREE_CODE (record_type) == UNION_TYPE
               || TREE_CODE (record_type) == QUAL_UNION_TYPE)
              && TYPE_SIZE (record_type)
              && (component != 0) != (field != 0));

  /* If no field was specified, look for a field with the specified name
     in the current record only.  */
  if (!field)
    for (field = TYPE_FIELDS (record_type); field;
         field = TREE_CHAIN (field))
      if (DECL_NAME (field) == component)
        break;

  if (!field)
    return NULL_TREE;

  /* If this field is not in the specified record, see if we can find
     something in the record whose original field is the same as this one.  */
  if (DECL_CONTEXT (field) != record_type)
    /* Check if there is a field with name COMPONENT in the record.  */
    {
      tree new_field;

      /* First loop thru normal components.  */
      for (new_field = TYPE_FIELDS (record_type); new_field;
           new_field = TREE_CHAIN (new_field))
        if (SAME_FIELD_P (field, new_field))
          break;

      /* Next, loop thru DECL_INTERNAL_P components if we haven't found
         the component in the first search.  Doing this search in two steps
         is required to avoid hidden homonymous fields in the
         _Parent field.  */
      if (!new_field)
        for (new_field = TYPE_FIELDS (record_type); new_field;
             new_field = TREE_CHAIN (new_field))
          if (DECL_INTERNAL_P (new_field))
            {
              tree field_ref
                = build_simple_component_ref (record_variable,
                                              NULL_TREE, new_field, no_fold_p);
              ref = build_simple_component_ref (field_ref, NULL_TREE, field,
                                                no_fold_p);

              if (ref)
                return ref;
            }

      field = new_field;
    }

  if (!field)
    return NULL_TREE;

  /* If the field's offset has overflowed, do not attempt to access it
     as doing so may trigger sanity checks deeper in the back-end.
     Note that we don't need to warn since this will be done on trying
     to declare the object.  */
  if (TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST
      && TREE_OVERFLOW (DECL_FIELD_OFFSET (field)))
    return NULL_TREE;

  /* Look through conversion between type variants.  Note that this
     is transparent as far as the field is concerned.  */
  if (TREE_CODE (record_variable) == VIEW_CONVERT_EXPR
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (record_variable, 0)))
         == record_type)
    inner_variable = TREE_OPERAND (record_variable, 0);
  else
    inner_variable = record_variable;

  ref = build3 (COMPONENT_REF, TREE_TYPE (field), inner_variable, field,
                NULL_TREE);

  if (TREE_READONLY (record_variable) || TREE_READONLY (field))
    TREE_READONLY (ref) = 1;
  if (TREE_THIS_VOLATILE (record_variable) || TREE_THIS_VOLATILE (field)
      || TYPE_VOLATILE (record_type))
    TREE_THIS_VOLATILE (ref) = 1;

  if (no_fold_p)
    return ref;

  /* The generic folder may punt in this case because the inner array type
     can be self-referential, but folding is in fact not problematic.  */
  else if (TREE_CODE (record_variable) == CONSTRUCTOR
           && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (record_variable)))
    {
      VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (record_variable);
      unsigned HOST_WIDE_INT idx;
      tree index, value;
      FOR_EACH_CONSTRUCTOR_ELT (elts, idx, index, value)
        if (index == field)
          return value;
      return ref;
    }

  else
    return fold (ref);
}

/* Like build_simple_component_ref, except that we give an error if the
   reference could not be found.  */
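/* A hedged sketch of the failure path: for a component that exists only in
   a variant part absent from the actual object, e.g.

       case D is
          when True  => X : Integer;
          when False => null;
       end case;

   a reference to X when D = False makes the simple lookup fail, and the
   code below then yields a NULL_EXPR whose operand raises Constraint_Error
   at run time.  */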
tree
build_component_ref (tree record_variable, tree component,
                     tree field, bool no_fold_p)
{
  tree ref = build_simple_component_ref (record_variable, component, field,
                                         no_fold_p);

  if (ref)
    return ref;

  /* If FIELD was specified, assume this is an invalid user field so raise
     Constraint_Error.  Otherwise, we have no type to return so abort.  */
  gcc_assert (field);
  return build1 (NULL_EXPR, TREE_TYPE (field),
                 build_call_raise (CE_Discriminant_Check_Failed, Empty,
                                   N_Raise_Constraint_Error));
}
/* Helper for build_call_alloc_dealloc, with arguments to be interpreted
   identically.  Process the case where a GNAT_PROC to call is provided.  */
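/* In outline (illustrative only, assuming the two runtime profiles handled
   below): for a tagged storage pool the calls built here have the shape

       Proc (Pool'Address, [Obj'Address,] Size, Alignment);

   whereas for the secondary stack they reduce to

       Proc ([Obj'Address,] Size);

   with the bracketed operand present only for deallocation.  */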
static inline tree
build_call_alloc_dealloc_proc (tree gnu_obj, tree gnu_size, tree gnu_type,
                               Entity_Id gnat_proc, Entity_Id gnat_pool)
{
  tree gnu_proc = gnat_to_gnu (gnat_proc);
  tree gnu_proc_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_proc);
  tree gnu_call;

  /* The storage pools are obviously always tagged types, but the
     secondary stack uses the same mechanism and is not tagged.  */
  if (Is_Tagged_Type (Etype (gnat_pool)))
    {
      /* The size is the third parameter; the alignment is the fourth one
         and has the same type.  */
      Entity_Id gnat_size_type
        = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
      tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);

      tree gnu_pool = gnat_to_gnu (gnat_pool);
      tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
      tree gnu_align = size_int (TYPE_ALIGN (gnu_type) / BITS_PER_UNIT);

      gnu_size = convert (gnu_size_type, gnu_size);
      gnu_align = convert (gnu_size_type, gnu_align);

      /* The first argument is always the address of the storage pool; next
         comes the address of the object, for a deallocator, then the
         size and alignment.  */
      if (gnu_obj)
        gnu_call = build_call_nary (TREE_TYPE (TREE_TYPE (gnu_proc)),
                                    gnu_proc_addr, 4, gnu_pool_addr,
                                    gnu_obj, gnu_size, gnu_align);
      else
        gnu_call = build_call_nary (TREE_TYPE (TREE_TYPE (gnu_proc)),
                                    gnu_proc_addr, 3, gnu_pool_addr,
                                    gnu_size, gnu_align);
    }

  /* Secondary stack case.  */
  else
    {
      /* The size is the second parameter.  */
      Entity_Id gnat_size_type
        = Etype (Next_Formal (First_Formal (gnat_proc)));
      tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);

      gnu_size = convert (gnu_size_type, gnu_size);

      /* The first argument is the address of the object, for a deallocator,
         then the size.  */
      if (gnu_obj)
        gnu_call = build_call_nary (TREE_TYPE (TREE_TYPE (gnu_proc)),
                                    gnu_proc_addr, 2, gnu_obj, gnu_size);
      else
        gnu_call = build_call_nary (TREE_TYPE (TREE_TYPE (gnu_proc)),
                                    gnu_proc_addr, 1, gnu_size);
    }

  TREE_SIDE_EFFECTS (gnu_call) = 1;
  return gnu_call;
}
/* Helper for build_call_alloc_dealloc, to build and return an allocator for
   DATA_SIZE bytes aimed at containing a DATA_TYPE object, using the default
   __gnat_malloc allocator.  Honor DATA_TYPE alignments greater than what
   the latter offers.  */
static inline tree
maybe_wrap_malloc (tree data_size, tree data_type, Node_Id gnat_node)
{
  /* When the DATA_TYPE alignment is stricter than what malloc offers
     (super-aligned case), we allocate an "aligning" wrapper type and return
     the address of its single data field with the malloc's return value
     stored just in front.  */
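  /* A sketch of the resulting layout (illustrative only):

         +-------------------+------------------------+
         | malloc'ed pointer |    DATA_TYPE object    |
         +-------------------+------------------------+
         ^                   ^
         storage_ptr         returned address, aligned
                             to DATA_TYPE's alignment

     so that maybe_wrap_free below can fetch the original pointer back
     at a fixed negative offset from the returned address.  */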
  unsigned int data_align = TYPE_ALIGN (data_type);
  unsigned int default_allocator_alignment
    = get_target_default_allocator_alignment () * BITS_PER_UNIT;

  tree aligning_type
    = ((data_align > default_allocator_alignment)
       ? make_aligning_type (data_type, data_align, data_size,
                             default_allocator_alignment,
                             POINTER_SIZE / BITS_PER_UNIT)
       : NULL_TREE);

  tree size_to_malloc
    = aligning_type ? TYPE_SIZE_UNIT (aligning_type) : data_size;

  tree malloc_ptr;

  /* On VMS, if 64-bit memory is disabled or pointers are 64-bit and the
     allocator size is 32-bit or Convention C, allocate 32-bit memory.  */
  if (TARGET_ABI_OPEN_VMS
      && (!TARGET_MALLOC64
          || (POINTER_SIZE == 64
              && (UI_To_Int (Esize (Etype (gnat_node))) == 32
                  || Convention (Etype (gnat_node)) == Convention_C))))
    malloc_ptr = build_call_1_expr (malloc32_decl, size_to_malloc);
  else
    malloc_ptr = build_call_1_expr (malloc_decl, size_to_malloc);

  if (aligning_type)
    {
      /* Latch malloc's return value and get a pointer to the aligning field
         first.  */
      tree storage_ptr = gnat_protect_expr (malloc_ptr);

      tree aligning_record_addr
        = convert (build_pointer_type (aligning_type), storage_ptr);

      tree aligning_record
        = build_unary_op (INDIRECT_REF, NULL_TREE, aligning_record_addr);

      tree aligning_field
        = build_component_ref (aligning_record, NULL_TREE,
                               TYPE_FIELDS (aligning_type), false);

      tree aligning_field_addr
        = build_unary_op (ADDR_EXPR, NULL_TREE, aligning_field);

      /* Then arrange to store the allocator's return value ahead
         and return.  */
      tree storage_ptr_slot_addr
        = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
                           convert (ptr_void_type_node, aligning_field_addr),
                           size_int (-(HOST_WIDE_INT) POINTER_SIZE
                                     / BITS_PER_UNIT));

      tree storage_ptr_slot
        = build_unary_op (INDIRECT_REF, NULL_TREE,
                          convert (build_pointer_type (ptr_void_type_node),
                                   storage_ptr_slot_addr));

      return
        build2 (COMPOUND_EXPR, TREE_TYPE (aligning_field_addr),
                build_binary_op (MODIFY_EXPR, NULL_TREE,
                                 storage_ptr_slot, storage_ptr),
                aligning_field_addr);
    }
  else
    return malloc_ptr;
}
/* Helper for build_call_alloc_dealloc, to release a DATA_TYPE object
   designated by DATA_PTR using the __gnat_free entry point.  */

static inline tree
maybe_wrap_free (tree data_ptr, tree data_type)
{
  /* In the regular alignment case, we pass the data pointer straight to
     free.  In the super-aligned case, we need to retrieve the initial
     allocator return value, stored in front of the data block at allocation
     time.  */

  unsigned int data_align = TYPE_ALIGN (data_type);
  unsigned int default_allocator_alignment
    = get_target_default_allocator_alignment () * BITS_PER_UNIT;

  tree free_ptr;

  if (data_align > default_allocator_alignment)
    {
      /* DATA_FRONT_PTR (void *)
         = (void *)DATA_PTR - sizeof (void *)  */
      tree data_front_ptr
        = build_binary_op
          (POINTER_PLUS_EXPR, ptr_void_type_node,
           convert (ptr_void_type_node, data_ptr),
           size_int (-(HOST_WIDE_INT) POINTER_SIZE / BITS_PER_UNIT));

      /* FREE_PTR (void *) = *(void **)DATA_FRONT_PTR  */
      free_ptr
        = build_unary_op
          (INDIRECT_REF, NULL_TREE,
           convert (build_pointer_type (ptr_void_type_node), data_front_ptr));
    }
  else
    free_ptr = data_ptr;

  return build_call_1_expr (free_decl, free_ptr);
}
/* Build a GCC tree to call an allocation or deallocation function.
   If GNU_OBJ is nonzero, it is an object to deallocate.  Otherwise,
   generate an allocator.

   GNU_SIZE is the number of bytes to allocate and GNU_TYPE is the contained
   object type, used to determine the to-be-honored address alignment.
   GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the storage
   pool to use.  If not present, malloc and free are used.  GNAT_NODE is used
   to provide an error location for restriction violation messages.  */
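/* The dispatch below thus boils down to (a simplified sketch):

       if (Present (gnat_proc))   call the user-defined pool routine;
       else if (gnu_obj)          __gnat_free (obj);
       else                       __gnat_malloc (size);

   with the malloc and free legs possibly wrapped by the helpers above to
   honor alignments stricter than the default allocator's.  */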
tree
build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, tree gnu_type,
                          Entity_Id gnat_proc, Entity_Id gnat_pool,
                          Node_Id gnat_node)
{
  gnu_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (gnu_size, gnu_obj);

  /* Explicit procedure to call?  This one is assumed to deal with the type
     alignment constraints.  */
  if (Present (gnat_proc))
    return build_call_alloc_dealloc_proc (gnu_obj, gnu_size, gnu_type,
                                          gnat_proc, gnat_pool);

  /* Otherwise, object to "free" or "malloc" with possible special processing
     for alignments stricter than what the default allocator honors.  */
  else if (gnu_obj)
    return maybe_wrap_free (gnu_obj, gnu_type);
  else
    {
      /* Assert that we can no longer be called with this special pool.  */
      gcc_assert (gnat_pool != -1);

      /* Check that we aren't violating the associated restriction.  */
      if (!(Nkind (gnat_node) == N_Allocator && Comes_From_Source (gnat_node)))
        Check_No_Implicit_Heap_Alloc (gnat_node);

      return maybe_wrap_malloc (gnu_size, gnu_type, gnat_node);
    }
}
/* Build a GCC tree to correspond to allocating an object of TYPE whose
   initial value is INIT, if INIT is nonzero.  Convert the expression to
   RESULT_TYPE, which must be some type of pointer.  Return the tree.

   GNAT_PROC and GNAT_POOL optionally give the procedure to call and
   the storage pool to use.  GNAT_NODE is used to provide an error
   location for restriction violation messages.  If IGNORE_INIT_TYPE is
   true, ignore the type of INIT for the purpose of determining the size;
   this will cause the maximum size to be allocated if TYPE is of
   self-referential size.  */
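/* For instance (a loose sketch of the fat-pointer case handled first
   below): for an allocator of an unconstrained array such as

       new String'("AB")

   storage for the bounds template and the data is allocated in one block,
   and the result is conceptually

       COMPOUND_EXPR <*storage = {template, "AB"}, storage>

   converted to RESULT_TYPE, where storage is the protected result of the
   raw allocation.  */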
tree
build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
                 Entity_Id gnat_pool, Node_Id gnat_node, bool ignore_init_type)
{
  tree size = TYPE_SIZE_UNIT (type);
  tree result;

  /* If the initializer, if present, is a NULL_EXPR, just return a new one.  */
  if (init && TREE_CODE (init) == NULL_EXPR)
    return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));

  /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
     sizes of the object and its template.  Allocate the whole thing and
     fill in the parts that are known.  */
  else if (TYPE_IS_FAT_OR_THIN_POINTER_P (result_type))
    {
      tree storage_type
        = build_unc_object_type_from_ptr (result_type, type,
                                          get_identifier ("ALLOC"));
      tree template_type = TREE_TYPE (TYPE_FIELDS (storage_type));
      tree storage_ptr_type = build_pointer_type (storage_type);
      tree storage;
      tree template_cons = NULL_TREE;

      size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
                                             init);

      /* If the size overflows, pass -1 so the allocator will raise
         Storage_Error.  */
      if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
        size = ssize_int (-1);

      storage = build_call_alloc_dealloc (NULL_TREE, size, storage_type,
                                          gnat_proc, gnat_pool, gnat_node);
      storage = convert (storage_ptr_type, gnat_protect_expr (storage));

      if (TYPE_IS_PADDING_P (type))
        {
          type = TREE_TYPE (TYPE_FIELDS (type));
          if (init)
            init = convert (type, init);
        }

      /* If there is an initializing expression, make a constructor for
         the entire object including the bounds and copy it into the
         object.  If there is no initializing expression, just set the
         bounds.  */
      if (init)
        {
          template_cons = tree_cons (TREE_CHAIN (TYPE_FIELDS (storage_type)),
                                     init, NULL_TREE);
          template_cons = tree_cons (TYPE_FIELDS (storage_type),
                                     build_template (template_type, type,
                                                     init),
                                     template_cons);

          return convert
            (result_type,
             build2 (COMPOUND_EXPR, storage_ptr_type,
                     build_binary_op
                     (MODIFY_EXPR, storage_type,
                      build_unary_op (INDIRECT_REF, NULL_TREE,
                                      convert (storage_ptr_type, storage)),
                      gnat_build_constructor (storage_type, template_cons)),
                     convert (storage_ptr_type, storage)));
        }
      else
        return build2
          (COMPOUND_EXPR, result_type,
           build_binary_op
           (MODIFY_EXPR, template_type,
            build_component_ref
            (build_unary_op (INDIRECT_REF, NULL_TREE,
                             convert (storage_ptr_type, storage)),
             NULL_TREE, TYPE_FIELDS (storage_type), false),
            build_template (template_type, type, NULL_TREE)),
           convert (result_type, convert (storage_ptr_type, storage)));
    }

  /* If we have an initializing expression, see if its size is simpler
     than the size from the type.  */
  if (!ignore_init_type && init && TYPE_SIZE_UNIT (TREE_TYPE (init))
      && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
          || CONTAINS_PLACEHOLDER_P (size)))
    size = TYPE_SIZE_UNIT (TREE_TYPE (init));

  /* If the size is still self-referential, reference the initializing
     expression, if it is present.  If not, this must have been a
     call to allocate a library-level object, in which case we use
     the maximum size.  */
  if (CONTAINS_PLACEHOLDER_P (size))
    {
      if (!ignore_init_type && init)
        size = substitute_placeholder_in_expr (size, init);
      else
        size = max_size (size, true);
    }

  /* If the size overflows, pass -1 so the allocator will raise
     Storage_Error.  */
  if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
    size = ssize_int (-1);

  result = convert (result_type,
                    build_call_alloc_dealloc (NULL_TREE, size, type,
                                              gnat_proc, gnat_pool,
                                              gnat_node));

  /* If we have an initial value, protect the new address, assign the value
     and return the address with a COMPOUND_EXPR.  */
  if (init)
    {
      result = gnat_protect_expr (result);
      result
        = build2 (COMPOUND_EXPR, TREE_TYPE (result),
                  build_binary_op
                  (MODIFY_EXPR, NULL_TREE,
                   build_unary_op (INDIRECT_REF,
                                   TREE_TYPE (TREE_TYPE (result)), result),
                   init),
                  result);
    }

  return convert (result_type, result);
}
/* Fill in a VMS descriptor for EXPR and return a constructor for it.
   GNAT_FORMAL is how we find the descriptor record.  GNAT_ACTUAL is
   how we derive the source location to raise C_E on an out-of-range
   pointer.  */
tree
fill_vms_descriptor (tree expr, Entity_Id gnat_formal, Node_Id gnat_actual)
{
  tree parm_decl = get_gnu_tree (gnat_formal);
  tree record_type = TREE_TYPE (TREE_TYPE (parm_decl));
  tree const_list = NULL_TREE, field;
  const bool do_range_check
    = strcmp ("MBO",
              IDENTIFIER_POINTER (DECL_NAME (TYPE_FIELDS (record_type))));

  expr = maybe_unconstrained_array (expr);
  gnat_mark_addressable (expr);

  for (field = TYPE_FIELDS (record_type); field; field = TREE_CHAIN (field))
    {
      tree conexpr = convert (TREE_TYPE (field),
                              SUBSTITUTE_PLACEHOLDER_IN_EXPR
                              (DECL_INITIAL (field), expr));

      /* Check to ensure that only 32-bit pointers are passed in
         32-bit descriptors.  */
      if (do_range_check
          && strcmp (IDENTIFIER_POINTER (DECL_NAME (field)), "POINTER") == 0)
        {
          tree pointer64type
            = build_pointer_type_for_mode (void_type_node, DImode, false);
          tree addr64expr = build_unary_op (ADDR_EXPR, pointer64type, expr);
          tree malloc64low
            = build_int_cstu (long_integer_type_node, 0x80000000);

          add_stmt (build3 (COND_EXPR, void_type_node,
                            build_binary_op (GE_EXPR, boolean_type_node,
                                             convert (long_integer_type_node,
                                                      addr64expr),
                                             malloc64low),
                            build_call_raise (CE_Range_Check_Failed,
                                              gnat_actual,
                                              N_Raise_Constraint_Error),
                            NULL_TREE));
        }
      const_list = tree_cons (field, conexpr, const_list);
    }

  return gnat_build_constructor (record_type, nreverse (const_list));
}
/* Indicate that we need to take the address of T and that it therefore
   should not be allocated in a register.  Returns true if successful.  */
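/* For example (illustrative): marking a reference such as Obj.F (I) peels
   the ARRAY_REF and COMPONENT_REF wrappers in the loop below until it
   reaches the VAR_DECL for Obj, whose TREE_ADDRESSABLE flag is then set so
   the object won't live in a register.  */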
bool
gnat_mark_addressable (tree t)
{
  while (true)
    switch (TREE_CODE (t))
      {
      case ADDR_EXPR:
      case COMPONENT_REF:
      case ARRAY_REF:
      case ARRAY_RANGE_REF:
      case REALPART_EXPR:
      case IMAGPART_EXPR:
      case VIEW_CONVERT_EXPR:
      case NON_LVALUE_EXPR:
      CASE_CONVERT:
        t = TREE_OPERAND (t, 0);
        break;

      case COMPOUND_EXPR:
        t = TREE_OPERAND (t, 1);
        break;

      case CONSTRUCTOR:
        TREE_ADDRESSABLE (t) = 1;
        return true;

      case VAR_DECL:
      case PARM_DECL:
      case RESULT_DECL:
        TREE_ADDRESSABLE (t) = 1;
        return true;

      case FUNCTION_DECL:
        TREE_ADDRESSABLE (t) = 1;
        return true;

      case CONST_DECL:
        return DECL_CONST_CORRESPONDING_VAR (t)
               && gnat_mark_addressable (DECL_CONST_CORRESPONDING_VAR (t));

      default:
        return true;
      }
}
/* Save EXP for later use or reuse.  This is equivalent to save_expr in
   tree.c but we know how to handle our own nodes.  */
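/* For instance (a sketch): if EXP is a bound computation such as A'Last
   that is needed both in a comparison and in a size expression, wrapping
   it here in a SAVE_EXPR guarantees it is evaluated only once and that
   the same value is reused at every occurrence.  */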
tree
gnat_save_expr (tree exp)
{
  tree type = TREE_TYPE (exp);
  enum tree_code code = TREE_CODE (exp);

  if (TREE_CONSTANT (exp) || code == SAVE_EXPR || code == NULL_EXPR)
    return exp;

  if (code == UNCONSTRAINED_ARRAY_REF)
    {
      tree t = build1 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)));
      TREE_READONLY (t) = TYPE_READONLY (type);
      return t;
    }

  /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
     This may be more efficient, but will also allow us to more easily find
     the match for the PLACEHOLDER_EXPR.  */
  if (code == COMPONENT_REF
      && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    return build3 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)),
                   TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));

  return save_expr (exp);
}
/* Protect EXP for immediate reuse.  This is a variant of gnat_save_expr that
   is optimized under the assumption that EXP's value doesn't change before
   its subsequent reuse(s) except through its potential reevaluation.  */
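/* Contrast with gnat_save_expr above (an informal summary): gnat_save_expr
   guards against any later change of the value, whereas this routine only
   guards against reevaluation; that is why an INDIRECT_REF can be handled
   below by protecting just the address, since the designated data cannot
   change under the stated assumption.  */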
tree
gnat_protect_expr (tree exp)
{
  tree type = TREE_TYPE (exp);
  enum tree_code code = TREE_CODE (exp);

  if (TREE_CONSTANT (exp) || code == SAVE_EXPR || code == NULL_EXPR)
    return exp;

  /* If EXP has no side effects, we theoretically don't need to do anything.
     However, we may be recursively passed more and more complex expressions
     involving checks which will be reused multiple times and eventually be
     unshared for gimplification; in order to avoid a complexity explosion
     at that point, we protect any expressions more complex than a simple
     arithmetic expression.  */
  if (!TREE_SIDE_EFFECTS (exp))
    {
      tree inner = skip_simple_arithmetic (exp);
      if (!EXPR_P (inner) || REFERENCE_CLASS_P (inner))
        return exp;
    }

  /* If this is a conversion, protect what's inside the conversion.  */
  if (code == NON_LVALUE_EXPR
      || CONVERT_EXPR_CODE_P (code)
      || code == VIEW_CONVERT_EXPR)
    return build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));

  /* If we're indirectly referencing something, we only need to protect the
     address since the data itself can't change in these situations.  */
  if (code == INDIRECT_REF || code == UNCONSTRAINED_ARRAY_REF)
    {
      tree t = build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
      TREE_READONLY (t) = TYPE_READONLY (type);
      return t;
    }

  /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
     This may be more efficient, but will also allow us to more easily find
     the match for the PLACEHOLDER_EXPR.  */
  if (code == COMPONENT_REF
      && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
                   TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));

  /* If this is a fat pointer or something that can be placed in a register,
     just make a SAVE_EXPR.  Likewise for a CALL_EXPR as large objects are
     returned via invisible reference in most ABIs so the temporary will
     directly be filled by the callee.  */
  if (TYPE_IS_FAT_POINTER_P (type)
      || TYPE_MODE (type) != BLKmode
      || code == CALL_EXPR)
    return save_expr (exp);

  /* Otherwise, this is a reference: protect the address and dereference.  */
  return
    build_unary_op (INDIRECT_REF, type,
                    save_expr (build_unary_op (ADDR_EXPR,
                                               build_reference_type (type),
                                               exp)));
}
/* This is equivalent to stabilize_reference_1 in tree.c but we take an extra
   argument to force evaluation of everything.  */
static tree
gnat_stabilize_reference_1 (tree e, bool force)
{
  enum tree_code code = TREE_CODE (e);
  tree type = TREE_TYPE (e);
  tree result;

  /* We cannot ignore const expressions because it might be a reference
     to a const array whose index contains side effects.  But we can
     ignore things that are actual constants or that have already been
     handled by this function.  */
  if (TREE_CONSTANT (e) || code == SAVE_EXPR)
    return e;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_exceptional:
    case tcc_declaration:
    case tcc_comparison:
    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* If this is a COMPONENT_REF of a fat pointer, save the entire
         fat pointer.  This may be more efficient, but will also allow
         us to more easily find the match for the PLACEHOLDER_EXPR.  */
      if (code == COMPONENT_REF
          && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (e, 0))))
        result
          = build3 (code, type,
                    gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force),
                    TREE_OPERAND (e, 1), TREE_OPERAND (e, 2));
      /* If the expression has side effects, then encase it in a SAVE_EXPR
         so that it will only be evaluated once.  */
      /* The tcc_reference and tcc_comparison classes could be handled as
         below, but it is generally faster to only evaluate them once.  */
      else if (TREE_SIDE_EFFECTS (e) || force)
        return save_expr (e);
      else
        return e;
      break;

    case tcc_binary:
      /* Recursively stabilize each operand.  */
      result
        = build2 (code, type,
                  gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force),
                  gnat_stabilize_reference_1 (TREE_OPERAND (e, 1), force));
      break;

    case tcc_unary:
      /* Recursively stabilize the operand.  */
      result
        = build1 (code, type,
                  gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force));
      break;

    default:
      gcc_unreachable ();
    }

  /* See similar handling in gnat_stabilize_reference.  */
  TREE_READONLY (result) = TREE_READONLY (e);
  TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (e);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);

  return result;
}
/* This is equivalent to stabilize_reference in tree.c but we know how to
   handle our own nodes and we take extra arguments.  FORCE says whether to
   force evaluation of everything.  We set SUCCESS to true unless we walk
   through something we don't know how to stabilize.  */
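/* For example (illustrative): stabilizing a reference such as A (F (I)).B
   rebuilds the COMPONENT_REF and ARRAY_REF below, recursively stabilizing
   the base A and wrapping the side-effecting index F (I) in a SAVE_EXPR,
   so the whole reference can be evaluated several times with consistent
   results.  */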
tree
gnat_stabilize_reference (tree ref, bool force, bool *success)
{
  tree type = TREE_TYPE (ref);
  enum tree_code code = TREE_CODE (ref);
  tree result;

  /* Assume we'll succeed unless proven otherwise.  */
  if (success)
    *success = true;

  switch (code)
    {
    case CONST_DECL:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* No action is needed in this case.  */
      return ref;

    case ADDR_EXPR:
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case VIEW_CONVERT_EXPR:
      result
        = build1 (code, type,
                  gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
                                            success));
      break;

    case INDIRECT_REF:
    case UNCONSTRAINED_ARRAY_REF:
      result = build1 (code, type,
                       gnat_stabilize_reference_1 (TREE_OPERAND (ref, 0),
                                                   force));
      break;

    case COMPONENT_REF:
      result = build3 (COMPONENT_REF, type,
                       gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
                                                 success),
                       TREE_OPERAND (ref, 1), NULL_TREE);
      break;

    case BIT_FIELD_REF:
      result = build3 (BIT_FIELD_REF, type,
                       gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
                                                 success),
                       gnat_stabilize_reference_1 (TREE_OPERAND (ref, 1),
                                                   force),
                       gnat_stabilize_reference_1 (TREE_OPERAND (ref, 2),
                                                   force));
      break;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      result = build4 (code, type,
                       gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
                                                 success),
                       gnat_stabilize_reference_1 (TREE_OPERAND (ref, 1),
                                                   force),
                       NULL_TREE, NULL_TREE);
      break;

    case CALL_EXPR:
      result = gnat_stabilize_reference_1 (ref, force);
      break;

    case COMPOUND_EXPR:
      result = build2 (COMPOUND_EXPR, type,
                       gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
                                                 success),
                       gnat_stabilize_reference_1 (TREE_OPERAND (ref, 1),
                                                   force));
      break;

    case CONSTRUCTOR:
      /* Constructors with 1 element are used extensively to formally
         convert objects to special wrapping types.  */
      if (TREE_CODE (type) == RECORD_TYPE
          && VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ref)) == 1)
        {
          tree index
            = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (ref), 0)->index;
          tree value
            = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (ref), 0)->value;
          result
            = build_constructor_single (type, index,
                                        gnat_stabilize_reference_1 (value,
                                                                    force));
        }
      else
        {
          if (success)
            *success = false;
          return ref;
        }
      break;

    case ERROR_MARK:
      ref = error_mark_node;

      /* ...  fall through to failure ...  */

      /* If REF isn't a kind of lvalue we recognize, make no change.
         Caller should recognize the error for an invalid lvalue.  */
    default:
      if (success)
        *success = false;
      return ref;
    }

  /* TREE_THIS_VOLATILE and TREE_SIDE_EFFECTS set on the initial expression
     may not be sustained across some paths, such as the one via build1 for
     INDIRECT_REF.  We reset those flags here in the general case, which is
     consistent with the GCC version of this routine.

     Special care should be taken regarding TREE_SIDE_EFFECTS, because some
     paths introduce side effects where there was none initially (e.g. if a
     SAVE_EXPR is built) and we also want to keep track of that.  */
  TREE_READONLY (result) = TREE_READONLY (ref);
  TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);

  return result;
}