/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                               U T I L S 2                                *
 *                                                                          *
 *                          C Implementation File                          *
 *                                                                          *
 *          Copyright (C) 1992-2024, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software; you can redistribute it and/or modify it under    *
 * terms of the GNU General Public License as published by the Free Soft-   *
 * ware Foundation; either version 3, or (at your option) any later ver-    *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY   *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for more details.  You should have received a copy of the GNU General    *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed by the GNAT team at New York University.   *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "memmodel.h"
#include "tm.h"
#include "vec.h"
#include "alias.h"
#include "tree.h"
#include "inchash.h"
#include "builtins.h"
#include "expmed.h"
#include "fold-const.h"
#include "optabs-query.h"
#include "stor-layout.h"
#include "stringpool.h"
#include "varasm.h"
#include "flags.h"
#include "toplev.h"
#include "ggc.h"
#include "tree-inline.h"

#include "ada.h"
#include "types.h"
#include "atree.h"
#include "elists.h"
#include "namet.h"
#include "nlists.h"
#include "snames.h"
#include "stringt.h"
#include "uintp.h"
#include "fe.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"

/* Return the base type of TYPE.  */
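/* For instance (an illustrative sketch, not from the original source): for
   an Ada subtype declared as "subtype S is Integer range 1 .. 10", this
   peels off the subtype layers via TREE_TYPE and returns the tree for the
   underlying base type Integer.  */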
tree
get_base_type (tree type)
{
  if (TREE_CODE (type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (type))
    type = TREE_TYPE (TYPE_FIELDS (type));

  while (TREE_TYPE (type)
         && (TREE_CODE (type) == INTEGER_TYPE
             || SCALAR_FLOAT_TYPE_P (type)))
    type = TREE_TYPE (type);

  return type;
}

/* EXP is a GCC tree representing an address.  See if we can find how strictly
   the object at this address is aligned and, if so, return the alignment of
   the object in bits.  Otherwise return 0.  */
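/* For example (an illustrative sketch): for EXP = &X where X is a declared
   4-byte-aligned integer variable, the ADDR_EXPR case below returns
   DECL_ALIGN (X), i.e. 32; when nothing better is known, the alignment of
   the pointed-to type is used as a fallback.  */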
unsigned int
known_alignment (tree exp)
{
  unsigned int this_alignment;
  unsigned int lhs, rhs;

  switch (TREE_CODE (exp))
    {
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Conversions between pointers and integers don't change the alignment
         of the underlying object.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 0));
      break;

    case COMPOUND_EXPR:
      /* The value of a COMPOUND_EXPR is that of its second operand.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* If two addresses are added, the alignment of the result is the
         minimum of the two alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));
      this_alignment = MIN (lhs, rhs);
      break;

    case POINTER_PLUS_EXPR:
      /* If this is the pattern built for aligning types, decode it.  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)) == NEGATE_EXPR)
        {
          tree op = TREE_OPERAND (TREE_OPERAND (exp, 1), 1);
          return
            known_alignment (fold_build1 (BIT_NOT_EXPR, TREE_TYPE (op), op));
        }

      /* If we don't know the alignment of the offset, we assume that
         of the base.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));

      if (rhs == 0)
        this_alignment = lhs;
      else
        this_alignment = MIN (lhs, rhs);
      break;

    case COND_EXPR:
      /* If there is a choice between two values, use the smaller one.  */
      lhs = known_alignment (TREE_OPERAND (exp, 1));
      rhs = known_alignment (TREE_OPERAND (exp, 2));
      this_alignment = MIN (lhs, rhs);
      break;

    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT c = TREE_INT_CST_LOW (exp);
        /* The first part of this represents the lowest bit in the constant,
           but it is originally in bytes, not bits.  */
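        /* For instance (illustrative): if the constant is 24, then
           c & -c = 8, i.e. the address is a multiple of 8 bytes, so the
           result is 8 * BITS_PER_UNIT = 64 bits on 8-bit-byte targets.  */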
        this_alignment = (c & -c) * BITS_PER_UNIT;
      }
      break;

    case MULT_EXPR:
      /* If we know the alignment of just one side, use it.  Otherwise,
         use the product of the alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));

      if (lhs == 0)
        this_alignment = rhs;
      else if (rhs == 0)
        this_alignment = lhs;
      else
        this_alignment = MIN (lhs * rhs, BIGGEST_ALIGNMENT);
      break;

    case BIT_AND_EXPR:
      /* A bit-and expression is as aligned as the maximum alignment of the
         operands.  We typically get here for a complex lhs and a constant
         negative power of two on the rhs to force an explicit alignment, so
         don't bother looking at the lhs.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case ADDR_EXPR:
      if (DECL_P (TREE_OPERAND (exp, 0)))
        this_alignment = DECL_ALIGN (TREE_OPERAND (exp, 0));
      else
        this_alignment = get_object_alignment (TREE_OPERAND (exp, 0));
      break;

    case CALL_EXPR:
      {
        tree fndecl = get_callee_fndecl (exp);
        if (fndecl == malloc_decl || fndecl == realloc_decl)
          return get_target_system_allocator_alignment () * BITS_PER_UNIT;

        tree t = maybe_inline_call_in_expr (exp);
        if (t)
          return known_alignment (t);
      }

      /* ... fall through ... */

    default:
      /* For other pointer expressions, we assume that the pointed-to object
         is at least as aligned as the pointed-to type.  Beware that we can
         have a dummy type here (e.g. a Taft Amendment type), for which the
         alignment is meaningless and should be ignored.  */
      if (POINTER_TYPE_P (TREE_TYPE (exp))
          && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp)))
          && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (exp))))
        this_alignment = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
      else
        this_alignment = 0;
      break;
    }

  return this_alignment;
}

/* We have a comparison or assignment operation on two types, T1 and T2, which
   are either both array types or both record types.  T1 is assumed to be for
   the left hand side operand, and T2 for the right hand side.  Return the
   type that both operands should be converted to for the operation, if any.
   Otherwise return zero.  */
static tree
find_common_type (tree t1, tree t2)
{
  /* ??? As of today, various constructs lead to here with types of different
     sizes even when both constants (e.g. tagged types, packable vs regular
     component types, padded vs unpadded types, ...).  While some of these
     would better be handled upstream (types should be made consistent before
     calling into build_binary_op), some others are really expected and we
     have to be careful.  */

  const bool variable_record_on_lhs
    = (TREE_CODE (t1) == RECORD_TYPE
       && TREE_CODE (t2) == RECORD_TYPE
       && get_variant_part (t1)
       && !get_variant_part (t2));

  const bool variable_array_on_lhs
    = (TREE_CODE (t1) == ARRAY_TYPE
       && TREE_CODE (t2) == ARRAY_TYPE
       && !TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)))
       && TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (t2))));

  /* We must avoid writing more than what the target can hold if this is for
     an assignment and the case of tagged types is handled in build_binary_op
     so we use the lhs type if it is known to be smaller or of constant size
     and the rhs type is not, whatever the modes.  We also force t1 in case of
     constant size equality to minimize occurrences of view conversions on the
     lhs of an assignment, except for the case of types with a variable part
     on the lhs but not on the rhs to make the conversion simpler.  */
  if (TREE_CONSTANT (TYPE_SIZE (t1))
      && (!TREE_CONSTANT (TYPE_SIZE (t2))
          || tree_int_cst_lt (TYPE_SIZE (t1), TYPE_SIZE (t2))
          || (TYPE_SIZE (t1) == TYPE_SIZE (t2)
              && !variable_record_on_lhs
              && !variable_array_on_lhs)))
    return t1;

  /* Otherwise, if the lhs type is non-BLKmode, use it, except for the case of
     a non-BLKmode rhs and array types with a variable part on the lhs but not
     on the rhs to make sure the conversion is preserved during gimplification.
     Note that we know that we will not have any alignment problems since, if
     we did, the non-BLKmode type could not have been used.  */
  if (TYPE_MODE (t1) != BLKmode
      && (TYPE_MODE (t2) == BLKmode || !variable_array_on_lhs))
    return t1;

  /* If the rhs type is of constant size, use it whatever the modes.  At
     this point it is known to be smaller, or of constant size and the
     lhs type is not.  */
  if (TREE_CONSTANT (TYPE_SIZE (t2)))
    return t2;

  /* Otherwise, if the rhs type is non-BLKmode, use it.  */
  if (TYPE_MODE (t2) != BLKmode)
    return t2;

  /* In this case, both types have variable size and BLKmode.  It's
     probably best to leave the "type mismatch" because changing it
     could cause a bad self-referential reference.  */
  return NULL_TREE;
}

/* Return an expression tree representing an equality comparison of A1 and A2,
   two objects of type ARRAY_TYPE.  The result should be of type RESULT_TYPE.

   Two arrays are equal in one of two ways: (1) if both have zero length in
   some dimension (not necessarily the same dimension) or (2) if the lengths
   in each dimension are equal and the data is equal.  We perform the length
   tests in as efficient a manner as possible.  */
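/* For example, in Ada, "A (1 .. 0) = B (5 .. 4)" evaluates to True by way
   of rule (1), since both operands are null arrays, even though the bounds
   differ (a hedged illustration, not taken from a testcase).  */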
static tree
compare_arrays_for_equality (location_t loc, tree result_type, tree a1, tree a2)
{
  tree result = convert (result_type, boolean_true_node);
  tree a1_is_null = convert (result_type, boolean_false_node);
  tree a2_is_null = convert (result_type, boolean_false_node);
  tree t1 = TREE_TYPE (a1);
  tree t2 = TREE_TYPE (a2);
  bool a1_side_effects_p = TREE_SIDE_EFFECTS (a1);
  bool a2_side_effects_p = TREE_SIDE_EFFECTS (a2);
  bool length_zero_p = false;

  /* If the operands have side-effects, they need to be evaluated only once
     in spite of the multiple references in the comparison.  */
  if (a1_side_effects_p)
    a1 = gnat_protect_expr (a1);

  if (a2_side_effects_p)
    a2 = gnat_protect_expr (a2);

  /* Process each dimension separately and compare the lengths.  If any
     dimension has a length known to be zero, set LENGTH_ZERO_P to true
     in order to suppress the comparison of the data at the end.  */
  while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
    {
      tree dom1 = TYPE_DOMAIN (t1);
      tree dom2 = TYPE_DOMAIN (t2);
      tree length1 = size_binop (PLUS_EXPR,
                                 size_binop (MINUS_EXPR,
                                             TYPE_MAX_VALUE (dom1),
                                             TYPE_MIN_VALUE (dom1)),
                                 size_one_node);
      tree length2 = size_binop (PLUS_EXPR,
                                 size_binop (MINUS_EXPR,
                                             TYPE_MAX_VALUE (dom2),
                                             TYPE_MIN_VALUE (dom2)),
                                 size_one_node);
      tree ind1 = TYPE_INDEX_TYPE (dom1);
      tree ind2 = TYPE_INDEX_TYPE (dom2);
      tree base_type = maybe_character_type (get_base_type (ind1));
      tree lb1 = convert (base_type, TYPE_MIN_VALUE (ind1));
      tree ub1 = convert (base_type, TYPE_MAX_VALUE (ind1));
      tree lb2 = convert (base_type, TYPE_MIN_VALUE (ind2));
      tree ub2 = convert (base_type, TYPE_MAX_VALUE (ind2));
      tree comparison, this_a1_is_null, this_a2_is_null;

      /* If the length of the first array is a constant and that of the second
         array is not, swap our operands to have the constant second.  */
      if (TREE_CODE (length1) == INTEGER_CST
          && TREE_CODE (length2) != INTEGER_CST)
        {
          tree tem;
          bool btem;

          tem = a1, a1 = a2, a2 = tem;
          tem = t1, t1 = t2, t2 = tem;
          tem = lb1, lb1 = lb2, lb2 = tem;
          tem = ub1, ub1 = ub2, ub2 = tem;
          tem = length1, length1 = length2, length2 = tem;
          tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
          btem = a1_side_effects_p, a1_side_effects_p = a2_side_effects_p,
          a2_side_effects_p = btem;
        }

      /* If the length of the second array is the constant zero, we can just
         use the original stored bounds for the first array and see whether
         last < first holds.  */
      if (integer_zerop (length2))
        {
          length_zero_p = true;

          lb1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (lb1, a1);
          ub1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (ub1, a1);

          comparison = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);

          this_a1_is_null = comparison;
          this_a2_is_null = convert (result_type, boolean_true_node);
        }

      /* Otherwise, if the length is some other constant value, we know that
         this dimension in the second array cannot be superflat, so we can
         just use its length computed from the actual stored bounds.  */
      else if (TREE_CODE (length2) == INTEGER_CST)
        {
          /* Note that we know that LB2 and UB2 are constant and hence
             cannot contain a PLACEHOLDER_EXPR.  */
          lb1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (lb1, a1);
          ub1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (ub1, a1);

          comparison
            = fold_build2_loc (loc, EQ_EXPR, result_type,
                               build_binary_op (MINUS_EXPR, base_type,
                                                ub1, lb1),
                               build_binary_op (MINUS_EXPR, base_type,
                                                ub2, lb2));
          this_a1_is_null
            = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);

          this_a2_is_null = convert (result_type, boolean_false_node);
        }

      /* Otherwise, compare the computed lengths.  */
      else
        {
          length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
          length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);

          comparison
            = fold_build2_loc (loc, EQ_EXPR, result_type, length1, length2);

          lb1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (lb1, a1);
          ub1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (ub1, a1);

          this_a1_is_null
            = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);

          lb2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (lb2, a2);
          ub2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (ub2, a2);

          this_a2_is_null
            = fold_build2_loc (loc, LT_EXPR, result_type, ub2, lb2);
        }

      /* Append expressions for this dimension to the final expressions.  */
      result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
                                result, comparison);

      a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                    this_a1_is_null, a1_is_null);

      a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                    this_a2_is_null, a2_is_null);

      t1 = TREE_TYPE (t1);
      t2 = TREE_TYPE (t2);
    }

  /* Unless the length of some dimension is known to be zero, compare the
     data in the array.  */
  if (!length_zero_p)
    {
      tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));
      tree comparison;

      if (type)
        {
          a1 = convert (type, a1),
          a2 = convert (type, a2);
        }

      comparison = fold_build2_loc (loc, EQ_EXPR, result_type, a1, a2);

      result
        = build_binary_op (TRUTH_ANDIF_EXPR, result_type, result, comparison);
    }

  /* The result is also true if both sizes are zero.  */
  result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                            build_binary_op (TRUTH_ANDIF_EXPR, result_type,
                                             a1_is_null, a2_is_null),
                            result);

  /* If the operands have side-effects, they need to be evaluated before
     doing the tests above since the place they otherwise would end up
     being evaluated at run time could be wrong.  */
  if (a1_side_effects_p)
    result = build2 (COMPOUND_EXPR, result_type, a1, result);

  if (a2_side_effects_p)
    result = build2 (COMPOUND_EXPR, result_type, a2, result);

  return result;
}

/* Return an expression tree representing an ordering comparison of A1 and A2,
   two objects of type ARRAY_TYPE.  The result should be of type RESULT_TYPE.

   A1 is less than A2 according to one of the following alternatives:
   - when A1's length is less than A2's length: if every element of A1 is
     equal to its counterpart in A2 or the first differing element is smaller
     in A1 than in A2,
   - otherwise: if not every element of A2 is equal to its counterpart in A1
     and the first differing element is smaller in A1 than in A2.

   The other 3 ordering comparisons can be easily deduced from this one.  */
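/* For instance (illustrative): for strings, "ab" < "abc" by the first
   alternative because "ab" is shorter and all its characters match, and
   "abc" < "abd" by the second because the first differing element
   satisfies 'c' < 'd'.  */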
static tree
compare_arrays_for_ordering (location_t loc, tree result_type, tree a1, tree a2)
{
  const bool a1_side_effects_p = TREE_SIDE_EFFECTS (a1);
  const bool a2_side_effects_p = TREE_SIDE_EFFECTS (a2);
  tree t1 = TREE_TYPE (a1);
  tree t2 = TREE_TYPE (a2);
  tree dom1 = TYPE_DOMAIN (t1);
  tree dom2 = TYPE_DOMAIN (t2);
  tree length1 = size_binop (PLUS_EXPR,
                             size_binop (MINUS_EXPR,
                                         TYPE_MAX_VALUE (dom1),
                                         TYPE_MIN_VALUE (dom1)),
                             size_one_node);
  tree length2 = size_binop (PLUS_EXPR,
                             size_binop (MINUS_EXPR,
                                         TYPE_MAX_VALUE (dom2),
                                         TYPE_MIN_VALUE (dom2)),
                             size_one_node);
  tree addr1, addr2, fndecl, result;

  /* If the lengths are known at compile time, fold the alternative and let the
     gimplifier optimize the case of power-of-two lengths.  */
  if (TREE_CODE (length1) == INTEGER_CST && TREE_CODE (length2) == INTEGER_CST)
    return tree_int_cst_compare (length1, length2) < 0
           ? fold_build2_loc (loc, LE_EXPR, result_type, a1, convert (t1, a2))
           : fold_build2_loc (loc, LT_EXPR, result_type, convert (t2, a1), a2);

  /* If the operands have side-effects, they need to be evaluated only once
     in spite of the multiple references in the comparison.  */
  if (a1_side_effects_p)
    a1 = gnat_protect_expr (a1);

  if (a2_side_effects_p)
    a2 = gnat_protect_expr (a2);

  length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
  length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);

  /* If the lengths are not known at compile time, call memcmp directly with
     the actual lengths since a1 and a2 may have the same nominal subtype.  */
  addr1 = build_fold_addr_expr_loc (loc, a1);
  addr2 = build_fold_addr_expr_loc (loc, a2);
  fndecl = builtin_decl_implicit (BUILT_IN_MEMCMP);

  result
    = fold_build3_loc (loc, COND_EXPR, result_type,
                       fold_build2_loc (loc, LT_EXPR, boolean_type_node,
                                        length1, length2),
                       fold_build2_loc (loc, LE_EXPR, result_type,
                                        build_call_expr_loc (loc, fndecl, 3,
                                                             addr1, addr2,
                                                             length1),
                                        integer_zero_node),
                       fold_build2_loc (loc, LT_EXPR, result_type,
                                        build_call_expr_loc (loc, fndecl, 3,
                                                             addr1, addr2,
                                                             length2),
                                        integer_zero_node));

  /* If the operands have side-effects, they need to be evaluated before
     doing the tests above since the place they otherwise would end up
     being evaluated at run time could be wrong.  */
  if (a1_side_effects_p)
    result = build2 (COMPOUND_EXPR, result_type, a1, result);

  if (a2_side_effects_p)
    result = build2 (COMPOUND_EXPR, result_type, a2, result);

  return result;
}

/* Return an expression tree representing an equality comparison of P1 and P2,
   two objects of fat pointer type.  The result should be of type RESULT_TYPE.

   Two fat pointers are equal in one of two ways: (1) if both have a null
   pointer to the array or (2) if they contain the same pair of pointers.
   We perform the comparison in as efficient a manner as possible.  */
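/* Recall that a fat pointer here is a record with two pointer fields: the
   first (TYPE_FIELDS) points to the array data and the second (its
   DECL_CHAIN) points to the bounds, as accessed below.  */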
static tree
compare_fat_pointers (location_t loc, tree result_type, tree p1, tree p2)
{
  tree p1_array, p2_array, p1_bounds, p2_bounds, same_array, same_bounds;
  tree p1_array_is_null, p2_array_is_null;

  /* If either operand has side-effects, they have to be evaluated only once
     in spite of the multiple references to the operand in the comparison.  */
  p1 = gnat_protect_expr (p1);
  p2 = gnat_protect_expr (p2);

  /* The constant folder doesn't fold fat pointer types so we do it here.  */
  if (TREE_CODE (p1) == CONSTRUCTOR)
    p1_array = CONSTRUCTOR_ELT (p1, 0)->value;
  else
    p1_array = build_component_ref (p1, TYPE_FIELDS (TREE_TYPE (p1)), true);

  p1_array_is_null
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array,
                       fold_convert_loc (loc, TREE_TYPE (p1_array),
                                         null_pointer_node));

  if (TREE_CODE (p2) == CONSTRUCTOR)
    p2_array = CONSTRUCTOR_ELT (p2, 0)->value;
  else
    p2_array = build_component_ref (p2, TYPE_FIELDS (TREE_TYPE (p2)), true);

  p2_array_is_null
    = fold_build2_loc (loc, EQ_EXPR, result_type, p2_array,
                       fold_convert_loc (loc, TREE_TYPE (p2_array),
                                         null_pointer_node));

  /* If one of the pointers to the array is null, just compare the other.  */
  if (integer_zerop (p1_array))
    return p2_array_is_null;
  else if (integer_zerop (p2_array))
    return p1_array_is_null;

  /* Otherwise, do the fully-fledged comparison.  */
  same_array
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array, p2_array);

  if (TREE_CODE (p1) == CONSTRUCTOR)
    p1_bounds = CONSTRUCTOR_ELT (p1, 1)->value;
  else
    p1_bounds
      = build_component_ref (p1, DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p1))),
                             true);

  if (TREE_CODE (p2) == CONSTRUCTOR)
    p2_bounds = CONSTRUCTOR_ELT (p2, 1)->value;
  else
    p2_bounds
      = build_component_ref (p2, DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p2))),
                             true);

  same_bounds
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_bounds, p2_bounds);

  /* P1_ARRAY == P2_ARRAY && (P1_ARRAY == NULL || P1_BOUNDS == P2_BOUNDS).  */
  return build_binary_op (TRUTH_ANDIF_EXPR, result_type, same_array,
                          build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                           p1_array_is_null, same_bounds));
}

/* Try to compute the reduction of OP modulo MODULUS in PRECISION bits with a
   division-free algorithm.  Return NULL_TREE if this is not easily doable.  */
tree
fast_modulo_reduction (tree op, tree modulus, unsigned int precision)
{
  const tree type = TREE_TYPE (op);
  const unsigned int type_precision = TYPE_PRECISION (type);

  /* The implementation is host-dependent for the time being.  */
  if (type_precision <= HOST_BITS_PER_WIDE_INT)
    {
      const unsigned HOST_WIDE_INT d = tree_to_uhwi (modulus);
      unsigned HOST_WIDE_INT ml, mh;
      int pre_shift, post_shift;
      tree t;

      /* The trick is to replace the division by d with a multiply-and-shift
         sequence parameterized by a (multiplier, shifter) pair computed from
         d, the precision of the type and the needed precision:

           op / d = (op * multiplier) >> shifter

         But choose_multiplier provides a slightly different interface:

           op / d = (op h* multiplier) >> reduced_shifter

         that makes things easier by using a high-part multiplication.  */
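      /* For instance (an illustrative sketch): for a division by d = 10
         in 32 bits, choose_multiplier may return the multiplier 0xCCCCCCCD
         with a reduced shift of 3, so that op / 10 is computed as the high
         32 bits of op * 0xCCCCCCCD, shifted right by 3.  */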
      mh = choose_multiplier (d, type_precision, precision, &ml, &post_shift);

      /* If the suggested multiplier is more than TYPE_PRECISION bits, we can
         do better for even divisors, using an initial right shift.  */
      if (mh != 0 && (d & 1) == 0)
        {
          pre_shift = ctz_or_zero (d);
          mh = choose_multiplier (d >> pre_shift, type_precision,
                                  precision - pre_shift, &ml, &post_shift);
        }
      else
        pre_shift = 0;

      /* If the suggested multiplier is still more than TYPE_PRECISION bits,
         or the TYPE_MODE does not have a high-part multiply, try again with
         a larger type up to the word size.  */
      if (mh != 0 || !can_mult_highpart_p (TYPE_MODE (type), true))
        {
          if (type_precision < BITS_PER_WORD)
            {
              const scalar_int_mode m
                = smallest_int_mode_for_size (type_precision + 1);
              tree new_type = gnat_type_for_mode (m, 1);
              op = fold_convert (new_type, op);
              modulus = fold_convert (new_type, modulus);
              t = fast_modulo_reduction (op, modulus, precision);
              if (t)
                return fold_convert (type, t);
            }

          return NULL_TREE;
        }

      /* This computes op - (op / modulus) * modulus with PRECISION bits.  */
      op = gnat_protect_expr (op);

      /* t = op >> pre_shift
         t = t h* ml
         t = t >> post_shift
         t = t * modulus  */
      if (pre_shift)
        t = fold_build2 (RSHIFT_EXPR, type, op,
                         build_int_cst (type, pre_shift));
      else
        t = op;
      t = fold_build2 (MULT_HIGHPART_EXPR, type, t, build_int_cst (type, ml));
      if (post_shift)
        t = fold_build2 (RSHIFT_EXPR, type, t,
                         build_int_cst (type, post_shift));
      t = fold_build2 (MULT_EXPR, type, t, modulus);

      return fold_build2 (MINUS_EXPR, type, op, t);
    }

  else
    return NULL_TREE;
}

/* Compute the result of applying OP_CODE to LHS and RHS, where both are of
   TYPE.  We know that TYPE is a modular type with a nonbinary modulus.  */
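/* For example (illustrative): for a modular type "mod 7", an addition such
   as 5 + 6 is done in a wider type, giving 11, and the COND_EXPR built
   below subtracts the modulus to yield 4, i.e. (5 + 6) mod 7.  */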
static tree
nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
                             tree rhs)
{
  tree modulus = TYPE_MODULUS (type);
  unsigned precision = tree_floor_log2 (modulus) + 1;
  tree op_type, result, fmr;

  /* For the logical operations, we only need PRECISION bits.  For addition and
     subtraction, we need one more, and for multiplication twice as many.  */
  if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
    precision += 1;
  else if (op_code == MULT_EXPR)
    precision *= 2;

  /* If the type is not wide enough, make a new type of the needed precision
     and convert modulus and operands to it.  Use a type with full precision
     for its mode since operations are ultimately performed in the mode.  */
  if (TYPE_PRECISION (type) < precision)
    {
      const scalar_int_mode m = smallest_int_mode_for_size (precision);
      op_type = gnat_type_for_mode (m, 1);
      modulus = fold_convert (op_type, modulus);
      lhs = fold_convert (op_type, lhs);
      rhs = fold_convert (op_type, rhs);
    }
  else
    op_type = type;

  /* Do the operation, then we'll fix it up.  */
  result = fold_build2 (op_code, op_type, lhs, rhs);

  /* Unconditionally add the modulus to the result for a subtraction, this gets
     rid of all its peculiarities by cancelling out the addition of the binary
     modulus in the case where the subtraction wraps around in OP_TYPE, and may
     even generate better code on architectures with conditional moves.  */
  if (op_code == MINUS_EXPR)
    result = fold_build2 (PLUS_EXPR, op_type, result, modulus);

  /* For a multiplication, we first try to do a modulo reduction by means of a
     (multiplier, shifter) pair in the needed precision up to the word size, or
     else we fall back to a standard modulo operation.  But not when optimizing
     for size, because it will be longer than a div+mul+sub sequence.  */
  if (op_code == MULT_EXPR)
    {
      if (!optimize_size
          && precision <= BITS_PER_WORD
          && (fmr = fast_modulo_reduction (result, modulus, precision)))
        result = fmr;
      else
        result = fold_build2 (TRUNC_MOD_EXPR, op_type, result, modulus);
    }

  /* For the other operations, subtract the modulus if we are >= it.  */
  else
    {
      result = gnat_protect_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
                            fold_build2 (GE_EXPR, boolean_type_node,
                                         result, modulus),
                            fold_build2 (MINUS_EXPR, op_type,
                                         result, modulus),
                            result);
    }

  return fold_convert (type, result);
}

/* This page contains routines that implement the Ada semantics with regard
   to atomic objects.  They are fully piggybacked on the middle-end support
   for atomic loads and stores.

   *** Memory barriers and volatile objects ***

   We implement the weakened form of the C.6(16) clause that was introduced
   in Ada 2012 (AI05-117).  Earlier forms of this clause wouldn't have been
   implementable without significant performance hits on modern platforms.

   We also take advantage of the requirements imposed on shared variables by
   9.10 (conditions for sequential actions) to have non-erroneous execution
   and consider that C.6(16) and C.6(17) only prescribe a uniform order of
   volatile updates with regard to sequential actions, i.e. with regard to
   reads or updates of atomic objects.

   As such, an update of an atomic object by a task requires that all earlier
   accesses to volatile objects have completed.  Similarly, later accesses to
   volatile objects cannot be reordered before the update of the atomic object.
   So, memory barriers both before and after the atomic update are needed.

   For a read of an atomic object, to avoid seeing writes of volatile objects
   by a task earlier than by the other tasks, a memory barrier is needed before
   the atomic read.  Finally, to avoid reordering later reads or updates of
   volatile objects to before the atomic read, a barrier is needed after the
   atomic read.

   So, memory barriers are needed before and after atomic reads and updates.
   And, in order to simplify the implementation, we use full memory barriers
   in all cases, i.e. we enforce sequential consistency for atomic accesses.  */

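/* Concretely, this means that a synchronized 4-byte atomic load is emitted
   as a call to __atomic_load_4 with MEMMODEL_SEQ_CST (see build_atomic_load
   below), which the middle-end expands with the fences the target needs.  */
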
/* Return the size of TYPE, which must be a positive power of 2.  */

unsigned int
resolve_atomic_size (tree type)
{
  unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE_UNIT (type));

  if (size == 1 || size == 2 || size == 4 || size == 8 || size == 16)
    return size;

  /* We shouldn't reach here without having already detected that the size
     isn't compatible with an atomic access.  */
  gcc_assert (Serious_Errors_Detected);

  return 0;
}

/* Build an atomic load for the underlying atomic object in SRC.  SYNC is
   true if the load requires synchronization.  */

tree
build_atomic_load (tree src, bool sync)
{
  tree ptr_type
    = build_pointer_type
      (build_qualified_type (void_type_node,
                             TYPE_QUAL_ATOMIC | TYPE_QUAL_VOLATILE));
  tree mem_model
    = build_int_cst (integer_type_node,
                     sync ? MEMMODEL_SEQ_CST : MEMMODEL_RELAXED);
  tree orig_src = src;
  tree type, t, addr, val;
  unsigned int size;
  int fncode;

  /* Remove conversions to get the address of the underlying object.  */
  src = remove_conversions (src, false);
  type = TREE_TYPE (src);
  size = resolve_atomic_size (type);
  if (size == 0)
    return orig_src;

  fncode = (int) BUILT_IN_ATOMIC_LOAD_N + exact_log2 (size) + 1;
  t = builtin_decl_implicit ((enum built_in_function) fncode);

  addr = build_unary_op (ADDR_EXPR, ptr_type, src);
  val = build_call_expr (t, 2, addr, mem_model);

  /* First reinterpret the loaded bits in the original type of the load,
     then convert to the expected result type.  */
  t = fold_build1 (VIEW_CONVERT_EXPR, type, val);
  return convert (TREE_TYPE (orig_src), t);
}

/* Build an atomic store from SRC to the underlying atomic object in DEST.
   SYNC is true if the store requires synchronization.  */

tree
build_atomic_store (tree dest, tree src, bool sync)
{
  tree ptr_type
    = build_pointer_type
      (build_qualified_type (void_type_node,
                             TYPE_QUAL_ATOMIC | TYPE_QUAL_VOLATILE));
  tree mem_model
    = build_int_cst (integer_type_node,
                     sync ? MEMMODEL_SEQ_CST : MEMMODEL_RELAXED);
  tree orig_dest = dest;
  tree type, t, int_type, addr;
  unsigned int size;
  int fncode;

  /* Remove conversions to get the address of the underlying object.  */
  dest = remove_conversions (dest, false);
  type = TREE_TYPE (dest);
  size = resolve_atomic_size (type);
  if (size == 0)
    return build_binary_op (MODIFY_EXPR, NULL_TREE, orig_dest, src);

  fncode = (int) BUILT_IN_ATOMIC_STORE_N + exact_log2 (size) + 1;
  t = builtin_decl_implicit ((enum built_in_function) fncode);
  int_type = gnat_type_for_size (BITS_PER_UNIT * size, 1);

  /* First convert the bits to be stored to the original type of the store,
     then reinterpret them in the effective type.  But if the original type
     is a padded type with the same size, convert to the inner type instead,
     as we don't want to artificially introduce a CONSTRUCTOR here.  */
  if (TYPE_IS_PADDING_P (type)
      && TYPE_SIZE (type) == TYPE_SIZE (TREE_TYPE (TYPE_FIELDS (type))))
    src = convert (TREE_TYPE (TYPE_FIELDS (type)), src);
  else
    src = convert (type, src);
  src = fold_build1 (VIEW_CONVERT_EXPR, int_type, src);
  addr = build_unary_op (ADDR_EXPR, ptr_type, dest);

  return build_call_expr (t, 3, addr, src, mem_model);
}

/* Build a load-modify-store sequence from SRC to DEST.  GNAT_NODE is used for
   the location of the sequence.  Note that, even though the load and the store
   are both atomic, the sequence itself is not atomic.  */
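/* In other words, for an assignment Obj.Comp := Val where Obj is atomic,
   this builds the equivalent of: Temp := atomic_load (Obj'Address);
   Temp.Comp := Val; atomic_store (Obj'Address, Temp); with only the two
   whole-object accesses performed atomically (a paraphrase of the code
   below, not a quote from the reference manual).  */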
tree
build_load_modify_store (tree dest, tree src, Node_Id gnat_node)
{
  /* We will be modifying DEST below so we build a copy.  */
  dest = copy_node (dest);
  tree ref = dest;

  while (handled_component_p (ref))
    {
      /* The load should already have been generated during the translation
         of the GNAT destination tree; find it out in the GNU tree.  */
      if (TREE_CODE (TREE_OPERAND (ref, 0)) == VIEW_CONVERT_EXPR)
        {
          tree op = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
          if (TREE_CODE (op) == CALL_EXPR && call_is_atomic_load (op))
            {
              tree type = TREE_TYPE (TREE_OPERAND (ref, 0));
              tree t = CALL_EXPR_ARG (op, 0);
              tree obj, temp, stmt;

              /* Find out the loaded object.  */
              if (TREE_CODE (t) == NOP_EXPR)
                t = TREE_OPERAND (t, 0);
              if (TREE_CODE (t) == ADDR_EXPR)
                obj = TREE_OPERAND (t, 0);
              else
                obj = build1 (INDIRECT_REF, type, t);

              /* Drop atomic and volatile qualifiers for the temporary.  */
              type = TYPE_MAIN_VARIANT (type);

              /* And drop BLKmode, if need be, to put it into a register.  */
              if (TYPE_MODE (type) == BLKmode)
                {
                  unsigned int size = tree_to_uhwi (TYPE_SIZE (type));
                  type = copy_type (type);
                  machine_mode mode = int_mode_for_size (size, 0).else_blk ();
                  SET_TYPE_MODE (type, mode);
                }

              /* Create the temporary by inserting a SAVE_EXPR.  */
              temp = build1 (SAVE_EXPR, type,
                             build1 (VIEW_CONVERT_EXPR, type, op));
              TREE_OPERAND (ref, 0) = temp;

              start_stmt_group ();

              /* Build the modify of the temporary.  */
              stmt = build_binary_op (MODIFY_EXPR, NULL_TREE, dest, src);
              add_stmt_with_node (stmt, gnat_node);

              /* Build the store to the object.  */
              stmt = build_atomic_store (obj, temp, false);
              add_stmt_with_node (stmt, gnat_node);

              return end_stmt_group ();
            }
        }

      TREE_OPERAND (ref, 0) = copy_node (TREE_OPERAND (ref, 0));
      ref = TREE_OPERAND (ref, 0);
    }

  /* Something went wrong earlier if we have not found the atomic load.  */
  gcc_unreachable ();
}

/* Make a binary operation of kind OP_CODE.  RESULT_TYPE is the type
   desired for the result.  Usually the operation is to be performed
   in that type.  For INIT_EXPR and MODIFY_EXPR, RESULT_TYPE must be
   NULL_TREE.  For ARRAY_REF, RESULT_TYPE may be NULL_TREE, in which
   case the type to be used will be derived from the operands.
   Don't fold the result if NO_FOLD is true.

   This function is very much unlike the ones for C and C++ since we
   have already done any type conversion and matching required.  All we
   have to do here is validate the work done by SEM and handle subtypes.  */
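/* A typical call looks like (an illustrative sketch; the gnu_* names are
   placeholders, not taken from a specific caller):

     gnu_result = build_binary_op (PLUS_EXPR, gnu_type, gnu_lhs, gnu_rhs,
                                   false);

   where GNU_TYPE is the result type computed by the front-end; both
   operands are converted to the base type of GNU_TYPE, the operation is
   folded, and the result is converted back to GNU_TYPE.  */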
tree
build_binary_op (enum tree_code op_code, tree result_type,
                 tree left_operand, tree right_operand,
                 bool no_fold)
{
  tree left_type = TREE_TYPE (left_operand);
  tree right_type = TREE_TYPE (right_operand);
  tree left_base_type = get_base_type (left_type);
  tree right_base_type = get_base_type (right_type);
  tree operation_type = result_type;
  tree best_type = NULL_TREE;
  tree modulus, result;
  bool has_side_effects = false;

  if (operation_type
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  if (operation_type && TYPE_IS_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  modulus = (operation_type
             && TREE_CODE (operation_type) == INTEGER_TYPE
             && TYPE_MODULAR_P (operation_type)
             ? TYPE_MODULUS (operation_type) : NULL_TREE);

  switch (op_code)
    {
    case INIT_EXPR:
    case MODIFY_EXPR:
      gcc_checking_assert (!result_type);

      /* If there were integral or pointer conversions on the LHS, remove
         them; we'll be putting them back below if needed.  Likewise for
         conversions between record types, except for justified modular types.
         But don't do this if the right operand is not BLKmode (for packed
         arrays) unless we are not changing the mode, or if both operands
         are view conversions to the same type.  */
      while ((CONVERT_EXPR_P (left_operand)
              || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
             && (((INTEGRAL_TYPE_P (left_type)
                   || POINTER_TYPE_P (left_type))
                  && (INTEGRAL_TYPE_P (operand_type (left_operand))
                      || POINTER_TYPE_P (operand_type (left_operand))))
                 || (TREE_CODE (left_type) == RECORD_TYPE
                     && !TYPE_JUSTIFIED_MODULAR_P (left_type)
                     && TREE_CODE (operand_type (left_operand)) == RECORD_TYPE
                     && (TYPE_MODE (right_type) == BLKmode
                         || TYPE_MODE (left_type)
                            == TYPE_MODE (operand_type (left_operand)))
                     && !(TREE_CODE (left_operand) == VIEW_CONVERT_EXPR
                          && TREE_CODE (right_operand) == VIEW_CONVERT_EXPR
                          && left_type == right_type))))
        {
          left_operand = TREE_OPERAND (left_operand, 0);
          left_type = TREE_TYPE (left_operand);
        }

      /* If a class-wide type may be involved, force use of the RHS type.  */
      if ((TREE_CODE (right_type) == RECORD_TYPE
           || TREE_CODE (right_type) == UNION_TYPE)
          && TYPE_ALIGN_OK (right_type))
        operation_type = right_type;

      /* If we are copying between padded objects with compatible types, use
         the padded view of the objects, this is very likely more efficient.
         Likewise for a padded object that is assigned a constructor, if we
         can convert the constructor to the inner type, to avoid putting a
         VIEW_CONVERT_EXPR on the LHS.  But don't do so if we wouldn't have
         actually copied anything.  */
      else if (TYPE_IS_PADDING_P (left_type)
               && TREE_CONSTANT (TYPE_SIZE (left_type))
               && ((TREE_CODE (right_operand) == COMPONENT_REF
                    && TYPE_MAIN_VARIANT (left_type)
                       == TYPE_MAIN_VARIANT (operand_type (right_operand)))
                   || (TREE_CODE (right_operand) == CONSTRUCTOR
                       && !CONTAINS_PLACEHOLDER_P
                           (DECL_SIZE (TYPE_FIELDS (left_type)))))
               && !integer_zerop (TYPE_SIZE (right_type)))
        {
          /* We make an exception for a BLKmode type padding a non-BLKmode
             inner type and do the conversion of the LHS right away, since
             unchecked_convert wouldn't do it properly.  */
          if (TYPE_MODE (left_type) == BLKmode
              && TYPE_MODE (right_type) != BLKmode
              && TREE_CODE (right_operand) != CONSTRUCTOR)
            {
              operation_type = right_type;
              left_operand = convert (operation_type, left_operand);
              left_type = operation_type;
            }
          else
            operation_type = left_type;
        }

      /* If we have a call to a function that returns with variable size, use
         the RHS type in case we want to use the return slot optimization.  */
      else if (TREE_CODE (right_operand) == CALL_EXPR
               && return_type_with_variable_size_p (right_type))
        operation_type = right_type;

      /* Find the best type to use for copying between aggregate types.  */
      else if (((TREE_CODE (left_type) == ARRAY_TYPE
                 && TREE_CODE (right_type) == ARRAY_TYPE)
                || (TREE_CODE (left_type) == RECORD_TYPE
                    && TREE_CODE (right_type) == RECORD_TYPE))
               && (best_type = find_common_type (left_type, right_type)))
        operation_type = best_type;

      /* Otherwise use the LHS type.  */
      else
        operation_type = left_type;

      /* Ensure everything on the LHS is valid.  If we have a field reference,
         strip anything that get_inner_reference can handle.  Then remove any
         conversions between types having the same code and mode.  And mark
         VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE.  When done, we must have
         either an INDIRECT_REF, a NULL_EXPR, a SAVE_EXPR or a DECL node.  */
      result = left_operand;
      while (true)
        {
          tree restype = TREE_TYPE (result);

          if (TREE_CODE (result) == COMPONENT_REF
              || TREE_CODE (result) == ARRAY_REF
              || TREE_CODE (result) == ARRAY_RANGE_REF)
            while (handled_component_p (result))
              result = TREE_OPERAND (result, 0);

          else if (TREE_CODE (result) == REALPART_EXPR
                   || TREE_CODE (result) == IMAGPART_EXPR
                   || (CONVERT_EXPR_P (result)
                       && (((TREE_CODE (restype)
                             == TREE_CODE (operand_type (result))
                             && TYPE_MODE (restype)
                                == TYPE_MODE (operand_type (result))))
                           || TYPE_ALIGN_OK (restype))))
            result = TREE_OPERAND (result, 0);

          else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
            {
              TREE_ADDRESSABLE (result) = 1;
              result = TREE_OPERAND (result, 0);
            }

          else
            break;
        }

      gcc_assert (INDIRECT_REF_P (result)
                  || TREE_CODE (result) == NULL_EXPR
                  || TREE_CODE (result) == SAVE_EXPR
                  || DECL_P (result));

      /* Convert the right operand to the operation type unless it is
         either already of the correct type or if the type involves a
         placeholder, since the RHS may not have the same record type.  */
      if (operation_type != right_type
          && !CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type)))
        {
          right_operand = convert (operation_type, right_operand);
          right_type = operation_type;
        }

      /* If the left operand is not of the same type as the operation
         type, wrap it up in a VIEW_CONVERT_EXPR.  */
      if (left_type != operation_type)
        left_operand = unchecked_convert (operation_type, left_operand, false);

      has_side_effects = true;
      modulus = NULL_TREE;
      break;

    case ARRAY_REF:
      if (!operation_type)
        operation_type = TREE_TYPE (left_type);

      /* ... fall through ... */

    case ARRAY_RANGE_REF:
      /* First look through conversion between type variants.  Note that
         this changes neither the operation type nor the type domain.  */
      if (TREE_CODE (left_operand) == VIEW_CONVERT_EXPR
          && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (left_operand, 0)))
             == TYPE_MAIN_VARIANT (left_type))
        {
          left_operand = TREE_OPERAND (left_operand, 0);
          left_type = TREE_TYPE (left_operand);
        }

      /* For a range, make sure the element type is consistent.  */
      if (op_code == ARRAY_RANGE_REF
          && TREE_TYPE (operation_type) != TREE_TYPE (left_type))
        {
          operation_type = copy_type (operation_type);
          TREE_TYPE (operation_type) = TREE_TYPE (left_type);

          /* Declare it now since it will never be declared otherwise.  This
             is necessary to ensure that its subtrees are properly marked.  */
          create_type_decl (TYPE_NAME (operation_type), operation_type, true,
                            false, Empty);
        }

      /* Then convert the right operand to its base type.  This will prevent
         unneeded sign conversions when sizetype is wider than integer.  */
      right_operand = convert (right_base_type, right_operand);
      right_operand = convert_to_index_type (right_operand);
      modulus = NULL_TREE;
      break;

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      gcc_checking_assert
        (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
      operation_type = left_base_type;
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);
      break;

    case GE_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
      gcc_checking_assert
        (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
      /* If either operand is a NULL_EXPR, just return a new one.  */
      if (TREE_CODE (left_operand) == NULL_EXPR)
        return build2 (op_code, result_type,
                       build1 (NULL_EXPR, integer_type_node,
                               TREE_OPERAND (left_operand, 0)),
                       integer_zero_node);

      else if (TREE_CODE (right_operand) == NULL_EXPR)
        return build2 (op_code, result_type,
                       build1 (NULL_EXPR, integer_type_node,
                               TREE_OPERAND (right_operand, 0)),
                       integer_zero_node);

      /* If either object is a justified modular type, get the
         fields from within.  */
      if (TREE_CODE (left_type) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (left_type))
        {
          left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
                                  left_operand);
          left_type = TREE_TYPE (left_operand);
          left_base_type = get_base_type (left_type);
        }

      if (TREE_CODE (right_type) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (right_type))
        {
          right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
                                   right_operand);
          right_type = TREE_TYPE (right_operand);
          right_base_type = get_base_type (right_type);
        }

      /* If both objects are arrays, compare them specially.  */
      if ((TREE_CODE (left_type) == ARRAY_TYPE
           || (TREE_CODE (left_type) == INTEGER_TYPE
               && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
          && (TREE_CODE (right_type) == ARRAY_TYPE
              || (TREE_CODE (right_type) == INTEGER_TYPE
                  && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
        {
          if (op_code == EQ_EXPR || op_code == NE_EXPR)
            {
              result
                = compare_arrays_for_equality (input_location, result_type,
                                               left_operand, right_operand);
              if (op_code == NE_EXPR)
                result = invert_truthvalue_loc (input_location, result);
            }
          else
            {
              /* Swap the operands to canonicalize to LT_EXPR or GE_EXPR.  */
              if (op_code == GT_EXPR || op_code == LE_EXPR)
                result
                  = compare_arrays_for_ordering (input_location, result_type,
                                                 right_operand, left_operand);
              else
                result
                  = compare_arrays_for_ordering (input_location, result_type,
                                                 left_operand, right_operand);

              /* GE_EXPR is (not LT_EXPR) for discrete array types.  */
              if (op_code == GE_EXPR || op_code == LE_EXPR)
                result = invert_truthvalue_loc (input_location, result);
            }

          return result;
        }

      /* Otherwise, the base types must be the same, unless they are both (fat)
         pointer types or record types.  In the latter case, use the best type
         and convert both operands to that type.  */
      if (left_base_type != right_base_type)
        {
          if (TYPE_IS_FAT_POINTER_P (left_base_type)
              && TYPE_IS_FAT_POINTER_P (right_base_type))
            {
              gcc_assert (TYPE_MAIN_VARIANT (left_base_type)
                          == TYPE_MAIN_VARIANT (right_base_type));
              best_type = left_base_type;
            }

          else if (POINTER_TYPE_P (left_base_type)
                   && POINTER_TYPE_P (right_base_type))
            {
              tree left_ref_type = TREE_TYPE (left_base_type);
              tree right_ref_type = TREE_TYPE (right_base_type);

              /* Anonymous access types in Ada 2005 may point to compatible
                 object subtypes or function types in the language sense.  */
              gcc_assert (FUNCTION_POINTER_TYPE_P (left_ref_type)
                          == FUNCTION_POINTER_TYPE_P (right_ref_type));
              best_type = left_base_type;
            }

          else if (TREE_CODE (left_base_type) == RECORD_TYPE
                   && TREE_CODE (right_base_type) == RECORD_TYPE)
            {
              /* The only way this is permitted is if both types have the same
                 name.  In that case, one of them must not be self-referential.
                 Use it as the best type.  Even better with a fixed size.  */
              gcc_assert (TYPE_NAME (left_base_type)
                          && TYPE_NAME (left_base_type)
                             == TYPE_NAME (right_base_type));

              if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
                best_type = left_base_type;
              else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
                best_type = right_base_type;
              else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
                best_type = left_base_type;
              else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
                best_type = right_base_type;
              else
                gcc_unreachable ();
            }

          else
            gcc_unreachable ();

          left_operand = convert (best_type, left_operand);
          right_operand = convert (best_type, right_operand);
        }
      else
        {
          left_operand = convert (left_base_type, left_operand);
          right_operand = convert (right_base_type, right_operand);
        }

      /* If both objects are fat pointers, compare them specially.  */
      if (TYPE_IS_FAT_POINTER_P (left_base_type))
        {
          result
            = compare_fat_pointers (input_location,
                                    result_type, left_operand, right_operand);
          if (op_code == NE_EXPR)
            result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
          else
            gcc_assert (op_code == EQ_EXPR);

          return result;
        }

      modulus = NULL_TREE;
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* The RHS of a shift can be any type.  Also, ignore any modulus
         (we used to abort, but this is needed for unchecked conversion
         to modular types).  Otherwise, processing is the same as normal.  */
      gcc_assert (operation_type == left_base_type);
      modulus = NULL_TREE;
      left_operand = convert (operation_type, left_operand);
      break;

    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      /* For binary modulus, if the inputs are in range, so are the
         outputs.  */
      if (modulus && integer_pow2p (modulus))
        modulus = NULL_TREE;
      goto common;

    case COMPLEX_EXPR:
      gcc_assert (TREE_TYPE (result_type) == left_base_type
                  && TREE_TYPE (result_type) == right_base_type);
      left_operand = convert (left_base_type, left_operand);
      right_operand = convert (right_base_type, right_operand);
      break;

    case TRUNC_DIV_EXPR:  case TRUNC_MOD_EXPR:
    case CEIL_DIV_EXPR:   case CEIL_MOD_EXPR:
    case FLOOR_DIV_EXPR:  case FLOOR_MOD_EXPR:
    case ROUND_DIV_EXPR:  case ROUND_MOD_EXPR:
      /* These always produce results lower than either operand.  */
      modulus = NULL_TREE;
      goto common;

    case POINTER_PLUS_EXPR:
      gcc_assert (operation_type == left_base_type
                  && sizetype == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (sizetype, right_operand);
      break;

    case PLUS_NOMOD_EXPR:
    case MINUS_NOMOD_EXPR:
      if (op_code == PLUS_NOMOD_EXPR)
        op_code = PLUS_EXPR;
      else
        op_code = MINUS_EXPR;
      modulus = NULL_TREE;

      /* ... fall through ... */

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* Avoid doing arithmetics in ENUMERAL_TYPE or BOOLEAN_TYPE like the
         other compilers.  Contrary to C, Ada doesn't allow arithmetics in
         these types but can generate addition/subtraction for Succ/Pred.  */
      if (operation_type
          && (TREE_CODE (operation_type) == ENUMERAL_TYPE
              || TREE_CODE (operation_type) == BOOLEAN_TYPE))
        operation_type = left_base_type = right_base_type
          = gnat_type_for_mode (TYPE_MODE (operation_type),
                                TYPE_UNSIGNED (operation_type));

      /* ... fall through ... */

    default:
    common:
      /* The result type should be the same as the base types of both
         operands (and they should be the same).  Convert everything to
         the result type.  */
      gcc_assert (operation_type == left_base_type
                  && left_base_type == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);
    }

  if (modulus && !integer_pow2p (modulus))
    {
      result = nonbinary_modular_operation (op_code, operation_type,
                                            left_operand, right_operand);
      modulus = NULL_TREE;
    }
  /* If either operand is a NULL_EXPR, just return a new one.  */
  else if (TREE_CODE (left_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
  else if (TREE_CODE (right_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
  else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
    {
      result = build4 (op_code, operation_type, left_operand, right_operand,
                       NULL_TREE, NULL_TREE);
      if (!no_fold)
        result = fold (result);
    }
  else if (op_code == INIT_EXPR || op_code == MODIFY_EXPR)
    result = build2 (op_code, void_type_node, left_operand, right_operand);
  else if (no_fold)
    result = build2 (op_code, operation_type, left_operand, right_operand);
  else
    result
      = fold_build2 (op_code, operation_type, left_operand, right_operand);

  if (TREE_CONSTANT (result))
    ;
  else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
    {
      if (TYPE_VOLATILE (operation_type))
        TREE_THIS_VOLATILE (result) = 1;
    }
  else if (TREE_CONSTANT (left_operand) && TREE_CONSTANT (right_operand))
    TREE_CONSTANT (result) = 1;

  if (has_side_effects)
    TREE_SIDE_EFFECTS (result) = 1;

  /* If we are working with modular types, perform the MOD operation
     if something above hasn't eliminated the need for it.  */
  if (modulus)
    {
      modulus = convert (operation_type, modulus);
      if (no_fold)
        result = build2 (FLOOR_MOD_EXPR, operation_type, result, modulus);
      else
        result = fold_build2 (FLOOR_MOD_EXPR, operation_type, result, modulus);
    }

  if (result_type && result_type != operation_type)
    result = convert (result_type, result);

  return result;
}

/* Similar, but for unary operations.  */
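/* For instance (an illustrative sketch, with placeholder gnu_* names):
   taking 'Access of an object translates to a call such as
   build_unary_op (ADDR_EXPR, gnu_ptr_type, gnu_obj), which is handled by
   the ADDR_EXPR case below.  */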
tree
build_unary_op (enum tree_code op_code, tree result_type, tree operand)
{
  tree type = TREE_TYPE (operand);
  tree base_type = get_base_type (type);
  tree operation_type = result_type;
  tree result;

  if (operation_type
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  if (operation_type
      && TREE_CODE (operation_type) == INTEGER_TYPE
      && TYPE_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  switch (op_code)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      if (!operation_type)
        result_type = operation_type = TREE_TYPE (type);
      else
        gcc_assert (result_type == TREE_TYPE (type));

      result = fold_build1 (op_code, operation_type, operand);
      break;

    case TRUTH_NOT_EXPR:
      gcc_checking_assert
        (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
      result = invert_truthvalue_loc (EXPR_LOCATION (operand), operand);
      /* When not optimizing, fold the result as invert_truthvalue_loc
         doesn't fold the result of comparisons.  This is intended to undo
         the trick used for boolean rvalues in gnat_to_gnu.  */
      if (!optimize)
        result = fold (result);
      break;

1540 case ATTR_ADDR_EXPR:
1541 case ADDR_EXPR:
1542 switch (TREE_CODE (operand))
1544 case INDIRECT_REF:
1545 case UNCONSTRAINED_ARRAY_REF:
1546 result = TREE_OPERAND (operand, 0);
1548 /* Make sure the type here is a pointer, not a reference.
1549 GCC wants pointer types for function addresses. */
1550 if (!result_type)
1551 result_type = build_pointer_type (type);
1553 /* If the underlying object can alias everything, propagate the
1554 property since we are effectively retrieving the object. */
1555 if (POINTER_TYPE_P (TREE_TYPE (result))
1556 && TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (result)))
1558 if (TREE_CODE (result_type) == POINTER_TYPE
1559 && !TYPE_REF_CAN_ALIAS_ALL (result_type))
1560 result_type
1561 = build_pointer_type_for_mode (TREE_TYPE (result_type),
1562 TYPE_MODE (result_type),
1563 true);
1564 else if (TREE_CODE (result_type) == REFERENCE_TYPE
1565 && !TYPE_REF_CAN_ALIAS_ALL (result_type))
1566 result_type
1567 = build_reference_type_for_mode (TREE_TYPE (result_type),
1568 TYPE_MODE (result_type),
1569 true);
1571 break;
1573 case NULL_EXPR:
1574 result = operand;
1575 TREE_TYPE (result) = type = build_pointer_type (type);
1576 break;
1578 case COMPOUND_EXPR:
1579 /* Fold a compound expression if it has unconstrained array type
1580 since the middle-end cannot handle it. But we don't it in the
1581 general case because it may introduce aliasing issues if the
1582 first operand is an indirect assignment and the second operand
1583 the corresponding address, e.g. for an allocator. However do
1584 it for a return value to expose it for later recognition. */
1585 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE
1586 || (VAR_P (TREE_OPERAND (operand, 1))
1587 && DECL_RETURN_VALUE_P (TREE_OPERAND (operand, 1))))
1589 result = build_unary_op (ADDR_EXPR, result_type,
1590 TREE_OPERAND (operand, 1));
1591 result = build2 (COMPOUND_EXPR, TREE_TYPE (result),
1592 TREE_OPERAND (operand, 0), result);
1593 break;
1595 goto common;
1597 case ARRAY_REF:
1598 case ARRAY_RANGE_REF:
1599 case COMPONENT_REF:
1600 case BIT_FIELD_REF:
1601 /* If this is for 'Address, find the address of the prefix and add
1602 the offset to the field. Otherwise, do this the normal way. */
1603 if (op_code == ATTR_ADDR_EXPR)
1605 poly_int64 bitsize;
1606 poly_int64 bitpos;
1607 tree offset, inner;
1608 machine_mode mode;
1609 int unsignedp, reversep, volatilep;
1611 inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
1612 &mode, &unsignedp, &reversep,
1613 &volatilep);
1615 /* If INNER is a padding type whose field has a self-referential
1616 size, convert to that inner type. We know the offset is zero
1617 and we need to have that type visible. */
1618 if (type_is_padding_self_referential (TREE_TYPE (inner)))
1619 inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
1620 inner);
1622 /* Compute the offset as a byte offset from INNER. */
1623 if (!offset)
1624 offset = size_zero_node;
1626 offset
1627 = size_binop (PLUS_EXPR, offset,
1628 size_int (bits_to_bytes_round_down (bitpos)));
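/* Illustrative note: BITPOS is a bit offset, so it is rounded down to
   a byte amount before being added, e.g. a component at bit position
   37 with no variable OFFSET yields a byte offset of 4, i.e. 37 / 8
   rounded towards zero.  */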
1630 /* Take the address of INNER, convert it to a pointer to our type
1631 and add the offset. */
1632 inner = build_unary_op (ADDR_EXPR,
1633 build_pointer_type (TREE_TYPE (operand)),
1634 inner);
1635 result = build_binary_op (POINTER_PLUS_EXPR, TREE_TYPE (inner),
1636 inner, offset);
1637 break;
1639 goto common;
1641 case CONSTRUCTOR:
1642 /* If this is just a constructor for a padded record, we can
1643 just take the address of the single field and convert it to
1644 a pointer to our type. */
1645 if (TYPE_IS_PADDING_P (type))
1647 result
1648 = build_unary_op (ADDR_EXPR,
1649 build_pointer_type (TREE_TYPE (operand)),
1650 CONSTRUCTOR_ELT (operand, 0)->value);
1651 break;
1653 goto common;
1655 case NOP_EXPR:
1656 if (AGGREGATE_TYPE_P (type)
1657 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
1658 return build_unary_op (ADDR_EXPR, result_type,
1659 TREE_OPERAND (operand, 0));
1661 /* ... fallthru ... */
1663 case VIEW_CONVERT_EXPR:
1664 /* If this is just a variant conversion or if the conversion doesn't
1665 change the mode, get the result type from this type and go down.
1666 This is needed for conversions of CONST_DECLs, to eventually get
1667 to the address of their CORRESPONDING_VARs. */
1668 if ((TYPE_MAIN_VARIANT (type)
1669 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (operand, 0))))
1670 || (TYPE_MODE (type) != BLKmode
1671 && (TYPE_MODE (type)
1672 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0))))))
1673 return build_unary_op (ADDR_EXPR,
1674 (result_type ? result_type
1675 : build_pointer_type (type)),
1676 TREE_OPERAND (operand, 0));
1677 goto common;
1679 case CONST_DECL:
1680 operand = DECL_CONST_CORRESPONDING_VAR (operand);
1682 /* ... fall through ... */
1684 default:
1685 common:
1687 /* If we are taking the address of a padded record whose field
1688 contains a template, take the address of the field. */
1689 if (TYPE_IS_PADDING_P (type)
1690 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
1691 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
1693 type = TREE_TYPE (TYPE_FIELDS (type));
1694 operand = convert (type, operand);
1697 gnat_mark_addressable (operand);
1698 result = build_fold_addr_expr (operand);
1701 if (TREE_CONSTANT (operand) || staticp (operand))
1702 TREE_CONSTANT (result) = 1;
1704 break;
1706 case INDIRECT_REF:
1708 tree t = remove_conversions (operand, false);
1709 bool can_never_be_null = DECL_P (t) && DECL_CAN_NEVER_BE_NULL_P (t);
1711 /* If TYPE is a thin pointer, either first retrieve the base if this
1712 is an expression with an offset built for the initialization of an
1713 object with an unconstrained nominal subtype, or else convert to
1714 the fat pointer. */
1715 if (TYPE_IS_THIN_POINTER_P (type))
1717 tree rec_type = TREE_TYPE (type);
1719 if (TREE_CODE (operand) == POINTER_PLUS_EXPR
1720 && TREE_OPERAND (operand, 1)
1721 == byte_position (DECL_CHAIN (TYPE_FIELDS (rec_type)))
1722 && TREE_CODE (TREE_OPERAND (operand, 0)) == NOP_EXPR)
1724 operand = TREE_OPERAND (TREE_OPERAND (operand, 0), 0);
1725 type = TREE_TYPE (operand);
1727 else if (TYPE_UNCONSTRAINED_ARRAY (rec_type))
1729 operand
1730 = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (rec_type)),
1731 operand);
1732 type = TREE_TYPE (operand);
1736 /* If we want to refer to an unconstrained array, use the appropriate
1737 expression. But this will never survive down to the back-end. */
1738 if (TYPE_IS_FAT_POINTER_P (type))
1740 result = build1 (UNCONSTRAINED_ARRAY_REF,
1741 TYPE_UNCONSTRAINED_ARRAY (type), operand);
1742 TREE_READONLY (result)
1743 = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
1746 /* If we are dereferencing an ADDR_EXPR, return its operand. */
1747 else if (TREE_CODE (operand) == ADDR_EXPR)
1748 result = TREE_OPERAND (operand, 0);
1750 /* Otherwise, build and fold the indirect reference. */
1751 else
1753 result = build_fold_indirect_ref (operand);
1754 TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
1757 if (!TYPE_IS_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)))
1759 TREE_SIDE_EFFECTS (result) = 1;
1760 if (INDIRECT_REF_P (result))
1761 TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
1764 if ((INDIRECT_REF_P (result)
1765 || TREE_CODE (result) == UNCONSTRAINED_ARRAY_REF)
1766 && can_never_be_null)
1767 TREE_THIS_NOTRAP (result) = 1;
1769 break;
1772 case NEGATE_EXPR:
1773 case BIT_NOT_EXPR:
1775 tree modulus = ((operation_type
1776 && TREE_CODE (operation_type) == INTEGER_TYPE
1777 && TYPE_MODULAR_P (operation_type))
1778 ? TYPE_MODULUS (operation_type) : NULL_TREE);
1779 int mod_pow2 = modulus && integer_pow2p (modulus);
1781 /* If this is a modular type, there are various possibilities
1782 depending on the operation and whether the modulus is a
1783 power of two or not. */
1785 if (modulus)
1787 gcc_assert (operation_type == base_type);
1788 operand = convert (operation_type, operand);
1790 /* The fastest in the negate case for binary modulus is
1791 the straightforward code; the TRUNC_MOD_EXPR below
1792 is an AND operation. */
1793 if (op_code == NEGATE_EXPR && mod_pow2)
1794 result = fold_build2 (TRUNC_MOD_EXPR, operation_type,
1795 fold_build1 (NEGATE_EXPR, operation_type,
1796 operand),
1797 modulus);
1799 /* For the nonbinary negate case, return zero for a zero operand
1800 and the modulus minus the operand otherwise. If the modulus
1801 is a power of two minus one, we can do the subtraction
1802 as an XOR since it is equivalent and faster on most machines. */
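/* For illustration: with a modulus of 7, which is 2**3 - 1, the
   negation of a nonzero X is 7 - X, and 7 - X equals 7 XOR X over the
   range 0 .. 7, e.g. 7 - 3 = 4 and 7 XOR 3 = 4.  The COND_EXPR built
   below maps a zero operand back to zero.  */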
1803 else if (op_code == NEGATE_EXPR && !mod_pow2)
1805 if (integer_pow2p (fold_build2 (PLUS_EXPR, operation_type,
1806 modulus,
1807 build_int_cst (operation_type,
1808 1))))
1809 result = fold_build2 (BIT_XOR_EXPR, operation_type,
1810 operand, modulus);
1811 else
1812 result = fold_build2 (MINUS_EXPR, operation_type,
1813 modulus, operand);
1815 result = fold_build3 (COND_EXPR, operation_type,
1816 fold_build2 (NE_EXPR,
1817 boolean_type_node,
1818 operand,
1819 build_int_cst
1820 (operation_type, 0)),
1821 result, operand);
1823 else
1825 /* For the NOT cases, we need a constant equal to the
1826 modulus minus one. For a binary modulus, we XOR the
1827 operand against this constant; for a nonbinary modulus,
1828 we subtract the operand from this constant. */
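/* For illustration: with a binary modulus of 16, CNST is 15 and
   NOT X is 15 XOR X, e.g. NOT 5 = 10; with a nonbinary modulus of 10,
   CNST is 9 and NOT X is 9 - X, e.g. NOT 3 = 6.  */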
1830 tree cnst = fold_build2 (MINUS_EXPR, operation_type, modulus,
1831 build_int_cst (operation_type, 1));
1833 if (mod_pow2)
1834 result = fold_build2 (BIT_XOR_EXPR, operation_type,
1835 operand, cnst);
1836 else
1837 result = fold_build2 (MINUS_EXPR, operation_type,
1838 cnst, operand);
1841 break;
1845 /* ... fall through ... */
1847 default:
1848 gcc_assert (operation_type == base_type);
1849 result = fold_build1 (op_code, operation_type,
1850 convert (operation_type, operand));
1853 if (result_type && TREE_TYPE (result) != result_type)
1854 result = convert (result_type, result);
1856 return result;
1859 /* Similar, but for COND_EXPR. */
1861 tree
1862 build_cond_expr (tree result_type, tree condition_operand,
1863 tree true_operand, tree false_operand)
1865 bool addr_p = false;
1866 tree result;
1868 /* The front-end verified that result, true and false operands have
1869 the same base type. Convert everything to the result type. */
1870 true_operand = convert (result_type, true_operand);
1871 false_operand = convert (result_type, false_operand);
1873 /* If the result type is unconstrained or variable-sized, take the address
1874 of the operands and then dereference the result. Likewise if the result
1875 type is passed by reference, because creating a temporary of this type is
1876 not allowed. */
1877 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1878 || type_contains_placeholder_p (result_type)
1879 || !TREE_CONSTANT (TYPE_SIZE (result_type))
1880 || TYPE_IS_BY_REFERENCE_P (result_type))
1882 result_type = build_pointer_type (result_type);
1883 true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
1884 false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
1885 addr_p = true;
1888 result = fold_build3 (COND_EXPR, result_type, condition_operand,
1889 true_operand, false_operand);
1891 /* If we have a common SAVE_EXPR (possibly surrounded by arithmetics)
1892 in both arms, make sure it gets evaluated by moving it ahead of the
1893 conditional expression. This is necessary because it is evaluated
1894 in only one place at run time and would otherwise be uninitialized
1895 in one of the arms. */
1896 true_operand = skip_simple_arithmetic (true_operand);
1897 false_operand = skip_simple_arithmetic (false_operand);
1899 if (true_operand == false_operand && TREE_CODE (true_operand) == SAVE_EXPR)
1900 result = build2 (COMPOUND_EXPR, result_type, true_operand, result);
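/* For example, if both arms contain SAVE_EXPR <E>, say as E + 1 and
   E - 1, the result is COMPOUND_EXPR (SAVE_EXPR <E>, COND_EXPR ...),
   which guarantees that E is evaluated exactly once ahead of the
   branch, whichever arm is selected at run time.  */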
1902 if (addr_p)
1903 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1905 return result;
1908 /* Similar, but for COMPOUND_EXPR. */
1910 tree
1911 build_compound_expr (tree result_type, tree stmt_operand, tree expr_operand)
1913 bool addr_p = false;
1914 tree result;
1916 /* If the result type is unconstrained, take the address of the operand and
1917 then dereference the result. Likewise if the result type is passed by
1918 reference, but this is natively handled in the gimplifier. */
1919 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1920 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1922 result_type = build_pointer_type (result_type);
1923 expr_operand = build_unary_op (ADDR_EXPR, result_type, expr_operand);
1924 addr_p = true;
1927 result = fold_build2 (COMPOUND_EXPR, result_type, stmt_operand,
1928 expr_operand);
1930 if (addr_p)
1931 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1933 return result;
1936 /* Conveniently construct a function call expression. FNDECL names the
1937 function to be called, N is the number of arguments, and the "..."
1938 parameters are the argument expressions. Unlike build_call_expr
1939 this doesn't fold the call, hence it will always return a CALL_EXPR. */
1941 tree
1942 build_call_n_expr (tree fndecl, int n, ...)
1944 va_list ap;
1945 tree fntype = TREE_TYPE (fndecl);
1946 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
1948 va_start (ap, n);
1949 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
1950 va_end (ap);
1951 return fn;
1954 /* Build a goto to LABEL for a raise, with an optional call to Local_Raise.
1955 MSG gives the exception's identity for the call to Local_Raise, if any. */
1957 static tree
1958 build_goto_raise (Entity_Id gnat_label, int msg)
1960 tree gnu_label = gnat_to_gnu_entity (gnat_label, NULL_TREE, false);
1961 tree gnu_result = build1 (GOTO_EXPR, void_type_node, gnu_label);
1962 Entity_Id local_raise = Get_Local_Raise_Call_Entity ();
1964 /* If Local_Raise is present, build Local_Raise (Exception'Identity). */
1965 if (Present (local_raise))
1967 tree gnu_local_raise
1968 = gnat_to_gnu_entity (local_raise, NULL_TREE, false);
1969 tree gnu_exception_entity
1970 = gnat_to_gnu_entity (Get_RT_Exception_Entity (msg), NULL_TREE, false);
1971 tree gnu_call
1972 = build_call_n_expr (gnu_local_raise, 1,
1973 build_unary_op (ADDR_EXPR, NULL_TREE,
1974 gnu_exception_entity));
1975 gnu_result
1976 = build2 (COMPOUND_EXPR, void_type_node, gnu_call, gnu_result);
1979 TREE_USED (gnu_label) = 1;
1980 return gnu_result;
1983 /* Expand the SLOC of GNAT_NODE, if present, into tree location information
1984 pointed to by FILENAME, LINE and COL. Fall back to the current location
1985 if GNAT_NODE is absent or has no SLOC. */
1987 static void
1988 expand_sloc (Node_Id gnat_node, tree *filename, tree *line, tree *col)
1990 const char *str;
1991 int line_number, column_number;
1993 if (Debug_Flag_NN || Exception_Locations_Suppressed)
1995 str = "";
1996 line_number = 0;
1997 column_number = 0;
1999 else if (Present (gnat_node) && Sloc (gnat_node) != No_Location)
2001 str = Get_Name_String
2002 (Debug_Source_Name (Get_Source_File_Index (Sloc (gnat_node))));
2003 line_number = Get_Logical_Line_Number (Sloc (gnat_node));
2004 column_number = Get_Column_Number (Sloc (gnat_node));
2006 else
2008 str = lbasename (LOCATION_FILE (input_location));
2009 line_number = LOCATION_LINE (input_location);
2010 column_number = LOCATION_COLUMN (input_location);
2013 const int len = strlen (str);
2014 *filename = build_string (len, str);
2015 TREE_TYPE (*filename) = build_array_type (char_type_node,
2016 build_index_type (size_int (len)));
2017 *line = build_int_cst (NULL_TREE, line_number);
2018 if (col)
2019 *col = build_int_cst (NULL_TREE, column_number);
2022 /* Build a call to a function that raises an exception and passes file name
2023 and line number, if requested. MSG says which exception function to call.
2024 GNAT_NODE is the node conveying the source location for which the error
2025 should be signaled, or Empty in which case the error is signaled for the
2026 current location. KIND says which kind of exception node this is for,
2027 among N_Raise_{Constraint,Storage,Program}_Error. */
2029 tree
2030 build_call_raise (int msg, Node_Id gnat_node, char kind)
2032 Entity_Id gnat_label = get_exception_label (kind);
2033 tree fndecl = gnat_raise_decls[msg];
2034 tree filename, line;
2036 /* If this is to be done as a goto, handle that case. */
2037 if (Present (gnat_label))
2038 return build_goto_raise (gnat_label, msg);
2040 expand_sloc (gnat_node, &filename, &line, NULL);
2042 return
2043 build_call_n_expr (fndecl, 2,
2044 build1 (ADDR_EXPR,
2045 build_pointer_type (char_type_node),
2046 filename),
2047 line);
2050 /* Similar to build_call_raise, with extra information about the column
2051 where the check failed. */
2053 tree
2054 build_call_raise_column (int msg, Node_Id gnat_node, char kind)
2056 Entity_Id gnat_label = get_exception_label (kind);
2057 tree fndecl = gnat_raise_decls_ext[msg];
2058 tree filename, line, col;
2060 /* If this is to be done as a goto, handle that case. */
2061 if (Present (gnat_label))
2062 return build_goto_raise (gnat_label, msg);
2064 expand_sloc (gnat_node, &filename, &line, &col);
2066 return
2067 build_call_n_expr (fndecl, 3,
2068 build1 (ADDR_EXPR,
2069 build_pointer_type (char_type_node),
2070 filename),
2071 line, col);
2074 /* Similar to build_call_raise_column, for an index or range check exception,
2075 with extra information of the form "INDEX out of range FIRST..LAST". */
2077 tree
2078 build_call_raise_range (int msg, Node_Id gnat_node, char kind,
2079 tree index, tree first, tree last)
2081 Entity_Id gnat_label = get_exception_label (kind);
2082 tree fndecl = gnat_raise_decls_ext[msg];
2083 tree filename, line, col;
2085 /* If this is to be done as a goto, handle that case. */
2086 if (Present (gnat_label))
2087 return build_goto_raise (gnat_label, msg);
2089 expand_sloc (gnat_node, &filename, &line, &col);
2091 return
2092 build_call_n_expr (fndecl, 6,
2093 build1 (ADDR_EXPR,
2094 build_pointer_type (char_type_node),
2095 filename),
2096 line, col,
2097 convert (integer_type_node, index),
2098 convert (integer_type_node, first),
2099 convert (integer_type_node, last));
2102 /* qsort comparer for the bit positions of two constructor elements
2103 for record components. */
2105 static int
2106 compare_elmt_bitpos (const void *rt1, const void *rt2)
2108 const constructor_elt * const elmt1 = (const constructor_elt *) rt1;
2109 const constructor_elt * const elmt2 = (const constructor_elt *) rt2;
2110 const_tree const field1 = elmt1->index;
2111 const_tree const field2 = elmt2->index;
2112 const int ret
2113 = tree_int_cst_compare (bit_position (field1), bit_position (field2));
2115 return ret ? ret : (int) (DECL_UID (field1) - DECL_UID (field2));
2118 /* Return a CONSTRUCTOR of TYPE whose elements are V. */
2120 tree
2121 gnat_build_constructor (tree type, vec<constructor_elt, va_gc> *v)
2123 bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
2124 bool read_only = true;
2125 bool side_effects = false;
2126 tree result, obj, val;
2127 unsigned int n_elmts;
2129 /* Scan the elements to see if they are all constant or if any has side
2130 effects, to let us set global flags on the resulting constructor. Count
2131 the elements along the way for possible sorting purposes below. */
2132 FOR_EACH_CONSTRUCTOR_ELT (v, n_elmts, obj, val)
2134 /* The predicate must be in keeping with output_constructor and, unlike
2135 initializer_constant_valid_p, we accept "&{...}" because we'll put
2136 the CONSTRUCTOR into the constant pool during gimplification. */
2137 if ((!TREE_CONSTANT (val) && !TREE_STATIC (val))
2138 || (TREE_CODE (type) == RECORD_TYPE
2139 && CONSTRUCTOR_BITFIELD_P (obj)
2140 && !initializer_constant_valid_for_bitfield_p (val))
2141 || (!initializer_constant_valid_p (val,
2142 TREE_TYPE (val),
2143 TYPE_REVERSE_STORAGE_ORDER (type))
2144 && !(TREE_CODE (val) == ADDR_EXPR
2145 && TREE_CODE (TREE_OPERAND (val, 0)) == CONSTRUCTOR
2146 && TREE_CONSTANT (TREE_OPERAND (val, 0)))))
2147 allconstant = false;
2149 if (!TREE_READONLY (val))
2150 read_only = false;
2152 if (TREE_SIDE_EFFECTS (val))
2153 side_effects = true;
2156 /* For record types with constant components only, sort field list
2157 by increasing bit position. This is necessary to ensure the
2158 constructor can be output as static data. */
2159 if (allconstant && TREE_CODE (type) == RECORD_TYPE && n_elmts > 1)
2160 v->qsort (compare_elmt_bitpos);
2162 result = build_constructor (type, v);
2163 CONSTRUCTOR_NO_CLEARING (result) = 1;
2164 TREE_CONSTANT (result) = TREE_STATIC (result) = allconstant;
2165 TREE_SIDE_EFFECTS (result) = side_effects;
2166 TREE_READONLY (result) = TYPE_READONLY (type) || read_only || allconstant;
2167 return result;
2170 /* Return a COMPONENT_REF to access FIELD in RECORD, or NULL_TREE if the field
2171 is not found in the record. Don't fold the result if NO_FOLD is true. */
2173 static tree
2174 build_simple_component_ref (tree record, tree field, bool no_fold)
2176 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (record));
2177 tree ref;
2179 /* The failure of this assertion will very likely come from a missing
2180 insertion of an explicit dereference. */
2181 gcc_assert (RECORD_OR_UNION_TYPE_P (type));
2183 /* The type must be frozen at this point. */
2184 gcc_assert (COMPLETE_TYPE_P (type));
2186 /* Try to fold a conversion from another record or union type unless the type
2187 contains a placeholder as it might be needed for a later substitution. */
2188 if (TREE_CODE (record) == VIEW_CONVERT_EXPR
2189 && RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (record, 0)))
2190 && !type_contains_placeholder_p (type))
2192 tree op = TREE_OPERAND (record, 0);
2194 /* If this is an unpadding operation, convert the underlying object to
2195 the unpadded type directly. */
2196 if (TYPE_IS_PADDING_P (type) && field == TYPE_FIELDS (type))
2197 return convert (TREE_TYPE (field), op);
2199 /* Otherwise try to access FIELD directly in the underlying type, but
2200 make sure that the form of the reference doesn't change too much;
2201 this can happen for an unconstrained bit-packed array type whose
2202 constrained form can be an integer type. */
2203 ref = build_simple_component_ref (op, field, no_fold);
2204 if (ref && TREE_CODE (TREE_TYPE (ref)) == TREE_CODE (TREE_TYPE (field)))
2205 return ref;
2208 /* If this field is not in the specified record, see if we can find a field
2209 in the specified record whose original field is the same as this one. */
2210 if (DECL_CONTEXT (field) != type)
2212 tree new_field;
2214 /* First loop through normal components. */
2215 for (new_field = TYPE_FIELDS (type);
2216 new_field;
2217 new_field = DECL_CHAIN (new_field))
2218 if (SAME_FIELD_P (field, new_field))
2219 break;
2221 /* Next, loop through DECL_INTERNAL_P components if we haven't found the
2222 component in the first search. Doing this search in two steps is
2223 required to avoid hidden homonymous fields in the _Parent field. */
2224 if (!new_field)
2225 for (new_field = TYPE_FIELDS (type);
2226 new_field;
2227 new_field = DECL_CHAIN (new_field))
2228 if (DECL_INTERNAL_P (new_field)
2229 && RECORD_OR_UNION_TYPE_P (TREE_TYPE (new_field)))
2231 tree field_ref
2232 = build_simple_component_ref (record, new_field, no_fold);
2233 ref = build_simple_component_ref (field_ref, field, no_fold);
2234 if (ref)
2235 return ref;
2238 field = new_field;
2241 if (!field)
2242 return NULL_TREE;
2244 /* If the field's offset has overflowed, do not try to access it, as doing
2245 so may trigger sanity checks deeper in the back-end. Note that we don't
2246 need to warn since this will be done on trying to declare the object. */
2247 if (TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST
2248 && TREE_OVERFLOW (DECL_FIELD_OFFSET (field)))
2249 return build1 (NULL_EXPR, TREE_TYPE (field),
2250 build_call_raise (SE_Object_Too_Large, Empty,
2251 N_Raise_Storage_Error));
2253 ref = build3 (COMPONENT_REF, TREE_TYPE (field), record, field, NULL_TREE);
2255 if (TREE_READONLY (record)
2256 || TREE_READONLY (field)
2257 || TYPE_READONLY (type))
2258 TREE_READONLY (ref) = 1;
2260 if (TREE_THIS_VOLATILE (record)
2261 || TREE_THIS_VOLATILE (field)
2262 || TYPE_VOLATILE (type))
2263 TREE_THIS_VOLATILE (ref) = 1;
2265 if (no_fold)
2266 return ref;
2268 /* The generic folder may punt in this case because the inner array type
2269 can be self-referential, but folding is in fact not problematic. */
2270 if (TREE_CODE (record) == CONSTRUCTOR
2271 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (record)))
2273 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (record);
2274 unsigned HOST_WIDE_INT idx;
2275 tree index, value;
2276 FOR_EACH_CONSTRUCTOR_ELT (elts, idx, index, value)
2277 if (index == field)
2278 return value;
2279 return ref;
2282 return fold (ref);
2285 /* Likewise, but return NULL_EXPR and generate a Program_Error if the
2286 field is not found in the record. */
2288 tree
2289 build_component_ref (tree record, tree field, bool no_fold)
2291 tree ref = build_simple_component_ref (record, field, no_fold);
2292 if (ref)
2293 return ref;
2295 /* The missing field should have been detected in the front-end. */
2296 gigi_checking_assert (false);
2298 /* Assume this is an invalid user field so raise Program_Error. */
2299 return build1 (NULL_EXPR, TREE_TYPE (field),
2300 build_call_raise (PE_Explicit_Raise, Empty,
2301 N_Raise_Program_Error));
2304 /* Helper for build_call_alloc_dealloc, with arguments to be interpreted
2305 identically. Process the case where a GNAT_PROC to call is provided. */
2307 static inline tree
2308 build_call_alloc_dealloc_proc (tree gnu_obj, tree gnu_size, tree gnu_type,
2309 Entity_Id gnat_proc, Entity_Id gnat_pool)
2311 tree gnu_proc = gnat_to_gnu (gnat_proc);
2312 tree gnu_align = size_int (TYPE_ALIGN (gnu_type) / BITS_PER_UNIT);
2314 tree gnu_call;
2316 /* A storage pool's underlying type is a record type for both predefined
2317 storage pools and GNAT simple storage pools. The return and secondary
2318 stacks use the same mechanism, but their pool object is an integer. */
2319 if (Is_Record_Type (Underlying_Type (Etype (gnat_pool))))
2321 /* The size is the third parameter; the alignment parameter
2322 has the same type. */
2323 Entity_Id gnat_size_type
2324 = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
2325 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2327 tree gnu_pool = gnat_to_gnu (gnat_pool);
2328 tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
2330 gnu_size = convert (gnu_size_type, gnu_size);
2331 gnu_align = convert (gnu_size_type, gnu_align);
2333 /* The first arg is always the address of the storage pool; next
2334 comes the address of the object, for a deallocator, then the
2335 size and alignment. */
2336 if (gnu_obj)
2337 gnu_call = build_call_n_expr (gnu_proc, 4, gnu_pool_addr, gnu_obj,
2338 gnu_size, gnu_align);
2339 else
2340 gnu_call = build_call_n_expr (gnu_proc, 3, gnu_pool_addr,
2341 gnu_size, gnu_align);
2344 else
2346 /* The size is the second parameter. */
2347 Entity_Id gnat_size_type
2348 = Etype (Next_Formal (First_Formal (gnat_proc)));
2349 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2351 /* Deallocation is not supported for return and secondary stacks. */
2352 gcc_assert (!gnu_obj);
2354 gnu_size = convert (gnu_size_type, gnu_size);
2355 gnu_align = convert (gnu_size_type, gnu_align);
2357 if (DECL_BUILT_IN_CLASS (gnu_proc) == BUILT_IN_FRONTEND
2358 && DECL_FE_FUNCTION_CODE (gnu_proc) == BUILT_IN_RETURN_SLOT)
2360 /* This must be a function that returns by invisible reference. */
2361 gcc_assert (current_function_decl
2362 && TREE_ADDRESSABLE (TREE_TYPE (current_function_decl)));
2363 tree gnu_ret_size;
2365 gnu_call = DECL_RESULT (current_function_decl);
2367 /* The allocation has already been done by the caller so we check that
2368 we are not going to overflow the return slot. */
2369 if (TYPE_CI_CO_LIST (TREE_TYPE (current_function_decl)))
2370 gnu_ret_size
2371 = TYPE_SIZE_UNIT
2372 (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (TREE_TYPE (gnu_call)))));
2373 else
2374 gnu_ret_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (gnu_call)));
2376 gnu_call
2377 = fold_build3 (COND_EXPR, TREE_TYPE (gnu_call),
2378 fold_build2 (LE_EXPR, boolean_type_node,
2379 fold_convert (sizetype, gnu_size),
2380 gnu_ret_size),
2381 gnu_call,
2382 build_call_raise (PE_Explicit_Raise, Empty,
2383 N_Raise_Program_Error));
2386 else
2387 gnu_call = build_call_n_expr (gnu_proc, 2, gnu_size, gnu_align);
2390 return gnu_call;
2393 /* Helper for build_call_alloc_dealloc, to build and return an allocator for
2394 DATA_SIZE bytes aimed at containing a DATA_TYPE object, using the default
2395 __gnat_malloc allocator. Honor DATA_TYPE alignments greater than what the
2396 latter offers. */
2398 static inline tree
2399 maybe_wrap_malloc (tree data_size, tree data_type, Node_Id gnat_node)
2401 /* When the DATA_TYPE alignment is stricter than what malloc offers
2402 (super-aligned case), we allocate an "aligning" wrapper type and return
2403 the address of its single data field with the malloc's return value
2404 stored just in front. */
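/* Illustrative layout in the super-aligned case, with MALLOC_PTR the
   value returned by malloc and FIELD_ADDR the address returned to the
   caller:

     MALLOC_PTR                    FIELD_ADDR (aligned on DATA_ALIGN)
     |                             |
     v                             v
     [ padding ...  | MALLOC_PTR  | data ... ]

   maybe_wrap_free later reads MALLOC_PTR back at FIELD_ADDR minus
   sizeof (void *) and passes it to free.  */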
2406 unsigned int data_align = TYPE_ALIGN (data_type);
2407 unsigned int system_allocator_alignment
2408 = get_target_system_allocator_alignment () * BITS_PER_UNIT;
2410 tree aligning_type
2411 = ((data_align > system_allocator_alignment)
2412 ? make_aligning_type (data_type, data_align, data_size,
2413 system_allocator_alignment,
2414 POINTER_SIZE / BITS_PER_UNIT,
2415 gnat_node)
2416 : NULL_TREE);
2418 tree size_to_malloc
2419 = aligning_type ? TYPE_SIZE_UNIT (aligning_type) : data_size;
2421 tree malloc_ptr = build_call_n_expr (malloc_decl, 1, size_to_malloc);
2423 Check_Restriction_No_Dependence_On_System (Name_Memory, gnat_node);
2425 if (aligning_type)
2427 /* Latch malloc's return value and get a pointer to the aligning field
2428 first. */
2429 tree storage_ptr = gnat_protect_expr (malloc_ptr);
2431 tree aligning_record_addr
2432 = convert (build_pointer_type (aligning_type), storage_ptr);
2434 tree aligning_record
2435 = build_unary_op (INDIRECT_REF, NULL_TREE, aligning_record_addr);
2437 tree aligning_field
2438 = build_component_ref (aligning_record, TYPE_FIELDS (aligning_type),
2439 false);
2441 tree aligning_field_addr
2442 = build_unary_op (ADDR_EXPR, NULL_TREE, aligning_field);
2444 /* Then arrange to store the allocator's return value ahead
2445 and return. */
2446 tree storage_ptr_slot_addr
2447 = build_binary_op (POINTER_PLUS_EXPR, ptr_type_node,
2448 convert (ptr_type_node, aligning_field_addr),
2449 size_int (-(HOST_WIDE_INT) POINTER_SIZE
2450 / BITS_PER_UNIT));
2452 tree storage_ptr_slot
2453 = build_unary_op (INDIRECT_REF, NULL_TREE,
2454 convert (build_pointer_type (ptr_type_node),
2455 storage_ptr_slot_addr));
2457 return
2458 build2 (COMPOUND_EXPR, TREE_TYPE (aligning_field_addr),
2459 build_binary_op (INIT_EXPR, NULL_TREE,
2460 storage_ptr_slot, storage_ptr),
2461 aligning_field_addr);
2463 else
2464 return malloc_ptr;
2467 /* Helper for build_call_alloc_dealloc, to release a DATA_TYPE object
2468 designated by DATA_PTR using the __gnat_free entry point. */
2470 static inline tree
2471 maybe_wrap_free (tree data_ptr, tree data_type, Node_Id gnat_node)
2473 /* In the regular alignment case, we pass the data pointer straight to free.
2474 In the superaligned case, we need to retrieve the initial allocator
2475 return value, stored in front of the data block at allocation time. */
2477 unsigned int data_align = TYPE_ALIGN (data_type);
2478 unsigned int system_allocator_alignment
2479 = get_target_system_allocator_alignment () * BITS_PER_UNIT;
2481 tree free_ptr;
2483 Check_Restriction_No_Dependence_On_System (Name_Memory, gnat_node);
2485 if (data_align > system_allocator_alignment)
2487 /* DATA_FRONT_PTR (void *)
2488 = (void *)DATA_PTR - sizeof (void *) */
2489 tree data_front_ptr
2490 = build_binary_op
2491 (POINTER_PLUS_EXPR, ptr_type_node,
2492 convert (ptr_type_node, data_ptr),
2493 size_int (-(HOST_WIDE_INT) POINTER_SIZE / BITS_PER_UNIT));
2495 /* FREE_PTR (void *) = *(void **)DATA_FRONT_PTR */
2496 free_ptr
2497 = build_unary_op
2498 (INDIRECT_REF, NULL_TREE,
2499 convert (build_pointer_type (ptr_type_node), data_front_ptr));
2501 else
2502 free_ptr = data_ptr;
2504 return build_call_n_expr (free_decl, 1, free_ptr);
2507 /* Build a GCC tree to call an allocation or deallocation function.
2508 If GNU_OBJ is nonzero, it is an object to deallocate. Otherwise,
2509 generate an allocation.
2511 GNU_SIZE is the number of bytes to allocate and GNU_TYPE is the contained
2512 object type, used to determine the to-be-honored address alignment.
2513 GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the storage
2514 pool to use. If not present, malloc and free are used. GNAT_NODE is used
2515 to provide an error location for restriction violation messages. */
2517 tree
2518 build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, tree gnu_type,
2519 Entity_Id gnat_proc, Entity_Id gnat_pool,
2520 Node_Id gnat_node)
2522 /* Explicit proc to call? This one is assumed to deal with the type
2523 alignment constraints. */
2524 if (Present (gnat_proc))
2525 return build_call_alloc_dealloc_proc (gnu_obj, gnu_size, gnu_type,
2526 gnat_proc, gnat_pool);
2528 /* Otherwise, object to "free" or "malloc" with possible special processing
2529 for alignments stricter than what the default allocator honors. */
2530 else if (gnu_obj)
2531 return maybe_wrap_free (gnu_obj, gnu_type, gnat_node);
2532 else
2534 /* Assert that we can no longer be called with this special pool. */
2535 gcc_assert (gnat_pool != -1);
2537 /* Check that we aren't violating the associated restriction. */
2538 if (!(Nkind (gnat_node) == N_Allocator && Comes_From_Source (gnat_node)))
2540 Check_No_Implicit_Heap_Alloc (gnat_node);
2541 if (Has_Task (Etype (gnat_node)))
2542 Check_No_Implicit_Task_Alloc (gnat_node);
2543 if (Has_Protected (Etype (gnat_node)))
2544 Check_No_Implicit_Protected_Alloc (gnat_node);
2546 return maybe_wrap_malloc (gnu_size, gnu_type, gnat_node);
2550 /* Build a GCC tree that corresponds to allocating an object of TYPE whose
2551 initial value is INIT, if INIT is nonzero. Convert the expression to
2552 RESULT_TYPE, which must be some pointer type, and return the result.
2554 GNAT_PROC and GNAT_POOL optionally give the procedure to call and
2555 the storage pool to use. GNAT_NODE is used to provide an error
2556 location for restriction violation messages. If IGNORE_INIT_TYPE is
2557 true, ignore the type of INIT for the purpose of determining the size;
2558 this will cause the maximum size to be allocated if TYPE is of
2559 self-referential size. */
2561 tree
2562 build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
2563 Entity_Id gnat_pool, Node_Id gnat_node, bool ignore_init_type)
2565 const bool pool_is_storage_model
2566 = Present (gnat_pool)
2567 && Has_Storage_Model_Type_Aspect (Etype (gnat_pool))
2568 && Present (Storage_Model_Copy_To (gnat_pool));
2569 tree size, storage, storage_deref, storage_init;
2571 /* If the initializer, if present, is a NULL_EXPR, just return a new one. */
2572 if (init && TREE_CODE (init) == NULL_EXPR)
2573 return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));
2575 /* If we are just annotating types, also return a NULL_EXPR. */
2576 else if (type_annotate_only)
2577 return build1 (NULL_EXPR, result_type,
2578 build_call_raise (CE_Range_Check_Failed, gnat_node,
2579 N_Raise_Constraint_Error));
2581 /* If the initializer, if present, is a COND_EXPR, deal with each branch. */
2582 else if (init && TREE_CODE (init) == COND_EXPR)
2583 return build3 (COND_EXPR, result_type, TREE_OPERAND (init, 0),
2584 build_allocator (type, TREE_OPERAND (init, 1), result_type,
2585 gnat_proc, gnat_pool, gnat_node,
2586 ignore_init_type),
2587 build_allocator (type, TREE_OPERAND (init, 2), result_type,
2588 gnat_proc, gnat_pool, gnat_node,
2589 ignore_init_type));
2591 /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
2592 sizes of the object and its template. Allocate the whole thing and
2593 fill in the parts that are known. */
2594 else if (TYPE_IS_FAT_OR_THIN_POINTER_P (result_type))
2596 tree storage_type
2597 = build_unc_object_type_from_ptr (result_type, type,
2598 get_identifier ("ALLOC"), false);
2599 tree template_type = TREE_TYPE (TYPE_FIELDS (storage_type));
2600 tree storage_ptr_type = build_pointer_type (storage_type);
2601 tree lhs, rhs;
2603 size = TYPE_SIZE_UNIT (storage_type);
2604 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, init);
2606 /* If the size overflows, pass -1 so Storage_Error will be raised. */
2607 if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
2608 size = size_int (-1);
2610 storage = build_call_alloc_dealloc (NULL_TREE, size, storage_type,
2611 gnat_proc, gnat_pool, gnat_node);
2612 storage = convert (storage_ptr_type, gnat_protect_expr (storage));
2613 storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2614 TREE_THIS_NOTRAP (storage_deref) = 1;
2616 /* If there is an initializing expression, then make a constructor for
2617 the entire object including the bounds and copy it into the object.
2618 If there is no initializing expression, just set the bounds. Note
2619 that, if we have a storage model, we need to copy the initializing
2620 expression separately from the bounds. */
2621 if (init && !pool_is_storage_model)
2623 vec<constructor_elt, va_gc> *v;
2624 vec_alloc (v, 2);
2626 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (storage_type),
2627 build_template (template_type, type, init));
2628 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (storage_type)),
2629 init);
2631 lhs = storage_deref;
2632 rhs = gnat_build_constructor (storage_type, v);
2634 else
2636 lhs = build_component_ref (storage_deref, TYPE_FIELDS (storage_type),
2637 false);
2638 rhs = build_template (template_type, type, init);
2641 if (pool_is_storage_model)
2643 storage_init = build_storage_model_store (gnat_pool, lhs, rhs);
2644 if (init)
2646 start_stmt_group ();
2647 add_stmt (storage_init);
2648 lhs
2649 = build_component_ref (storage_deref,
2650 DECL_CHAIN (TYPE_FIELDS (storage_type)),
2651 false);
2652 rhs = init;
2653 size = TYPE_SIZE_UNIT (TREE_TYPE (lhs));
2654 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, init);
2655 tree t = build_storage_model_store (gnat_pool, lhs, rhs, size);
2656 add_stmt (t);
2657 storage_init = end_stmt_group ();
2660 else
2661 storage_init = build_binary_op (INIT_EXPR, NULL_TREE, lhs, rhs);
2663 return build2 (COMPOUND_EXPR, result_type,
2664 storage_init, convert (result_type, storage));
2667 size = TYPE_SIZE_UNIT (type);
2669 /* If we have an initializing expression, see if its size is simpler
2670 than the size from the type. */
2671 if (!ignore_init_type && init && TYPE_SIZE_UNIT (TREE_TYPE (init))
2672 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
2673 || CONTAINS_PLACEHOLDER_P (size)))
2674 size = TYPE_SIZE_UNIT (TREE_TYPE (init));
2676 /* If the size is still self-referential, reference the initializing
2677 expression, if it is present. If not, this must have been a call
2678 to allocate a library-level object, in which case we just use the
2679 maximum size. */
2680 if (!ignore_init_type && init)
2681 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, init);
2682 else if (CONTAINS_PLACEHOLDER_P (size))
2683 size = max_size (size, true);
2685 /* If the size overflows, pass -1 so Storage_Error will be raised. */
2686 if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
2687 size = size_int (-1);
2689 storage = convert (result_type,
2690 build_call_alloc_dealloc (NULL_TREE, size, type,
2691 gnat_proc, gnat_pool,
2692 gnat_node));
2694 /* If we have an initial value, protect the new address, assign the value
2695 and return the address with a COMPOUND_EXPR. */
2696 if (init)
2698 storage = gnat_protect_expr (storage);
2699 storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2700 TREE_THIS_NOTRAP (storage_deref) = 1;
2701 if (pool_is_storage_model)
2702 storage_init
2703 = build_storage_model_store (gnat_pool, storage_deref, init, size);
2704 else
2705 storage_init
2706 = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref, init);
2707 return build2 (COMPOUND_EXPR, result_type, storage_init, storage);
2710 return storage;
2713 /* Build a call to a copy procedure of a storage model given by an object.
2714 DEST, SRC and SIZE are as for a call to memcpy. GNAT_SMO is the entity
2715 for the storage model object and COPY_TO says which procedure to use. */
2717 static tree
2718 build_storage_model_copy (Entity_Id gnat_smo, tree dest, tree src, tree size,
2719 bool copy_to)
2721 const Entity_Id gnat_copy_proc
2722 = copy_to
2723 ? Storage_Model_Copy_To (gnat_smo)
2724 : Storage_Model_Copy_From (gnat_smo);
2725 tree gnu_copy_proc = gnat_to_gnu (gnat_copy_proc);
2726 tree gnu_param_type_list = TYPE_ARG_TYPES (TREE_TYPE (gnu_copy_proc));
2727 tree t1 = TREE_VALUE (gnu_param_type_list);
2728 tree t2 = TREE_VALUE (TREE_CHAIN (gnu_param_type_list));
2729 tree t3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (gnu_param_type_list)));
2730 tree t4
2731 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (gnu_param_type_list))));
2733 return
2734 build_call_n_expr (gnu_copy_proc,
2736 build_unary_op (ADDR_EXPR, t1, gnat_to_gnu (gnat_smo)),
2737 build_unary_op (ADDR_EXPR, t2, dest),
2738 build_unary_op (ADDR_EXPR, t3, src),
2739 convert (t4, size));
2742 /* Build a load of SRC using the storage model of GNAT_SMO. */
2744 tree
2745 build_storage_model_load (Entity_Id gnat_smo, tree src)
2747 tree ret = build2 (LOAD_EXPR, TREE_TYPE (src), src, NULL_TREE);
2749 /* Unconstrained array references have no size so we need to store the
2750 storage model object for future processing by the machinery. */
2751 if (TREE_CODE (src) == UNCONSTRAINED_ARRAY_REF)
2752 TREE_OPERAND (ret, 1) = build_int_cst (integer_type_node, gnat_smo);
2753 else
2754 TREE_OPERAND (ret, 1) = build_storage_model_load (gnat_smo, src, src);
2756 return ret;
2759 /* Build a load of SRC into DEST using the storage model of GNAT_SMO.
2760 If SIZE is specified, use it, otherwise use the size of SRC. */
2762 tree
2763 build_storage_model_load (Entity_Id gnat_smo, tree dest, tree src, tree size)
2765 gcc_assert (TREE_CODE (src) != LOAD_EXPR);
2767 if (!size)
2769 size = TYPE_SIZE_UNIT (TREE_TYPE (src));
2770 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, src);
2771 size = INSTANTIATE_LOAD_IN_EXPR (size, gnat_smo);
2774 return build_storage_model_copy (gnat_smo, dest, src, size, false);
2777 /* Build a store of SRC into DEST using the storage model of GNAT_SMO.
2778 If SIZE is specified, use it, otherwise use the size of DEST. */
2780 tree
2781 build_storage_model_store (Entity_Id gnat_smo, tree dest, tree src, tree size)
2783 gcc_assert (TREE_CODE (src) != LOAD_EXPR);
2785 if (!size)
2787 size = TYPE_SIZE_UNIT (TREE_TYPE (dest));
2788 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, dest);
2789 size = INSTANTIATE_LOAD_IN_EXPR (size, gnat_smo);
2792 return build_storage_model_copy (gnat_smo, dest, src, size, true);
2795 /* Given a tree EXP, instantiate occurrences of LOAD_EXPR in it and associate
2796 them with the storage model of GNAT_SMO. */
2798 tree
2799 instantiate_load_in_expr (tree exp, Entity_Id gnat_smo)
2801 const enum tree_code code = TREE_CODE (exp);
2802 tree type = TREE_TYPE (exp);
2803 tree op0, op1, op2, op3;
2804 tree new_tree;
2806 /* We handle TREE_LIST and COMPONENT_REF separately. */
2807 if (code == TREE_LIST)
2809 op0 = INSTANTIATE_LOAD_IN_EXPR (TREE_CHAIN (exp), gnat_smo);
2810 op1 = INSTANTIATE_LOAD_IN_EXPR (TREE_VALUE (exp), gnat_smo);
2811 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
2812 return exp;
2814 return tree_cons (TREE_PURPOSE (exp), op1, op0);
2816 else if (code == COMPONENT_REF)
2818 /* The field. */
2819 op1 = TREE_OPERAND (exp, 1);
2821 /* If it is a discriminant or equivalent, a LOAD_EXPR is needed. */
2822 if (DECL_DISCRIMINANT_NUMBER (op1))
2823 return build_storage_model_load (gnat_smo, exp);
2825 op0 = INSTANTIATE_LOAD_IN_EXPR (TREE_OPERAND (exp, 0), gnat_smo);
2826 if (op0 == TREE_OPERAND (exp, 0))
2827 return exp;
2829 new_tree = fold_build3 (COMPONENT_REF, type, op0, op1, NULL_TREE);
2831 else
2832 switch (TREE_CODE_CLASS (code))
2834 case tcc_constant:
2835 case tcc_declaration:
2836 return exp;
2838 case tcc_expression:
2839 if (code == LOAD_EXPR)
2840 return exp;
2842 /* Fall through. */
2844 case tcc_exceptional:
2845 case tcc_unary:
2846 case tcc_binary:
2847 case tcc_comparison:
2848 case tcc_reference:
2849 switch (TREE_CODE_LENGTH (code))
2851 case 0:
2852 return exp;
2854 case 1:
2855 op0 = INSTANTIATE_LOAD_IN_EXPR (TREE_OPERAND (exp, 0), gnat_smo);
2856 if (op0 == TREE_OPERAND (exp, 0))
2857 return exp;
2859 new_tree = fold_build1 (code, type, op0);
2860 break;
2862 case 2:
2863 op0 = INSTANTIATE_LOAD_IN_EXPR (TREE_OPERAND (exp, 0), gnat_smo);
2864 op1 = INSTANTIATE_LOAD_IN_EXPR (TREE_OPERAND (exp, 1), gnat_smo);
2866 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
2867 return exp;
2869 new_tree = fold_build2 (code, type, op0, op1);
2870 break;
2872 case 3:
2873 op0 = INSTANTIATE_LOAD_IN_EXPR (TREE_OPERAND (exp, 0), gnat_smo);
2874 op1 = INSTANTIATE_LOAD_IN_EXPR (TREE_OPERAND (exp, 1), gnat_smo);
2875 op2 = INSTANTIATE_LOAD_IN_EXPR (TREE_OPERAND (exp, 2), gnat_smo);
2877 if (op0 == TREE_OPERAND (exp, 0)
2878 && op1 == TREE_OPERAND (exp, 1)
2879 && op2 == TREE_OPERAND (exp, 2))
2880 return exp;
2882 new_tree = fold_build3 (code, type, op0, op1, op2);
2883 break;
2885 case 4:
2886 op0 = INSTANTIATE_LOAD_IN_EXPR (TREE_OPERAND (exp, 0), gnat_smo);
2887 op1 = INSTANTIATE_LOAD_IN_EXPR (TREE_OPERAND (exp, 1), gnat_smo);
2888 op2 = INSTANTIATE_LOAD_IN_EXPR (TREE_OPERAND (exp, 2), gnat_smo);
2889 op3 = INSTANTIATE_LOAD_IN_EXPR (TREE_OPERAND (exp, 3), gnat_smo);
2891 if (op0 == TREE_OPERAND (exp, 0)
2892 && op1 == TREE_OPERAND (exp, 1)
2893 && op2 == TREE_OPERAND (exp, 2)
2894 && op3 == TREE_OPERAND (exp, 3))
2895 return exp;
2897 new_tree = fold (build4 (code, type, op0, op1, op2, op3));
2898 break;
2900 default:
2901 gcc_unreachable ();
2903 break;
2905 case tcc_vl_exp:
2907 gcc_assert (code == CALL_EXPR);
2909 const int n = call_expr_nargs (exp);
2910 gcc_assert (n > 0);
2911 tree *argarray = XALLOCAVEC (tree, n);
2912 for (int i = 0; i < n; i++)
2913 argarray[i]
2914 = INSTANTIATE_LOAD_IN_EXPR (CALL_EXPR_ARG (exp, i), gnat_smo);
2916 for (int i = 0; i < n; i++)
2917 if (argarray[i] != CALL_EXPR_ARG (exp, i))
2918 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
2920 return exp;
2923 default:
2924 gcc_unreachable ();
2927 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
2929 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
2930 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
2932 return new_tree;
2935 /* Given an array or slice reference, instantiate occurrences of LOAD_EXPR in
2936 it and associate them with the storage model of GNAT_SMO. */
2938 void
2939 instantiate_load_in_array_ref (tree ref, Entity_Id gnat_smo)
2941 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
2942 tree elem_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
2944 TREE_OPERAND (ref, 2)
2945 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), ref);
2946 TREE_OPERAND (ref, 2)
2947 = INSTANTIATE_LOAD_IN_EXPR (TREE_OPERAND (ref, 2), gnat_smo);
2949 TREE_OPERAND (ref, 3)
2950 = size_binop (EXACT_DIV_EXPR,
2951 SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elem_type),
2952 ref),
2953 size_int (TYPE_ALIGN_UNIT (elem_type)));
2954 TREE_OPERAND (ref, 3)
2955 = INSTANTIATE_LOAD_IN_EXPR (TREE_OPERAND (ref, 3), gnat_smo);
2958 /* Indicate that we need to take the address of T and that it therefore
2959 should not be allocated in a register. Return true if successful. */
2961 bool
2962 gnat_mark_addressable (tree t)
2964 while (true)
2965 switch (TREE_CODE (t))
2967 case ADDR_EXPR:
2968 case COMPONENT_REF:
2969 case ARRAY_REF:
2970 case ARRAY_RANGE_REF:
2971 case REALPART_EXPR:
2972 case IMAGPART_EXPR:
2973 case VIEW_CONVERT_EXPR:
2974 case NON_LVALUE_EXPR:
2975 CASE_CONVERT:
2976 t = TREE_OPERAND (t, 0);
2977 break;
2979 case COMPOUND_EXPR:
2980 t = TREE_OPERAND (t, 1);
2981 break;
2983 case CONSTRUCTOR:
2984 TREE_ADDRESSABLE (t) = 1;
2985 return true;
2987 case VAR_DECL:
2988 case PARM_DECL:
2989 case RESULT_DECL:
2990 TREE_ADDRESSABLE (t) = 1;
2991 return true;
2993 case FUNCTION_DECL:
2994 TREE_ADDRESSABLE (t) = 1;
2995 return true;
2997 case CONST_DECL:
2998 return DECL_CONST_CORRESPONDING_VAR (t)
2999 && gnat_mark_addressable (DECL_CONST_CORRESPONDING_VAR (t));
3001 default:
3002 return true;
3006 /* Return true if EXP is a stable expression for the purpose of the functions
3007 below and, therefore, can be returned unmodified by them. We accept things
3008 that are actual constants or that have already been handled. */
3010 static bool
3011 gnat_stable_expr_p (tree exp)
3013 enum tree_code code = TREE_CODE (exp);
3014 return TREE_CONSTANT (exp) || code == NULL_EXPR || code == SAVE_EXPR;
3017 /* Save EXP for later use or reuse. This is equivalent to save_expr in tree.cc
3018 but we know how to handle our own nodes. */
3020 tree
3021 gnat_save_expr (tree exp)
3023 tree type = TREE_TYPE (exp);
3024 enum tree_code code = TREE_CODE (exp);
3026 if (gnat_stable_expr_p (exp))
3027 return exp;
3029 if (code == UNCONSTRAINED_ARRAY_REF)
3031 tree t = build1 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)));
3032 TREE_READONLY (t) = TYPE_READONLY (type);
3033 return t;
3036 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
3037 This may be more efficient, but will also allow us to more easily find
3038 the match for the PLACEHOLDER_EXPR. */
3039 if (code == COMPONENT_REF
3040 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3041 return build3 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)),
3042 TREE_OPERAND (exp, 1), NULL_TREE);
3044 return save_expr (exp);
3047 /* Protect EXP for immediate reuse. This is a variant of gnat_save_expr that
3048 is optimized under the assumption that EXP's value doesn't change before
3049 its subsequent reuse(s) except potentially through its reevaluation.
3051 gnat_protect_expr guarantees that multiple evaluations of the expression
3052 will not generate multiple side effects, whereas gnat_save_expr further
3053 guarantees that all evaluations will yield the same result. */
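/* For instance, on a dereference carrying side effects, say a volatile
   load *P, gnat_protect_expr protects only the address P, so each
   reuse performs the load again, whereas gnat_save_expr wraps the
   whole dereference in SAVE_EXPR <*P> for a scalar type, so each
   reuse sees the very same value.  */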
3055 tree
3056 gnat_protect_expr (tree exp)
3058 tree type = TREE_TYPE (exp);
3059 enum tree_code code = TREE_CODE (exp);
3061 if (gnat_stable_expr_p (exp))
3062 return exp;
3064 /* If EXP has no side effects, we theoretically don't need to do anything.
3065 However, we may be recursively passed more and more complex expressions
3066 involving checks which will be reused multiple times and eventually be
3067 unshared for gimplification; in order to avoid a complexity explosion
3068 at that point, we protect any expressions more complex than a simple
3069 arithmetic expression. */
3070 if (!TREE_SIDE_EFFECTS (exp))
3072 tree inner = skip_simple_arithmetic (exp);
3073 if (!EXPR_P (inner) || REFERENCE_CLASS_P (inner))
3074 return exp;
3077 /* If this is a conversion, protect what's inside the conversion. */
3078 if (code == NON_LVALUE_EXPR
3079 || CONVERT_EXPR_CODE_P (code)
3080 || code == VIEW_CONVERT_EXPR)
3081 return build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
3083 /* If we're indirectly referencing something, we only need to protect the
3084 address since the data itself can't change in these situations. */
3085 if (code == INDIRECT_REF || code == UNCONSTRAINED_ARRAY_REF)
3087 tree t = build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
3088 TREE_READONLY (t) = TYPE_READONLY (type);
3089 return t;
3092 /* Likewise if we're indirectly referencing part of something. */
3093 if (code == COMPONENT_REF
3094 && INDIRECT_REF_P (TREE_OPERAND (exp, 0)))
3095 return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
3096 TREE_OPERAND (exp, 1), NULL_TREE);
3098 /* An atomic load is an INDIRECT_REF of its first argument, so apply the
3099 same transformation as in the INDIRECT_REF case above. */
3100 if (code == CALL_EXPR && call_is_atomic_load (exp))
3101 return build_call_expr (TREE_OPERAND (CALL_EXPR_FN (exp), 0), 2,
3102 gnat_protect_expr (CALL_EXPR_ARG (exp, 0)),
3103 CALL_EXPR_ARG (exp, 1));
3105 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
3106 This may be more efficient, but will also allow us to more easily find
3107 the match for the PLACEHOLDER_EXPR. */
3108 if (code == COMPONENT_REF
3109 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3110 return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
3111 TREE_OPERAND (exp, 1), NULL_TREE);
3113 /* If this is a fat pointer or a scalar, just make a SAVE_EXPR. Likewise
3114 for a CALL_EXPR as large objects are returned via invisible reference
3115 in most ABIs so the temporary will directly be filled by the callee. */
3116 if (TYPE_IS_FAT_POINTER_P (type)
3117 || !AGGREGATE_TYPE_P (type)
3118 || code == CALL_EXPR)
3119 return save_expr (exp);
3121 /* Otherwise reference, protect the address and dereference. */
3122 return
3123 build_unary_op (INDIRECT_REF, type,
3124 save_expr (build_unary_op (ADDR_EXPR, NULL_TREE, exp)));
3127 /* This is equivalent to stabilize_reference_1 in tree.cc but we take an extra
3128 argument to force evaluation of everything. */
3130 static tree
3131 gnat_stabilize_reference_1 (tree e, void *data)
3133 const bool force = *(bool *)data;
3134 enum tree_code code = TREE_CODE (e);
3135 tree type = TREE_TYPE (e);
3136 tree result;
3138 if (gnat_stable_expr_p (e))
3139 return e;
3141 switch (TREE_CODE_CLASS (code))
3143 case tcc_exceptional:
3144 case tcc_declaration:
3145 case tcc_comparison:
3146 case tcc_expression:
3147 case tcc_reference:
3148 case tcc_vl_exp:
3149 /* If this is a COMPONENT_REF of a fat pointer, save the entire
3150 fat pointer. This may be more efficient, but will also allow
3151 us to more easily find the match for the PLACEHOLDER_EXPR. */
3152 if (code == COMPONENT_REF
3153 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (e, 0))))
3154 result
3155 = build3 (code, type,
3156 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data),
3157 TREE_OPERAND (e, 1), NULL_TREE);
3158 /* If the expression has side-effects, then encase it in a SAVE_EXPR
3159 so that it will only be evaluated once. */
3160 /* The tcc_reference and tcc_comparison classes could be handled as
3161 below, but it is generally faster to only evaluate them once. */
3162 else if (TREE_SIDE_EFFECTS (e) || force)
3163 return save_expr (e);
3164 else
3165 return e;
3166 break;
3168 case tcc_binary:
3169 /* Recursively stabilize each operand. */
3170 result
3171 = build2 (code, type,
3172 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data),
3173 gnat_stabilize_reference_1 (TREE_OPERAND (e, 1), data));
3174 break;
3176 case tcc_unary:
3177 /* Recursively stabilize each operand. */
3178 result
3179 = build1 (code, type,
3180 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data));
3181 break;
3183 default:
3184 gcc_unreachable ();
3187 /* See gnat_rewrite_reference below for the rationale. */
3188 TREE_READONLY (result) = TREE_READONLY (e);
3189 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
3191 if (TREE_SIDE_EFFECTS (e))
3192 TREE_SIDE_EFFECTS (result) = 1;
3194 return result;
3197 /* This is equivalent to stabilize_reference in tree.cc but we know how to
3198 handle our own nodes and we take extra arguments. FORCE says whether to
3199 force evaluation of everything in REF. INIT is set to the first arm of
3200 a COMPOUND_EXPR present in REF, if any. */
3202 tree
3203 gnat_stabilize_reference (tree ref, bool force, tree *init)
3205 return
3206 gnat_rewrite_reference (ref, gnat_stabilize_reference_1, &force, init);
3209 /* Rewrite reference REF and call FUNC on each expression within REF in the
3210 process. DATA is passed unmodified to FUNC. INIT is set to the first
3211 arm of a COMPOUND_EXPR present in REF, if any. */
3213 tree
3214 gnat_rewrite_reference (tree ref, rewrite_fn func, void *data, tree *init)
3216 tree type = TREE_TYPE (ref);
3217 enum tree_code code = TREE_CODE (ref);
3218 tree result;
3220 switch (code)
3222 case CONST_DECL:
3223 case VAR_DECL:
3224 case PARM_DECL:
3225 case RESULT_DECL:
3226 /* No action is needed in this case. */
3227 return ref;
3229 CASE_CONVERT:
3230 case FLOAT_EXPR:
3231 case FIX_TRUNC_EXPR:
3232 case REALPART_EXPR:
3233 case IMAGPART_EXPR:
3234 case VIEW_CONVERT_EXPR:
3235 result
3236 = build1 (code, type,
3237 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func, data,
3238 init));
3239 break;
3241 case INDIRECT_REF:
3242 case UNCONSTRAINED_ARRAY_REF:
3243 result = build1 (code, type, func (TREE_OPERAND (ref, 0), data));
3244 break;
3246 case COMPONENT_REF:
3247 result = build3 (COMPONENT_REF, type,
3248 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func,
3249 data, init),
3250 TREE_OPERAND (ref, 1), NULL_TREE);
3251 break;
3253 case BIT_FIELD_REF:
3254 result = build3 (BIT_FIELD_REF, type,
3255 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func,
3256 data, init),
3257 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
3258 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
3259 break;
3261 case ARRAY_REF:
3262 case ARRAY_RANGE_REF:
3263 result
3264 = build4 (code, type,
3265 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func, data,
3266 init),
3267 func (TREE_OPERAND (ref, 1), data),
3268 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
3269 break;
3271 case COMPOUND_EXPR:
3272 gcc_assert (!*init);
3273 *init = TREE_OPERAND (ref, 0);
3274 /* We expect only the pattern built in Call_to_gnu. */
3275 gcc_assert (DECL_P (TREE_OPERAND (ref, 1))
3276 || (TREE_CODE (TREE_OPERAND (ref, 1)) == COMPONENT_REF
3277 && DECL_P (TREE_OPERAND (TREE_OPERAND (ref, 1), 0))));
3278 return TREE_OPERAND (ref, 1);
3280 case CALL_EXPR:
3282 /* This can only be an atomic load. */
3283 gcc_assert (call_is_atomic_load (ref));
3285 /* An atomic load is an INDIRECT_REF of its first argument. */
3286 tree t = CALL_EXPR_ARG (ref, 0);
3287 if (TREE_CODE (t) == NOP_EXPR)
3288 t = TREE_OPERAND (t, 0);
3289 if (TREE_CODE (t) == ADDR_EXPR)
3290 t = build1 (ADDR_EXPR, TREE_TYPE (t),
3291 gnat_rewrite_reference (TREE_OPERAND (t, 0), func, data,
3292 init));
3293 else
3294 t = func (t, data);
3295 t = fold_convert (TREE_TYPE (CALL_EXPR_ARG (ref, 0)), t);
3297 result = build_call_expr (TREE_OPERAND (CALL_EXPR_FN (ref), 0), 2,
3298 t, CALL_EXPR_ARG (ref, 1));
3300 break;
3302 case ERROR_MARK:
3303 case NULL_EXPR:
3304 return ref;
3306 default:
3307 gcc_unreachable ();
3310 /* TREE_READONLY and TREE_THIS_VOLATILE set on the initial expression may
3311 not be sustained across some paths, such as the one for INDIRECT_REF.
3313 Special care should be taken regarding TREE_SIDE_EFFECTS, because some
3314 paths introduce side-effects where there was none initially (e.g. if a
3315 SAVE_EXPR is built) and we also want to keep track of that. */
3316 TREE_READONLY (result) = TREE_READONLY (ref);
3317 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
3319 if (TREE_SIDE_EFFECTS (ref))
3320 TREE_SIDE_EFFECTS (result) = 1;
3322 if (code == INDIRECT_REF
3323 || code == UNCONSTRAINED_ARRAY_REF
3324 || code == ARRAY_REF
3325 || code == ARRAY_RANGE_REF)
3326 TREE_THIS_NOTRAP (result) = TREE_THIS_NOTRAP (ref);
3328 return result;

/* This is equivalent to get_inner_reference in expr.cc but it returns the
   ultimate containing object only if the reference (lvalue) is constant,
   i.e. if it doesn't depend on the context in which it is evaluated.  */

tree
get_inner_constant_reference (tree exp)
{
  while (true)
    {
      switch (TREE_CODE (exp))
        {
        case BIT_FIELD_REF:
          break;

        case COMPONENT_REF:
          if (!TREE_CONSTANT (DECL_FIELD_OFFSET (TREE_OPERAND (exp, 1))))
            return NULL_TREE;
          break;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          {
            tree array_type = TREE_TYPE (TREE_OPERAND (exp, 0));
            if (!TREE_CONSTANT (TREE_OPERAND (exp, 1))
                || !TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (array_type)))
                || !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (array_type))))
              return NULL_TREE;
          }
          break;

        case REALPART_EXPR:
        case IMAGPART_EXPR:
        case VIEW_CONVERT_EXPR:
          break;

        default:
          goto done;
        }

      exp = TREE_OPERAND (exp, 0);
    }

done:
  return exp;
}
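
/* For instance (illustrative), for a reference Rec.Arr (3) in which the
   position of component Arr and the layout of its array type are known at
   compile time, the function returns the underlying object Rec; if the
   index were a variable I instead, it would return NULL_TREE, since the
   reference would then depend on the value of I when it is evaluated.  */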

/* Return true if EXPR is the addition or the subtraction of a constant and,
   if so, set *ADD to the addend, *CST to the constant and *MINUS_P to true
   or false depending on whether this is a subtraction or an addition.  */

bool
is_simple_additive_expression (tree expr, tree *add, tree *cst, bool *minus_p)
{
  /* Skip overflow checks.  */
  if (TREE_CODE (expr) == COND_EXPR
      && TREE_CODE (COND_EXPR_THEN (expr)) == COMPOUND_EXPR
      && TREE_CODE (TREE_OPERAND (COND_EXPR_THEN (expr), 0)) == CALL_EXPR
      && get_callee_fndecl (TREE_OPERAND (COND_EXPR_THEN (expr), 0))
         == gnat_raise_decls[CE_Overflow_Check_Failed])
    expr = COND_EXPR_ELSE (expr);
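
  /* At this point, a pattern like the one built for an overflow-checked
     addition, i.e. a COND_EXPR whose THEN arm raises Constraint_Error, has
     been replaced by its ELSE arm, e.g. the plain "X + 1" (illustrative).  */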

  if (TREE_CODE (expr) == PLUS_EXPR)
    {
      if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
        {
          *add = TREE_OPERAND (expr, 1);
          *cst = TREE_OPERAND (expr, 0);
          *minus_p = false;
          return true;
        }
      else if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
        {
          *add = TREE_OPERAND (expr, 0);
          *cst = TREE_OPERAND (expr, 1);
          *minus_p = false;
          return true;
        }
    }
  else if (TREE_CODE (expr) == MINUS_EXPR)
    {
      if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
        {
          *add = TREE_OPERAND (expr, 0);
          *cst = TREE_OPERAND (expr, 1);
          *minus_p = true;
          return true;
        }
    }

  return false;
}
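
/* A minimal usage sketch (illustrative): for an EXPR representing "X - 4",

     tree add, cst;
     bool minus_p;
     if (is_simple_additive_expression (expr, &add, &cst, &minus_p))
       ...

   sets ADD to X, CST to 4 and MINUS_P to true; for "4 + X", it sets ADD to
   X, CST to 4 and MINUS_P to false.  Note that "4 - X" is not recognized,
   since only the subtraction of a constant is handled.  */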

/* If EXPR is an expression that is invariant in the current function, in the
   sense that it can be evaluated anywhere in the function and any number of
   times, return EXPR or an equivalent expression.  Otherwise return
   NULL_TREE.  */

tree
gnat_invariant_expr (tree expr)
{
  tree type = TREE_TYPE (expr);
  tree add, cst;
  bool minus_p;

  expr = remove_conversions (expr, false);

  /* Look through temporaries created to capture values.  */
  while ((TREE_CODE (expr) == CONST_DECL
          || (VAR_P (expr) && TREE_READONLY (expr)))
         && decl_function_context (expr) == current_function_decl
         && DECL_INITIAL (expr))
    {
      expr = DECL_INITIAL (expr);
      /* Look into CONSTRUCTORs built to initialize padded types.  */
      expr = maybe_padded_object (expr);
      expr = remove_conversions (expr, false);
    }

  /* We are only interested in scalar types at the moment and, even if we may
     have gone through padding types in the above loop, we must be back to a
     scalar value at this point.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (expr)))
    return NULL_TREE;

  if (TREE_CONSTANT (expr))
    return fold_convert (type, expr);

  /* Deal with aligning patterns.  */
  if (TREE_CODE (expr) == BIT_AND_EXPR
      && TREE_CONSTANT (TREE_OPERAND (expr, 1)))
    {
      tree op0 = gnat_invariant_expr (TREE_OPERAND (expr, 0));
      if (op0)
        return fold_build2 (BIT_AND_EXPR, type, op0, TREE_OPERAND (expr, 1));
      else
        return NULL_TREE;
    }

  /* Deal with addition or subtraction of constants.  */
  if (is_simple_additive_expression (expr, &add, &cst, &minus_p))
    {
      add = gnat_invariant_expr (add);
      if (add)
        return
          fold_build2 (minus_p ? MINUS_EXPR : PLUS_EXPR, type,
                       fold_convert (type, add), fold_convert (type, cst));
      else
        return NULL_TREE;
    }

  bool invariant_p = false;
  tree t = expr;

  while (true)
    {
      switch (TREE_CODE (t))
        {
        case COMPONENT_REF:
          invariant_p |= DECL_INVARIANT_P (TREE_OPERAND (t, 1));
          break;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          {
            tree array_type = TREE_TYPE (TREE_OPERAND (t, 0));
            if (!TREE_CONSTANT (TREE_OPERAND (t, 1))
                || !TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (array_type)))
                || !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (array_type))))
              return NULL_TREE;
          }
          break;

        case BIT_FIELD_REF:
        case REALPART_EXPR:
        case IMAGPART_EXPR:
        case VIEW_CONVERT_EXPR:
        CASE_CONVERT:
          break;

        case INDIRECT_REF:
          if ((!invariant_p && !TREE_READONLY (t)) || TREE_SIDE_EFFECTS (t))
            return NULL_TREE;
          invariant_p = false;
          break;

        default:
          goto object;
        }

      t = TREE_OPERAND (t, 0);
    }

object:
  if (TREE_SIDE_EFFECTS (t))
    return NULL_TREE;

  if (TREE_CODE (t) == CONST_DECL
      && (DECL_EXTERNAL (t)
          || decl_function_context (t) != current_function_decl))
    return fold_convert (type, expr);

  if (!invariant_p && !TREE_READONLY (t))
    return NULL_TREE;

  if (TREE_CODE (t) == PARM_DECL)
    return fold_convert (type, expr);

  if (VAR_P (t)
      && (DECL_EXTERNAL (t)
          || decl_function_context (t) != current_function_decl))
    return fold_convert (type, expr);

  return NULL_TREE;
}
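
/* A minimal usage sketch (illustrative): if V is a read-only variable of the
   current function initialized to "N + 4", where N is a read-only parameter,
   then gnat_invariant_expr applied to a use of V looks through the
   temporary, decomposes the addition and returns an expression equivalent to
   "N + 4", since a read-only PARM_DECL can be evaluated anywhere in the
   function; by contrast, a dereference of a pointer that is not read-only
   yields NULL_TREE.  */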