/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                               U T I L S 2                                *
 *                                                                          *
 *                          C Implementation File                          *
 *                                                                          *
 *          Copyright (C) 1992-2016, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software;  you can  redistribute it  and/or modify it under *
 * terms of the  GNU General Public License as published  by the Free Soft- *
 * ware  Foundation;  either version 3,  or (at your option) any later ver- *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY;  without even the  implied warranty of MERCHANTABILITY *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for  more details.  You should have  received a copy of the GNU General  *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed  by the GNAT team at  New York University. *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "vec.h"
#include "alias.h"
#include "tree.h"
#include "inchash.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "stringpool.h"
#include "varasm.h"
#include "flags.h"
#include "toplev.h"
#include "ggc.h"
#include "tree-inline.h"

#include "ada.h"
#include "types.h"
#include "atree.h"
#include "elists.h"
#include "namet.h"
#include "nlists.h"
#include "snames.h"
#include "stringt.h"
#include "uintp.h"
#include "fe.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"
/* Return the base type of TYPE.  */

tree
get_base_type (tree type)
{
  if (TREE_CODE (type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (type))
    type = TREE_TYPE (TYPE_FIELDS (type));

  while (TREE_TYPE (type)
         && (TREE_CODE (type) == INTEGER_TYPE
             || TREE_CODE (type) == REAL_TYPE))
    type = TREE_TYPE (type);

  return type;
}
/* EXP is a GCC tree representing an address.  See if we can find how strictly
   the object at this address is aligned and, if so, return the alignment of
   the object in bits.  Otherwise return 0.  */

unsigned int
known_alignment (tree exp)
{
  unsigned int this_alignment;
  unsigned int lhs, rhs;

  switch (TREE_CODE (exp))
    {
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Conversions between pointers and integers don't change the alignment
         of the underlying object.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 0));
      break;

    case COMPOUND_EXPR:
      /* The value of a COMPOUND_EXPR is that of its second operand.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* If two addresses are added, the alignment of the result is the
         minimum of the two alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));
      this_alignment = MIN (lhs, rhs);
      break;

    case POINTER_PLUS_EXPR:
      /* If this is the pattern built for aligning types, decode it.  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)) == NEGATE_EXPR)
        {
          tree op = TREE_OPERAND (TREE_OPERAND (exp, 1), 1);
          return
            known_alignment (fold_build1 (BIT_NOT_EXPR, TREE_TYPE (op), op));
        }

      /* If we don't know the alignment of the offset, we assume that
         of the base.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));

      if (rhs == 0)
        this_alignment = lhs;
      else
        this_alignment = MIN (lhs, rhs);
      break;

    case COND_EXPR:
      /* If there is a choice between two values, use the smaller one.  */
      lhs = known_alignment (TREE_OPERAND (exp, 1));
      rhs = known_alignment (TREE_OPERAND (exp, 2));
      this_alignment = MIN (lhs, rhs);
      break;

    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT c = TREE_INT_CST_LOW (exp);
        /* The first part of this represents the lowest bit in the constant,
           but it is originally in bytes, not bits.  */
        this_alignment = (c & -c) * BITS_PER_UNIT;
      }
      break;

    case MULT_EXPR:
      /* If we know the alignment of just one side, use it.  Otherwise,
         use the product of the alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));

      if (lhs == 0)
        this_alignment = rhs;
      else if (rhs == 0)
        this_alignment = lhs;
      else
        this_alignment = MIN (lhs * rhs, BIGGEST_ALIGNMENT);
      break;

    case BIT_AND_EXPR:
      /* A bit-and expression is as aligned as the maximum alignment of the
         operands.  We typically get here for a complex lhs and a constant
         negative power of two on the rhs to force an explicit alignment, so
         don't bother looking at the lhs.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case ADDR_EXPR:
      this_alignment = expr_align (TREE_OPERAND (exp, 0));
      break;

    case CALL_EXPR:
      {
        tree t = maybe_inline_call_in_expr (exp);
        if (t)
          return known_alignment (t);
      }

      /* ... fall through ... */

    default:
      /* For other pointer expressions, we assume that the pointed-to object
         is at least as aligned as the pointed-to type.  Beware that we can
         have a dummy type here (e.g. a Taft Amendment type), for which the
         alignment is meaningless and should be ignored.  */
      if (POINTER_TYPE_P (TREE_TYPE (exp))
          && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp))))
        this_alignment = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
      else
        this_alignment = 0;
      break;
    }

  return this_alignment;
}
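
/* Illustrative sketch (an addition to this excerpt, not original code): the
   POINTER_PLUS_EXPR pattern decoded above is the one built by gigi to
   over-align an address, conceptually

     aligned = addr + ((-(uintptr_t) addr) & (ALIGN - 1));

   The offset is a BIT_AND_EXPR whose first operand is a NEGATE_EXPR, so the
   known alignment is recovered from the complement of the mask: for a mask
   of ALIGN - 1, ~(ALIGN - 1) has ALIGN as its lowest set bit, and the
   INTEGER_CST case above turns that into ALIGN * BITS_PER_UNIT.  */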
/* We have a comparison or assignment operation on two types, T1 and T2, which
   are either both array types or both record types.  T1 is assumed to be for
   the left-hand side operand, and T2 for the right-hand side.  Return the
   type that both operands should be converted to for the operation, if any.
   Otherwise return zero.  */

static tree
find_common_type (tree t1, tree t2)
{
  /* ??? As of today, various constructs lead here with types of different
     sizes even when both are constant (e.g. tagged types, packable vs regular
     component types, padded vs unpadded types, ...).  While some of these
     would better be handled upstream (types should be made consistent before
     calling into build_binary_op), some others are really expected and we
     have to be careful.  */

  /* We must avoid writing more than what the target can hold if this is for
     an assignment (the case of tagged types is handled in build_binary_op),
     so we use the LHS type if it is known to be smaller, or of constant size
     while the RHS type is not, whatever the modes.  We also force T1 in case
     of constant size equality, to minimize occurrences of view conversions
     on the LHS of an assignment, except when the LHS is a record type with a
     variant part and the RHS is not, to make the conversion simpler.  */
  if (TREE_CONSTANT (TYPE_SIZE (t1))
      && (!TREE_CONSTANT (TYPE_SIZE (t2))
          || tree_int_cst_lt (TYPE_SIZE (t1), TYPE_SIZE (t2))
          || (TYPE_SIZE (t1) == TYPE_SIZE (t2)
              && !(TREE_CODE (t1) == RECORD_TYPE
                   && TREE_CODE (t2) == RECORD_TYPE
                   && get_variant_part (t1)
                   && !get_variant_part (t2)))))
    return t1;

  /* Otherwise, if the LHS type is non-BLKmode, use it.  Note that we know
     that we will not have any alignment problems since, if we did, the
     non-BLKmode type could not have been used.  */
  if (TYPE_MODE (t1) != BLKmode)
    return t1;

  /* If the RHS type is of constant size, use it whatever the modes.  At
     this point it is known to be smaller, or of constant size while the
     LHS type is not.  */
  if (TREE_CONSTANT (TYPE_SIZE (t2)))
    return t2;

  /* Otherwise, if the RHS type is non-BLKmode, use it.  */
  if (TYPE_MODE (t2) != BLKmode)
    return t2;

  /* In this case, both types have variable size and BLKmode.  It's
     probably best to leave the "type mismatch" because changing it
     could cause a bad self-referential reference.  */
  return NULL_TREE;
}
/* Return an expression tree representing an equality comparison of A1 and A2,
   two objects of type ARRAY_TYPE.  The result should be of type RESULT_TYPE.

   Two arrays are equal in one of two ways: (1) if both have zero length in
   some dimension (not necessarily the same dimension) or (2) if the lengths
   in each dimension are equal and the data is equal.  We perform the length
   tests in as efficient a manner as possible.  */

static tree
compare_arrays (location_t loc, tree result_type, tree a1, tree a2)
{
  tree result = convert (result_type, boolean_true_node);
  tree a1_is_null = convert (result_type, boolean_false_node);
  tree a2_is_null = convert (result_type, boolean_false_node);
  tree t1 = TREE_TYPE (a1);
  tree t2 = TREE_TYPE (a2);
  bool a1_side_effects_p = TREE_SIDE_EFFECTS (a1);
  bool a2_side_effects_p = TREE_SIDE_EFFECTS (a2);
  bool length_zero_p = false;

  /* If the operands have side-effects, they need to be evaluated only once
     in spite of the multiple references in the comparison.  */
  if (a1_side_effects_p)
    a1 = gnat_protect_expr (a1);

  if (a2_side_effects_p)
    a2 = gnat_protect_expr (a2);

  /* Process each dimension separately and compare the lengths.  If any
     dimension has a length known to be zero, set LENGTH_ZERO_P to true
     in order to suppress the comparison of the data at the end.  */
  while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
    {
      tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
      tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
      tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
      tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
      tree length1 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub1, lb1),
                                 size_one_node);
      tree length2 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub2, lb2),
                                 size_one_node);
      tree comparison, this_a1_is_null, this_a2_is_null;

      /* If the length of the first array is a constant, swap our operands
         unless the length of the second array is the constant zero.  */
      if (TREE_CODE (length1) == INTEGER_CST && !integer_zerop (length2))
        {
          tree tem;
          bool btem;

          tem = a1, a1 = a2, a2 = tem;
          tem = t1, t1 = t2, t2 = tem;
          tem = lb1, lb1 = lb2, lb2 = tem;
          tem = ub1, ub1 = ub2, ub2 = tem;
          tem = length1, length1 = length2, length2 = tem;
          tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
          btem = a1_side_effects_p, a1_side_effects_p = a2_side_effects_p,
          a2_side_effects_p = btem;
        }

      /* If the length of the second array is the constant zero, we can just
         use the original stored bounds for the first array and see whether
         last < first holds.  */
      if (integer_zerop (length2))
        {
          tree b = get_base_type (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));

          length_zero_p = true;

          ub1
            = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
          lb1
            = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));

          comparison = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
          comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
          if (EXPR_P (comparison))
            SET_EXPR_LOCATION (comparison, loc);

          this_a1_is_null = comparison;
          this_a2_is_null = convert (result_type, boolean_true_node);
        }

      /* Otherwise, if the length is some other constant value, we know that
         this dimension in the second array cannot be superflat, so we can
         just use its length computed from the actual stored bounds.  */
      else if (TREE_CODE (length2) == INTEGER_CST)
        {
          tree b = get_base_type (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));

          ub1
            = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
          lb1
            = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
          /* Note that we know that UB2 and LB2 are constant and hence
             cannot contain a PLACEHOLDER_EXPR.  */
          ub2
            = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2))));
          lb2
            = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2))));

          comparison
            = fold_build2_loc (loc, EQ_EXPR, result_type,
                               build_binary_op (MINUS_EXPR, b, ub1, lb1),
                               build_binary_op (MINUS_EXPR, b, ub2, lb2));
          comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
          if (EXPR_P (comparison))
            SET_EXPR_LOCATION (comparison, loc);

          this_a1_is_null
            = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);

          this_a2_is_null = convert (result_type, boolean_false_node);
        }

      /* Otherwise, compare the computed lengths.  */
      else
        {
          length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
          length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);

          comparison
            = fold_build2_loc (loc, EQ_EXPR, result_type, length1, length2);

          /* If the length expression is of the form (cond ? val : 0), assume
             that cond is equivalent to (length != 0).  That's guaranteed by
             construction of the array types in gnat_to_gnu_entity.  */
          if (TREE_CODE (length1) == COND_EXPR
              && integer_zerop (TREE_OPERAND (length1, 2)))
            this_a1_is_null
              = invert_truthvalue_loc (loc, TREE_OPERAND (length1, 0));
          else
            this_a1_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
                                               length1, size_zero_node);

          /* Likewise for the second array.  */
          if (TREE_CODE (length2) == COND_EXPR
              && integer_zerop (TREE_OPERAND (length2, 2)))
            this_a2_is_null
              = invert_truthvalue_loc (loc, TREE_OPERAND (length2, 0));
          else
            this_a2_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
                                               length2, size_zero_node);
        }

      /* Append expressions for this dimension to the final expressions.  */
      result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
                                result, comparison);

      a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                    this_a1_is_null, a1_is_null);

      a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                    this_a2_is_null, a2_is_null);

      t1 = TREE_TYPE (t1);
      t2 = TREE_TYPE (t2);
    }

  /* Unless the length of some dimension is known to be zero, compare the
     data in the array.  */
  if (!length_zero_p)
    {
      tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));
      tree comparison;

      if (type)
        {
          a1 = convert (type, a1),
          a2 = convert (type, a2);
        }

      comparison = fold_build2_loc (loc, EQ_EXPR, result_type, a1, a2);

      result
        = build_binary_op (TRUTH_ANDIF_EXPR, result_type, result, comparison);
    }

  /* The result is also true if both sizes are zero.  */
  result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                            build_binary_op (TRUTH_ANDIF_EXPR, result_type,
                                             a1_is_null, a2_is_null),
                            result);

  /* If the operands have side-effects, they need to be evaluated before
     doing the tests above since the place they otherwise would end up
     being evaluated at run time could be wrong.  */
  if (a1_side_effects_p)
    result = build2 (COMPOUND_EXPR, result_type, a1, result);

  if (a2_side_effects_p)
    result = build2 (COMPOUND_EXPR, result_type, a2, result);

  return result;
}
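
/* Illustrative sketch (an addition to this excerpt, not original code): for
   a single dimension, the expression built above boils down to

     (length1 == length2 && a1 == a2) || (length1 == 0 && length2 == 0)

   possibly wrapped in COMPOUND_EXPRs that evaluate A1 and A2 up front when
   they have side-effects, so each operand is evaluated exactly once.  Two
   arrays thus compare equal either element-wise or because both are empty,
   matching the Ada equality semantics for arrays.  */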
/* Return an expression tree representing an equality comparison of P1 and P2,
   two objects of fat pointer type.  The result should be of type RESULT_TYPE.

   Two fat pointers are equal in one of two ways: (1) if both have a null
   pointer to the array or (2) if they contain the same couple of pointers.
   We perform the comparison in as efficient a manner as possible.  */

static tree
compare_fat_pointers (location_t loc, tree result_type, tree p1, tree p2)
{
  tree p1_array, p2_array, p1_bounds, p2_bounds, same_array, same_bounds;
  tree p1_array_is_null, p2_array_is_null;

  /* If either operand has side-effects, they have to be evaluated only once
     in spite of the multiple references to the operand in the comparison.  */
  p1 = gnat_protect_expr (p1);
  p2 = gnat_protect_expr (p2);

  /* The constant folder doesn't fold fat pointer types so we do it here.  */
  if (TREE_CODE (p1) == CONSTRUCTOR)
    p1_array = CONSTRUCTOR_ELT (p1, 0)->value;
  else
    p1_array = build_component_ref (p1, TYPE_FIELDS (TREE_TYPE (p1)), true);

  p1_array_is_null
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array,
                       fold_convert_loc (loc, TREE_TYPE (p1_array),
                                         null_pointer_node));

  if (TREE_CODE (p2) == CONSTRUCTOR)
    p2_array = CONSTRUCTOR_ELT (p2, 0)->value;
  else
    p2_array = build_component_ref (p2, TYPE_FIELDS (TREE_TYPE (p2)), true);

  p2_array_is_null
    = fold_build2_loc (loc, EQ_EXPR, result_type, p2_array,
                       fold_convert_loc (loc, TREE_TYPE (p2_array),
                                         null_pointer_node));

  /* If one of the pointers to the array is null, just compare the other.  */
  if (integer_zerop (p1_array))
    return p2_array_is_null;
  else if (integer_zerop (p2_array))
    return p1_array_is_null;

  /* Otherwise, do the fully-fledged comparison.  */
  same_array
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array, p2_array);

  if (TREE_CODE (p1) == CONSTRUCTOR)
    p1_bounds = CONSTRUCTOR_ELT (p1, 1)->value;
  else
    p1_bounds
      = build_component_ref (p1, DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p1))),
                             true);

  if (TREE_CODE (p2) == CONSTRUCTOR)
    p2_bounds = CONSTRUCTOR_ELT (p2, 1)->value;
  else
    p2_bounds
      = build_component_ref (p2, DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p2))),
                             true);

  same_bounds
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_bounds, p2_bounds);

  /* P1_ARRAY == P2_ARRAY && (P1_ARRAY == NULL || P1_BOUNDS == P2_BOUNDS).  */
  return build_binary_op (TRUTH_ANDIF_EXPR, result_type, same_array,
                          build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                           p1_array_is_null, same_bounds));
}
/* Compute the result of applying OP_CODE to LHS and RHS, where both are of
   type TYPE.  We know that TYPE is a modular type with a nonbinary
   modulus.  */

static tree
nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
                             tree rhs)
{
  tree modulus = TYPE_MODULUS (type);
  unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
  unsigned int precision;
  bool unsignedp = true;
  tree op_type = type;
  tree result;

  /* If this is an addition of a constant, convert it to a subtraction
     of a constant since we can do that faster.  */
  if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
    {
      rhs = fold_build2 (MINUS_EXPR, type, modulus, rhs);
      op_code = MINUS_EXPR;
    }

  /* For the logical operations, we only need PRECISION bits.  For
     addition and subtraction, we need one more and for multiplication we
     need twice as many.  But we never want to make a size smaller than
     our size.  */
  if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
    needed_precision += 1;
  else if (op_code == MULT_EXPR)
    needed_precision *= 2;

  precision = MAX (needed_precision, TYPE_PRECISION (op_type));

  /* Unsigned will do for everything but subtraction.  */
  if (op_code == MINUS_EXPR)
    unsignedp = false;

  /* If our type is the wrong signedness or isn't wide enough, make a new
     type and convert both our operands to it.  */
  if (TYPE_PRECISION (op_type) < precision
      || TYPE_UNSIGNED (op_type) != unsignedp)
    {
      /* Copy the type so we ensure it can be modified to make it modular.  */
      op_type = copy_type (gnat_type_for_size (precision, unsignedp));
      modulus = convert (op_type, modulus);
      SET_TYPE_MODULUS (op_type, modulus);
      TYPE_MODULAR_P (op_type) = 1;
      lhs = convert (op_type, lhs);
      rhs = convert (op_type, rhs);
    }

  /* Do the operation, then we'll fix it up.  */
  result = fold_build2 (op_code, op_type, lhs, rhs);

  /* For multiplication, we have no choice but to do a full modulus
     operation.  However, we want to do this in the narrowest
     possible size.  */
  if (op_code == MULT_EXPR)
    {
      /* Copy the type so we ensure it can be modified to make it modular.  */
      tree div_type = copy_type (gnat_type_for_size (needed_precision, 1));
      modulus = convert (div_type, modulus);
      SET_TYPE_MODULUS (div_type, modulus);
      TYPE_MODULAR_P (div_type) = 1;
      result = convert (op_type,
                        fold_build2 (TRUNC_MOD_EXPR, div_type,
                                     convert (div_type, result), modulus));
    }

  /* For subtraction, add the modulus back if we are negative.  */
  else if (op_code == MINUS_EXPR)
    {
      result = gnat_protect_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
                            fold_build2 (LT_EXPR, boolean_type_node, result,
                                         build_int_cst (op_type, 0)),
                            fold_build2 (PLUS_EXPR, op_type, result, modulus),
                            result);
    }

  /* For the other operations, subtract the modulus if we are >= it.  */
  else
    {
      result = gnat_protect_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
                            fold_build2 (GE_EXPR, boolean_type_node,
                                         result, modulus),
                            fold_build2 (MINUS_EXPR, op_type,
                                         result, modulus),
                            result);
    }

  return convert (type, result);
}
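
/* Illustrative sketch (an addition to this excerpt, not original code): for
   an Ada type declared "type T is mod 7", a multiplication is carried out
   in a wider unsigned type and reduced explicitly, conceptually

     unsigned int wide = (unsigned int) lhs * (unsigned int) rhs;
     result = wide % 7;

   whereas an addition needs only one extra bit followed by a conditional
   subtraction of the modulus, avoiding the division entirely.  */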
/* This page contains routines that implement the Ada semantics with regard
   to atomic objects.  They are fully piggybacked on the middle-end support
   for atomic loads and stores.

   *** Memory barriers and volatile objects ***

   We implement the weakened form of the C.6(16) clause that was introduced
   in Ada 2012 (AI05-117).  Earlier forms of this clause wouldn't have been
   implementable without significant performance hits on modern platforms.

   We also take advantage of the requirements imposed on shared variables by
   9.10 (conditions for sequential actions) to have non-erroneous execution
   and consider that C.6(16) and C.6(17) only prescribe a uniform order of
   volatile updates with regard to sequential actions, i.e. with regard to
   reads or updates of atomic objects.

   As such, an update of an atomic object by a task requires that all earlier
   accesses to volatile objects have completed.  Similarly, later accesses to
   volatile objects cannot be reordered before the update of the atomic object.
   So, memory barriers both before and after the atomic update are needed.

   For a read of an atomic object, to avoid seeing writes of volatile objects
   by a task earlier than by the other tasks, a memory barrier is needed before
   the atomic read.  Finally, to avoid reordering later reads or updates of
   volatile objects to before the atomic read, a barrier is needed after the
   atomic read.

   So, memory barriers are needed before and after atomic reads and updates.
   And, in order to simplify the implementation, we use full memory barriers
   in all cases, i.e. we enforce sequential consistency for atomic accesses.  */
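
/* Illustrative sketch (an addition to this excerpt, not original code): with
   sequential consistency, a read and an update of an atomic 32-bit object
   map onto the middle-end built-ins roughly as

     val = __atomic_load_4 (&obj, __ATOMIC_SEQ_CST);
     __atomic_store_4 (&obj, val, __ATOMIC_SEQ_CST);

   which is what build_atomic_load and build_atomic_store below emit via
   BUILT_IN_ATOMIC_LOAD_N and BUILT_IN_ATOMIC_STORE_N.  */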
/* Return the size of TYPE, which must be a positive power of 2.  */

static unsigned int
resolve_atomic_size (tree type)
{
  unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE_UNIT (type));

  if (size == 1 || size == 2 || size == 4 || size == 8 || size == 16)
    return size;

  /* We shouldn't reach here without having already detected that the size
     isn't compatible with an atomic access.  */
  gcc_assert (Serious_Errors_Detected);

  return 0;
}
/* Build an atomic load for the underlying atomic object in SRC.  SYNC is
   true if the load requires synchronization.  */

tree
build_atomic_load (tree src, bool sync)
{
  tree ptr_type
    = build_pointer_type
      (build_qualified_type (void_type_node,
                             TYPE_QUAL_ATOMIC | TYPE_QUAL_VOLATILE));
  tree mem_model
    = build_int_cst (integer_type_node,
                     sync ? MEMMODEL_SEQ_CST : MEMMODEL_RELAXED);
  tree orig_src = src;
  tree t, addr, val;
  unsigned int size;
  int fncode;

  /* Remove conversions to get the address of the underlying object.  */
  src = remove_conversions (src, false);
  size = resolve_atomic_size (TREE_TYPE (src));
  if (size == 0)
    return orig_src;

  fncode = (int) BUILT_IN_ATOMIC_LOAD_N + exact_log2 (size) + 1;
  t = builtin_decl_implicit ((enum built_in_function) fncode);

  addr = build_unary_op (ADDR_EXPR, ptr_type, src);
  val = build_call_expr (t, 2, addr, mem_model);

  /* First reinterpret the loaded bits in the original type of the load,
     then convert to the expected result type.  */
  t = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (src), val);
  return convert (TREE_TYPE (orig_src), t);
}
/* Build an atomic store from SRC to the underlying atomic object in DEST.
   SYNC is true if the store requires synchronization.  */

tree
build_atomic_store (tree dest, tree src, bool sync)
{
  tree ptr_type
    = build_pointer_type
      (build_qualified_type (void_type_node,
                             TYPE_QUAL_ATOMIC | TYPE_QUAL_VOLATILE));
  tree mem_model
    = build_int_cst (integer_type_node,
                     sync ? MEMMODEL_SEQ_CST : MEMMODEL_RELAXED);
  tree orig_dest = dest;
  tree t, int_type, addr;
  unsigned int size;
  int fncode;

  /* Remove conversions to get the address of the underlying object.  */
  dest = remove_conversions (dest, false);
  size = resolve_atomic_size (TREE_TYPE (dest));
  if (size == 0)
    return build_binary_op (MODIFY_EXPR, NULL_TREE, orig_dest, src);

  fncode = (int) BUILT_IN_ATOMIC_STORE_N + exact_log2 (size) + 1;
  t = builtin_decl_implicit ((enum built_in_function) fncode);
  int_type = gnat_type_for_size (BITS_PER_UNIT * size, 1);

  /* First convert the bits to be stored to the original type of the store,
     then reinterpret them in the effective type.  But if the original type
     is a padded type with the same size, convert to the inner type instead,
     as we don't want to artificially introduce a CONSTRUCTOR here.  */
  if (TYPE_IS_PADDING_P (TREE_TYPE (dest))
      && TYPE_SIZE (TREE_TYPE (dest))
         == TYPE_SIZE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (dest)))))
    src = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (dest))), src);
  else
    src = convert (TREE_TYPE (dest), src);
  src = fold_build1 (VIEW_CONVERT_EXPR, int_type, src);
  addr = build_unary_op (ADDR_EXPR, ptr_type, dest);

  return build_call_expr (t, 3, addr, src, mem_model);
}
/* Build a load-modify-store sequence from SRC to DEST.  GNAT_NODE is used for
   the location of the sequence.  Note that, even though the load and the store
   are both atomic, the sequence itself is not atomic.  */

tree
build_load_modify_store (tree dest, tree src, Node_Id gnat_node)
{
  /* We will be modifying DEST below so we build a copy.  */
  dest = copy_node (dest);
  tree ref = dest;

  while (handled_component_p (ref))
    {
      /* The load should already have been generated during the translation
         of the GNAT destination tree; find it in the GNU tree.  */
      if (TREE_CODE (TREE_OPERAND (ref, 0)) == VIEW_CONVERT_EXPR)
        {
          tree op = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
          if (TREE_CODE (op) == CALL_EXPR && call_is_atomic_load (op))
            {
              tree type = TREE_TYPE (TREE_OPERAND (ref, 0));
              tree t = CALL_EXPR_ARG (op, 0);
              tree obj, temp, stmt;

              /* Find out the loaded object.  */
              if (TREE_CODE (t) == NOP_EXPR)
                t = TREE_OPERAND (t, 0);
              if (TREE_CODE (t) == ADDR_EXPR)
                obj = TREE_OPERAND (t, 0);
              else
                obj = build1 (INDIRECT_REF, type, t);

              /* Drop atomic and volatile qualifiers for the temporary.  */
              type = TYPE_MAIN_VARIANT (type);

              /* And drop BLKmode, if need be, to put it into a register.  */
              if (TYPE_MODE (type) == BLKmode)
                {
                  unsigned int size = tree_to_uhwi (TYPE_SIZE (type));
                  type = copy_type (type);
                  SET_TYPE_MODE (type, mode_for_size (size, MODE_INT, 0));
                }

              /* Create the temporary by inserting a SAVE_EXPR.  */
              temp = build1 (SAVE_EXPR, type,
                             build1 (VIEW_CONVERT_EXPR, type, op));
              TREE_OPERAND (ref, 0) = temp;

              start_stmt_group ();

              /* Build the modify of the temporary.  */
              stmt = build_binary_op (MODIFY_EXPR, NULL_TREE, dest, src);
              add_stmt_with_node (stmt, gnat_node);

              /* Build the store to the object.  */
              stmt = build_atomic_store (obj, temp, false);
              add_stmt_with_node (stmt, gnat_node);

              return end_stmt_group ();
            }
        }

      TREE_OPERAND (ref, 0) = copy_node (TREE_OPERAND (ref, 0));
      ref = TREE_OPERAND (ref, 0);
    }

  /* Something went wrong earlier if we have not found the atomic load.  */
  gcc_unreachable ();
}
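
/* Illustrative sketch (an addition to this excerpt, not original code): for
   an assignment to a component of an atomic object, the sequence built
   above is conceptually

     tmp = atomic load of obj;     (generated earlier, found in the tree)
     tmp.field = src;
     atomic store of tmp to obj;

   Each access is individually atomic, but the whole read-modify-write is
   not, as the comment before the function points out.  */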
/* Make a binary operation of kind OP_CODE.  RESULT_TYPE is the type
   desired for the result.  Usually the operation is to be performed
   in that type.  For INIT_EXPR and MODIFY_EXPR, RESULT_TYPE must be
   NULL_TREE.  For ARRAY_REF, RESULT_TYPE may be NULL_TREE, in which
   case the type to be used will be derived from the operands.

   This function is very much unlike the ones for C and C++ since we
   have already done any type conversion and matching required.  All we
   have to do here is validate the work done by SEM and handle subtypes.  */

tree
build_binary_op (enum tree_code op_code, tree result_type,
                 tree left_operand, tree right_operand)
{
  tree left_type = TREE_TYPE (left_operand);
  tree right_type = TREE_TYPE (right_operand);
  tree left_base_type = get_base_type (left_type);
  tree right_base_type = get_base_type (right_type);
  tree operation_type = result_type;
  tree best_type = NULL_TREE;
  tree modulus, result;
  bool has_side_effects = false;

  if (operation_type
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  if (operation_type
      && TREE_CODE (operation_type) == INTEGER_TYPE
      && TYPE_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  modulus = (operation_type
             && TREE_CODE (operation_type) == INTEGER_TYPE
             && TYPE_MODULAR_P (operation_type)
             ? TYPE_MODULUS (operation_type) : NULL_TREE);
  switch (op_code)
    {
    case INIT_EXPR:
    case MODIFY_EXPR:
      gcc_checking_assert (!result_type);

      /* If there were integral or pointer conversions on the LHS, remove
         them; we'll be putting them back below if needed.  Likewise for
         conversions between array and record types, except for justified
         modular types.  But don't do this if the right operand is not
         BLKmode (for packed arrays) unless we are not changing the mode.  */
      while ((CONVERT_EXPR_P (left_operand)
              || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
             && (((INTEGRAL_TYPE_P (left_type)
                   || POINTER_TYPE_P (left_type))
                  && (INTEGRAL_TYPE_P (TREE_TYPE
                                       (TREE_OPERAND (left_operand, 0)))
                      || POINTER_TYPE_P (TREE_TYPE
                                         (TREE_OPERAND (left_operand, 0)))))
                 || (((TREE_CODE (left_type) == RECORD_TYPE
                       && !TYPE_JUSTIFIED_MODULAR_P (left_type))
                      || TREE_CODE (left_type) == ARRAY_TYPE)
                     && ((TREE_CODE (TREE_TYPE
                                     (TREE_OPERAND (left_operand, 0)))
                          == RECORD_TYPE)
                         || (TREE_CODE (TREE_TYPE
                                        (TREE_OPERAND (left_operand, 0)))
                             == ARRAY_TYPE))
                     && (TYPE_MODE (right_type) == BLKmode
                         || (TYPE_MODE (left_type)
                             == TYPE_MODE (TREE_TYPE
                                           (TREE_OPERAND
                                            (left_operand, 0))))))))
        {
          left_operand = TREE_OPERAND (left_operand, 0);
          left_type = TREE_TYPE (left_operand);
        }

      /* If a class-wide type may be involved, force use of the RHS type.  */
      if ((TREE_CODE (right_type) == RECORD_TYPE
           || TREE_CODE (right_type) == UNION_TYPE)
          && TYPE_ALIGN_OK (right_type))
        operation_type = right_type;

      /* If we are copying between padded objects with compatible types, use
         the padded view of the objects, this is very likely more efficient.
         Likewise for a padded object that is assigned a constructor, if we
         can convert the constructor to the inner type, to avoid putting a
         VIEW_CONVERT_EXPR on the LHS.  But don't do so if we wouldn't have
         actually copied anything.  */
      else if (TYPE_IS_PADDING_P (left_type)
               && TREE_CONSTANT (TYPE_SIZE (left_type))
               && ((TREE_CODE (right_operand) == COMPONENT_REF
                    && TYPE_MAIN_VARIANT (left_type)
                       == TYPE_MAIN_VARIANT
                          (TREE_TYPE (TREE_OPERAND (right_operand, 0))))
                   || (TREE_CODE (right_operand) == CONSTRUCTOR
                       && !CONTAINS_PLACEHOLDER_P
                           (DECL_SIZE (TYPE_FIELDS (left_type)))))
               && !integer_zerop (TYPE_SIZE (right_type)))
        {
          /* We make an exception for a BLKmode type padding a non-BLKmode
             inner type and do the conversion of the LHS right away, since
             unchecked_convert wouldn't do it properly.  */
          if (TYPE_MODE (left_type) == BLKmode
              && TYPE_MODE (right_type) != BLKmode
              && TREE_CODE (right_operand) != CONSTRUCTOR)
            {
              operation_type = right_type;
              left_operand = convert (operation_type, left_operand);
              left_type = operation_type;
            }
          else
            operation_type = left_type;
        }

      /* If we have a call to a function that returns with variable size, use
         the RHS type in case we want to use the return slot optimization.  */
      else if (TREE_CODE (right_operand) == CALL_EXPR
               && return_type_with_variable_size_p (right_type))
        operation_type = right_type;

      /* Find the best type to use for copying between aggregate types.  */
      else if (((TREE_CODE (left_type) == ARRAY_TYPE
                 && TREE_CODE (right_type) == ARRAY_TYPE)
                || (TREE_CODE (left_type) == RECORD_TYPE
                    && TREE_CODE (right_type) == RECORD_TYPE))
               && (best_type = find_common_type (left_type, right_type)))
        operation_type = best_type;

      /* Otherwise use the LHS type.  */
      else
        operation_type = left_type;

      /* Ensure everything on the LHS is valid.  If we have a field reference,
         strip anything that get_inner_reference can handle.  Then remove any
         conversions between types having the same code and mode.  And mark
         VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE.  When done, we must have
         either an INDIRECT_REF, a NULL_EXPR, a SAVE_EXPR or a DECL node.  */
      result = left_operand;
      while (true)
        {
          tree restype = TREE_TYPE (result);

          if (TREE_CODE (result) == COMPONENT_REF
              || TREE_CODE (result) == ARRAY_REF
              || TREE_CODE (result) == ARRAY_RANGE_REF)
            while (handled_component_p (result))
              result = TREE_OPERAND (result, 0);
          else if (TREE_CODE (result) == REALPART_EXPR
                   || TREE_CODE (result) == IMAGPART_EXPR
                   || (CONVERT_EXPR_P (result)
                       && (((TREE_CODE (restype)
                             == TREE_CODE (TREE_TYPE
                                           (TREE_OPERAND (result, 0))))
                            && (TYPE_MODE (TREE_TYPE
                                           (TREE_OPERAND (result, 0)))
                                == TYPE_MODE (restype)))
                           || TYPE_ALIGN_OK (restype))))
            result = TREE_OPERAND (result, 0);
          else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
            {
              TREE_ADDRESSABLE (result) = 1;
              result = TREE_OPERAND (result, 0);
            }
          else
            break;
        }

      gcc_assert (TREE_CODE (result) == INDIRECT_REF
                  || TREE_CODE (result) == NULL_EXPR
                  || TREE_CODE (result) == SAVE_EXPR
                  || DECL_P (result));

      /* Convert the right operand to the operation type unless it is
         either already of the correct type or if the type involves a
         placeholder, since the RHS may not have the same record type.  */
      if (operation_type != right_type
          && !CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type)))
        {
          right_operand = convert (operation_type, right_operand);
          right_type = operation_type;
        }

      /* If the left operand is not of the same type as the operation
         type, wrap it up in a VIEW_CONVERT_EXPR.  */
      if (left_type != operation_type)
        left_operand = unchecked_convert (operation_type, left_operand, false);

      has_side_effects = true;
      modulus = NULL_TREE;
      break;
    case ARRAY_REF:
      if (!operation_type)
        operation_type = TREE_TYPE (left_type);

      /* ... fall through ... */

    case ARRAY_RANGE_REF:
      /* First look through conversion between type variants.  Note that
         this changes neither the operation type nor the type domain.  */
      if (TREE_CODE (left_operand) == VIEW_CONVERT_EXPR
          && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (left_operand, 0)))
             == TYPE_MAIN_VARIANT (left_type))
        {
          left_operand = TREE_OPERAND (left_operand, 0);
          left_type = TREE_TYPE (left_operand);
        }

      /* For a range, make sure the element type is consistent.  */
      if (op_code == ARRAY_RANGE_REF
          && TREE_TYPE (operation_type) != TREE_TYPE (left_type))
        operation_type = build_array_type (TREE_TYPE (left_type),
                                           TYPE_DOMAIN (operation_type));

      /* Then convert the right operand to its base type.  This will prevent
         unneeded sign conversions when sizetype is wider than integer.  */
      right_operand = convert (right_base_type, right_operand);
      right_operand = convert_to_index_type (right_operand);
      modulus = NULL_TREE;
      break;
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      gcc_checking_assert
        (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
      operation_type = left_base_type;
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);
      break;

    case GE_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
      gcc_checking_assert
        (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
      /* If either operand is a NULL_EXPR, just return a new one.  */
      if (TREE_CODE (left_operand) == NULL_EXPR)
        return build2 (op_code, result_type,
                       build1 (NULL_EXPR, integer_type_node,
                               TREE_OPERAND (left_operand, 0)),
                       integer_zero_node);

      else if (TREE_CODE (right_operand) == NULL_EXPR)
        return build2 (op_code, result_type,
                       build1 (NULL_EXPR, integer_type_node,
                               TREE_OPERAND (right_operand, 0)),
                       integer_zero_node);

      /* If either object is a justified modular type, get the
         fields from within.  */
      if (TREE_CODE (left_type) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (left_type))
        {
          left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
                                  left_operand);
          left_type = TREE_TYPE (left_operand);
          left_base_type = get_base_type (left_type);
        }

      if (TREE_CODE (right_type) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (right_type))
        {
          right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
                                   right_operand);
          right_type = TREE_TYPE (right_operand);
          right_base_type = get_base_type (right_type);
        }

      /* If both objects are arrays, compare them specially.  */
      if ((TREE_CODE (left_type) == ARRAY_TYPE
           || (TREE_CODE (left_type) == INTEGER_TYPE
               && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
          && (TREE_CODE (right_type) == ARRAY_TYPE
              || (TREE_CODE (right_type) == INTEGER_TYPE
                  && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
        {
          result = compare_arrays (input_location,
                                   result_type, left_operand, right_operand);
          if (op_code == NE_EXPR)
            result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
          else
            gcc_assert (op_code == EQ_EXPR);

          return result;
        }

      /* Otherwise, the base types must be the same, unless they are both fat
         pointer types or record types.  In the latter case, use the best type
         and convert both operands to that type.  */
      if (left_base_type != right_base_type)
        {
          if (TYPE_IS_FAT_POINTER_P (left_base_type)
              && TYPE_IS_FAT_POINTER_P (right_base_type))
            {
              gcc_assert (TYPE_MAIN_VARIANT (left_base_type)
                          == TYPE_MAIN_VARIANT (right_base_type));
              best_type = left_base_type;
            }

          else if (TREE_CODE (left_base_type) == RECORD_TYPE
                   && TREE_CODE (right_base_type) == RECORD_TYPE)
            {
              /* The only way this is permitted is if both types have the same
                 name.  In that case, one of them must not be self-referential.
                 Use it as the best type.  Even better if it has a fixed
                 size.  */
              gcc_assert (TYPE_NAME (left_base_type)
                          && TYPE_NAME (left_base_type)
                             == TYPE_NAME (right_base_type));

              if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
                best_type = left_base_type;
              else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
                best_type = right_base_type;
              else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
                best_type = left_base_type;
              else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
                best_type = right_base_type;
              else
                gcc_unreachable ();
            }

          else if (POINTER_TYPE_P (left_base_type)
                   && POINTER_TYPE_P (right_base_type))
            {
              gcc_assert (TREE_TYPE (left_base_type)
                          == TREE_TYPE (right_base_type));
              best_type = left_base_type;
            }
          else
            gcc_unreachable ();

          left_operand = convert (best_type, left_operand);
          right_operand = convert (best_type, right_operand);
        }
      else
        {
          left_operand = convert (left_base_type, left_operand);
          right_operand = convert (right_base_type, right_operand);
        }

      /* If both objects are fat pointers, compare them specially.  */
      if (TYPE_IS_FAT_POINTER_P (left_base_type))
        {
          result
            = compare_fat_pointers (input_location,
                                    result_type, left_operand, right_operand);
          if (op_code == NE_EXPR)
            result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
          else
            gcc_assert (op_code == EQ_EXPR);

          return result;
        }

      modulus = NULL_TREE;
      break;
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* The RHS of a shift can be any type.  Also, ignore any modulus
         (we used to abort, but this is needed for unchecked conversion
         to modular types).  Otherwise, processing is the same as normal.  */
      gcc_assert (operation_type == left_base_type);
      modulus = NULL_TREE;
      left_operand = convert (operation_type, left_operand);
      break;

    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      /* For binary modulus, if the inputs are in range, so are the
         outputs.  */
      if (modulus && integer_pow2p (modulus))
        modulus = NULL_TREE;
      goto common;

    case COMPLEX_EXPR:
      gcc_assert (TREE_TYPE (result_type) == left_base_type
                  && TREE_TYPE (result_type) == right_base_type);
      left_operand = convert (left_base_type, left_operand);
      right_operand = convert (right_base_type, right_operand);
      break;

    case TRUNC_DIV_EXPR:  case TRUNC_MOD_EXPR:
    case CEIL_DIV_EXPR:   case CEIL_MOD_EXPR:
    case FLOOR_DIV_EXPR:  case FLOOR_MOD_EXPR:
    case ROUND_DIV_EXPR:  case ROUND_MOD_EXPR:
      /* These always produce results lower than either operand.  */
      modulus = NULL_TREE;
      goto common;

    case POINTER_PLUS_EXPR:
      gcc_assert (operation_type == left_base_type
                  && sizetype == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (sizetype, right_operand);
      break;

    case PLUS_NOMOD_EXPR:
    case MINUS_NOMOD_EXPR:
      if (op_code == PLUS_NOMOD_EXPR)
        op_code = PLUS_EXPR;
      else
        op_code = MINUS_EXPR;
      modulus = NULL_TREE;

      /* ... fall through ... */

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* Avoid doing arithmetic in ENUMERAL_TYPE or BOOLEAN_TYPE like the
         other compilers.  Contrary to C, Ada doesn't allow arithmetic in
         these types but can generate addition/subtraction for Succ/Pred.  */
      if (operation_type
          && (TREE_CODE (operation_type) == ENUMERAL_TYPE
              || TREE_CODE (operation_type) == BOOLEAN_TYPE))
        operation_type = left_base_type = right_base_type
          = gnat_type_for_mode (TYPE_MODE (operation_type),
                                TYPE_UNSIGNED (operation_type));

      /* ... fall through ... */

    default:
    common:
      /* The result type should be the same as the base types of both
         operands (and they should be the same).  Convert everything to
         the result type.  */
      gcc_assert (operation_type == left_base_type
                  && left_base_type == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);
    }
  if (modulus && !integer_pow2p (modulus))
    {
      result = nonbinary_modular_operation (op_code, operation_type,
                                            left_operand, right_operand);
      modulus = NULL_TREE;
    }
  /* If either operand is a NULL_EXPR, just return a new one.  */
  else if (TREE_CODE (left_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
  else if (TREE_CODE (right_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
  else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
    result = fold (build4 (op_code, operation_type, left_operand,
                           right_operand, NULL_TREE, NULL_TREE));
  else if (op_code == INIT_EXPR || op_code == MODIFY_EXPR)
    result = build2 (op_code, void_type_node, left_operand, right_operand);
  else
    result
      = fold_build2 (op_code, operation_type, left_operand, right_operand);

  if (TREE_CONSTANT (result))
    ;
  else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
    {
      if (TYPE_VOLATILE (operation_type))
        TREE_THIS_VOLATILE (result) = 1;
    }
  else
    TREE_CONSTANT (result)
      |= (TREE_CONSTANT (left_operand) && TREE_CONSTANT (right_operand));

  TREE_SIDE_EFFECTS (result) |= has_side_effects;

  /* If we are working with modular types, perform the MOD operation
     if something above hasn't eliminated the need for it.  */
  if (modulus)
    result = fold_build2 (FLOOR_MOD_EXPR, operation_type, result,
                          convert (operation_type, modulus));

  if (result_type && result_type != operation_type)
    result = convert (result_type, result);

  return result;
}
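
/* Illustrative usage sketch (an addition to this excerpt, not original
   code): gigi invokes this routine with operands already converted by the
   front-end, e.g.

     gnu_result = build_binary_op (PLUS_EXPR, gnu_type, gnu_lhs, gnu_rhs);

   where GNU_TYPE is the GCC tree for the Ada result type; if GNU_TYPE is
   modular with a nonbinary modulus, the reduction is added here rather
   than by the caller.  */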
/* Similar, but for unary operations.  */

tree
build_unary_op (enum tree_code op_code, tree result_type, tree operand)
{
  tree type = TREE_TYPE (operand);
  tree base_type = get_base_type (type);
  tree operation_type = result_type;
  tree result;

  if (operation_type
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  if (operation_type
      && TREE_CODE (operation_type) == INTEGER_TYPE
      && TYPE_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  switch (op_code)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      if (!operation_type)
        result_type = operation_type = TREE_TYPE (type);
      else
        gcc_assert (result_type == TREE_TYPE (type));

      result = fold_build1 (op_code, operation_type, operand);
      break;

    case TRUTH_NOT_EXPR:
      gcc_checking_assert
        (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
      result = invert_truthvalue_loc (EXPR_LOCATION (operand), operand);
      /* When not optimizing, fold the result as invert_truthvalue_loc
         doesn't fold the result of comparisons.  This is intended to undo
         the trick used for boolean rvalues in gnat_to_gnu.  */
      if (!optimize)
        result = fold (result);
      break;
    case ATTR_ADDR_EXPR:
    case ADDR_EXPR:
      switch (TREE_CODE (operand))
        {
        case INDIRECT_REF:
        case UNCONSTRAINED_ARRAY_REF:
          result = TREE_OPERAND (operand, 0);

          /* Make sure the type here is a pointer, not a reference.
             GCC wants pointer types for function addresses.  */
          if (!result_type)
            result_type = build_pointer_type (type);

          /* If the underlying object can alias everything, propagate the
             property since we are effectively retrieving the object.  */
          if (POINTER_TYPE_P (TREE_TYPE (result))
              && TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (result)))
            {
              if (TREE_CODE (result_type) == POINTER_TYPE
                  && !TYPE_REF_CAN_ALIAS_ALL (result_type))
                result_type
                  = build_pointer_type_for_mode (TREE_TYPE (result_type),
                                                 TYPE_MODE (result_type),
                                                 true);
              else if (TREE_CODE (result_type) == REFERENCE_TYPE
                       && !TYPE_REF_CAN_ALIAS_ALL (result_type))
                result_type
                  = build_reference_type_for_mode (TREE_TYPE (result_type),
                                                   TYPE_MODE (result_type),
                                                   true);
            }
          break;

        case NULL_EXPR:
          result = operand;
          TREE_TYPE (result) = type = build_pointer_type (type);
          break;

        case COMPOUND_EXPR:
          /* Fold a compound expression if it has unconstrained array type
             since the middle-end cannot handle it.  But we don't do it in
             the general case because it may introduce aliasing issues if the
             first operand is an indirect assignment and the second operand
             the corresponding address, e.g. for an allocator.  However do
             it for a return value to expose it for later recognition.  */
          if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE
              || (TREE_CODE (TREE_OPERAND (operand, 1)) == VAR_DECL
                  && DECL_RETURN_VALUE_P (TREE_OPERAND (operand, 1))))
            {
              result = build_unary_op (ADDR_EXPR, result_type,
                                       TREE_OPERAND (operand, 1));
              result = build2 (COMPOUND_EXPR, TREE_TYPE (result),
                               TREE_OPERAND (operand, 0), result);
              break;
            }

          goto common;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
        case COMPONENT_REF:
        case BIT_FIELD_REF:
          /* If this is for 'Address, find the address of the prefix and add
             the offset to the field.  Otherwise, do this the normal way.  */
          if (op_code == ATTR_ADDR_EXPR)
            {
              HOST_WIDE_INT bitsize;
              HOST_WIDE_INT bitpos;
              tree offset, inner;
              machine_mode mode;
              int unsignedp, reversep, volatilep;

              inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
                                           &mode, &unsignedp, &reversep,
                                           &volatilep, false);

              /* If INNER is a padding type whose field has a self-referential
                 size, convert to that inner type.  We know the offset is zero
                 and we need to have that type visible.  */
              if (type_is_padding_self_referential (TREE_TYPE (inner)))
                inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
                                 inner);

              /* Compute the offset as a byte offset from INNER.  */
              if (!offset)
                offset = size_zero_node;

              offset = size_binop (PLUS_EXPR, offset,
                                   size_int (bitpos / BITS_PER_UNIT));

              /* Take the address of INNER, convert it to a pointer to our
                 type and add the offset.  */
              inner = build_unary_op (ADDR_EXPR,
                                      build_pointer_type (TREE_TYPE (operand)),
                                      inner);
              result = build_binary_op (POINTER_PLUS_EXPR, TREE_TYPE (inner),
                                        inner, offset);
              break;
            }

          goto common;

        case CONSTRUCTOR:
          /* If this is just a constructor for a padded record, we can
             just take the address of the single field and convert it to
             a pointer to our type.  */
          if (TYPE_IS_PADDING_P (type))
            {
              result
                = build_unary_op (ADDR_EXPR,
                                  build_pointer_type (TREE_TYPE (operand)),
                                  CONSTRUCTOR_ELT (operand, 0)->value);
              break;
            }

          goto common;

        case NOP_EXPR:
          if (AGGREGATE_TYPE_P (type)
              && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
            return build_unary_op (ADDR_EXPR, result_type,
                                   TREE_OPERAND (operand, 0));

          /* ... fall through ... */

        case VIEW_CONVERT_EXPR:
          /* If this is just a variant conversion or if the conversion doesn't
             change the mode, get the result type from this type and go down.
             This is needed for conversions of CONST_DECLs, to eventually get
             to the address of their CORRESPONDING_VARs.  */
          if ((TYPE_MAIN_VARIANT (type)
               == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (operand, 0))))
              || (TYPE_MODE (type) != BLKmode
                  && (TYPE_MODE (type)
                      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0))))))
            return build_unary_op (ADDR_EXPR,
                                   (result_type ? result_type
                                    : build_pointer_type (type)),
                                   TREE_OPERAND (operand, 0));
          goto common;

        case CONST_DECL:
          operand = DECL_CONST_CORRESPONDING_VAR (operand);

          /* ... fall through ... */

        default:
        common:

          /* If we are taking the address of a padded record whose field
             contains a template, take the address of the field.  */
          if (TYPE_IS_PADDING_P (type)
              && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
              && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
            {
              type = TREE_TYPE (TYPE_FIELDS (type));
              operand = convert (type, operand);
            }

          gnat_mark_addressable (operand);
          result = build_fold_addr_expr (operand);
        }

      TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
      break;
    case INDIRECT_REF:
      {
        tree t = remove_conversions (operand, false);
        bool can_never_be_null = DECL_P (t) && DECL_CAN_NEVER_BE_NULL_P (t);

        /* If TYPE is a thin pointer, either first retrieve the base if this
           is an expression with an offset built for the initialization of an
           object with an unconstrained nominal subtype, or else convert to
           the fat pointer.  */
        if (TYPE_IS_THIN_POINTER_P (type))
          {
            tree rec_type = TREE_TYPE (type);

            if (TREE_CODE (operand) == POINTER_PLUS_EXPR
                && TREE_OPERAND (operand, 1)
                   == byte_position (DECL_CHAIN (TYPE_FIELDS (rec_type)))
                && TREE_CODE (TREE_OPERAND (operand, 0)) == NOP_EXPR)
              {
                operand = TREE_OPERAND (TREE_OPERAND (operand, 0), 0);
                type = TREE_TYPE (operand);
              }
            else if (TYPE_UNCONSTRAINED_ARRAY (rec_type))
              {
                operand
                  = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (rec_type)),
                             operand);
                type = TREE_TYPE (operand);
              }
          }

        /* If we want to refer to an unconstrained array, use the appropriate
           expression.  But this will never survive down to the back-end.  */
        if (TYPE_IS_FAT_POINTER_P (type))
          {
            result = build1 (UNCONSTRAINED_ARRAY_REF,
                             TYPE_UNCONSTRAINED_ARRAY (type), operand);
            TREE_READONLY (result)
              = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
          }

        /* If we are dereferencing an ADDR_EXPR, return its operand.  */
        else if (TREE_CODE (operand) == ADDR_EXPR)
          result = TREE_OPERAND (operand, 0);

        /* Otherwise, build and fold the indirect reference.  */
        else
          {
            result = build_fold_indirect_ref (operand);
            TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
          }

        if (!TYPE_IS_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)))
          {
            TREE_SIDE_EFFECTS (result) = 1;
            if (TREE_CODE (result) == INDIRECT_REF)
              TREE_THIS_VOLATILE (result)
                = TYPE_VOLATILE (TREE_TYPE (result));
          }

        if ((TREE_CODE (result) == INDIRECT_REF
             || TREE_CODE (result) == UNCONSTRAINED_ARRAY_REF)
            && can_never_be_null)
          TREE_THIS_NOTRAP (result) = 1;

        break;
      }
1573 case NEGATE_EXPR:
1574 case BIT_NOT_EXPR:
1576 tree modulus = ((operation_type
1577 && TREE_CODE (operation_type) == INTEGER_TYPE
1578 && TYPE_MODULAR_P (operation_type))
1579 ? TYPE_MODULUS (operation_type) : NULL_TREE);
1580 int mod_pow2 = modulus && integer_pow2p (modulus);
1582 /* If this is a modular type, there are various possibilities
1583 depending on the operation and whether the modulus is a
1584 power of two or not. */
1586 if (modulus)
1588 gcc_assert (operation_type == base_type);
1589 operand = convert (operation_type, operand);
1591 /* The fastest in the negate case for binary modulus is
1592 the straightforward code; the TRUNC_MOD_EXPR below
1593 is an AND operation. */
1594 if (op_code == NEGATE_EXPR && mod_pow2)
1595 result = fold_build2 (TRUNC_MOD_EXPR, operation_type,
1596 fold_build1 (NEGATE_EXPR, operation_type,
1597 operand),
1598 modulus);
1600 /* For nonbinary negate case, return zero for zero operand,
1601 else return the modulus minus the operand. If the modulus
1602 is a power of two minus one, we can do the subtraction
1603 as an XOR since it is equivalent and faster on most machines. */
1604 else if (op_code == NEGATE_EXPR && !mod_pow2)
1606 if (integer_pow2p (fold_build2 (PLUS_EXPR, operation_type,
1607 modulus,
1608 build_int_cst (operation_type,
1609 1))))
1610 result = fold_build2 (BIT_XOR_EXPR, operation_type,
1611 operand, modulus);
1612 else
1613 result = fold_build2 (MINUS_EXPR, operation_type,
1614 modulus, operand);
1616 result = fold_build3 (COND_EXPR, operation_type,
1617 fold_build2 (NE_EXPR,
1618 boolean_type_node,
1619 operand,
1620 build_int_cst
1621 (operation_type, 0)),
1622 result, operand);
1624 else
1626 /* For the NOT cases, we need a constant equal to
1627 the modulus minus one. For a binary modulus, we
1628 XOR against the constant and subtract the operand from
1629 that constant for nonbinary modulus. */
1631 tree cnst = fold_build2 (MINUS_EXPR, operation_type, modulus,
1632 build_int_cst (operation_type, 1));
1634 if (mod_pow2)
1635 result = fold_build2 (BIT_XOR_EXPR, operation_type,
1636 operand, cnst);
1637 else
1638 result = fold_build2 (MINUS_EXPR, operation_type,
1639 cnst, operand);
1642 break;
1646 /* ... fall through ... */
1648 default:
1649 gcc_assert (operation_type == base_type);
1650 result = fold_build1 (op_code, operation_type,
1651 convert (operation_type, operand));
1654 if (result_type && TREE_TYPE (result) != result_type)
1655 result = convert (result_type, result);
1657 return result;
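/* Editor's note: a minimal standalone sketch, with hypothetical names and
   not part of GNAT, of the scalar identities used by the modular NEGATE
   and NOT cases above, assuming unsigned arithmetic wide enough for the
   modulus M.  Negation modulo M maps 0 to 0 and x to M - x otherwise;
   when M + 1 is a power of two (M is all ones in binary), M - x equals
   M ^ x because the subtraction can never borrow.  */

static unsigned int
example_modular_negate (unsigned int x, unsigned int m)
{
  unsigned int result;

  /* When m + 1 is a power of two, the subtraction is an XOR.  */
  if (((m + 1) & m) == 0)
    result = m ^ x;
  else
    result = m - x;

  /* Zero is its own negation, as the COND_EXPR built above ensures.  */
  return x != 0 ? result : 0;
}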
1660 /* Similar, but for COND_EXPR. */
1662 tree
1663 build_cond_expr (tree result_type, tree condition_operand,
1664 tree true_operand, tree false_operand)
1666 bool addr_p = false;
1667 tree result;
1669 /* The front-end verified that the result, true and false operands
1670 have the same base type. Convert everything to the result type. */
1671 true_operand = convert (result_type, true_operand);
1672 false_operand = convert (result_type, false_operand);
1674 /* If the result type is unconstrained, take the address of the operands and
1675 then dereference the result. Likewise if the result type is passed by
1676 reference, because creating a temporary of this type is not allowed. */
1677 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1678 || TYPE_IS_BY_REFERENCE_P (result_type)
1679 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1681 result_type = build_pointer_type (result_type);
1682 true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
1683 false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
1684 addr_p = true;
1687 result = fold_build3 (COND_EXPR, result_type, condition_operand,
1688 true_operand, false_operand);
1690 /* If we have a common SAVE_EXPR (possibly surrounded by arithmetic)
1691 in both arms, make sure it gets evaluated by moving it ahead of the
1692 conditional expression. This is necessary because it is evaluated
1693 in only one place at run time and would otherwise be uninitialized
1694 in one of the arms. */
1695 true_operand = skip_simple_arithmetic (true_operand);
1696 false_operand = skip_simple_arithmetic (false_operand);
1698 if (true_operand == false_operand && TREE_CODE (true_operand) == SAVE_EXPR)
1699 result = build2 (COMPOUND_EXPR, result_type, true_operand, result);
1701 if (addr_p)
1702 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1704 return result;
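/* Editor's note: an illustrative C analogue, with hypothetical names, of
   the SAVE_EXPR hoisting performed above.  Only one arm of a conditional
   runs at run time, so a temporary shared by both arms must be set up
   ahead of the conditional, which is what the COMPOUND_EXPR arranges.  */

static int
example_cond_hoist (int c, int e)
{
  int tmp = e;                   /* the SAVE_EXPR, evaluated up front */
  return c ? tmp + 1 : tmp - 1;  /* both arms can now safely reuse it */
}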
1707 /* Similar, but for COMPOUND_EXPR. */
1709 tree
1710 build_compound_expr (tree result_type, tree stmt_operand, tree expr_operand)
1712 bool addr_p = false;
1713 tree result;
1715 /* If the result type is unconstrained, take the address of the operand and
1716 then dereference the result. Likewise if the result type is passed by
1717 reference, but this is natively handled in the gimplifier. */
1718 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1719 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1721 result_type = build_pointer_type (result_type);
1722 expr_operand = build_unary_op (ADDR_EXPR, result_type, expr_operand);
1723 addr_p = true;
1726 result = fold_build2 (COMPOUND_EXPR, result_type, stmt_operand,
1727 expr_operand);
1729 if (addr_p)
1730 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1732 return result;
1735 /* Conveniently construct a function call expression. FNDECL names the
1736 function to be called, N is the number of arguments, and the "..."
1737 parameters are the argument expressions. Unlike build_call_expr,
1738 this doesn't fold the call, so it will always return a CALL_EXPR. */
1740 tree
1741 build_call_n_expr (tree fndecl, int n, ...)
1743 va_list ap;
1744 tree fntype = TREE_TYPE (fndecl);
1745 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
1747 va_start (ap, n);
1748 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
1749 va_end (ap);
1750 return fn;
1753 /* Build a goto to LABEL for a raise, with an optional call to Local_Raise.
1754 MSG gives the exception's identity for the call to Local_Raise, if any. */
1756 static tree
1757 build_goto_raise (tree label, int msg)
1759 tree gnu_result = build1 (GOTO_EXPR, void_type_node, label);
1760 Entity_Id local_raise = Get_Local_Raise_Call_Entity ();
1762 /* If Local_Raise is present, build Local_Raise (Exception'Identity). */
1763 if (Present (local_raise))
1765 tree gnu_local_raise
1766 = gnat_to_gnu_entity (local_raise, NULL_TREE, false);
1767 tree gnu_exception_entity
1768 = gnat_to_gnu_entity (Get_RT_Exception_Entity (msg), NULL_TREE, false);
1769 tree gnu_call
1770 = build_call_n_expr (gnu_local_raise, 1,
1771 build_unary_op (ADDR_EXPR, NULL_TREE,
1772 gnu_exception_entity));
1773 gnu_result
1774 = build2 (COMPOUND_EXPR, void_type_node, gnu_call, gnu_result);
1777 return gnu_result;
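/* Editor's note: a rough C rendering of the tree built above when
   Local_Raise is present; the names below are stand-ins, not the real
   runtime entities.  The runtime is first told the exception's identity,
   then control transfers to the local handler's label.  */

static void
example_goto_raise (void)
{
  extern void example_local_raise (void *);  /* stand-in for Local_Raise */
  extern char example_exception;             /* stand-in for the entity */

  example_local_raise (&example_exception);  /* the call part */
  goto handler;                              /* the GOTO_EXPR part */

 handler:
  return;
}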
1780 /* Expand the SLOC of GNAT_NODE, if present, into tree location information
1781 pointed to by FILENAME, LINE and COL. Fall back to the current location
1782 if GNAT_NODE is absent or has no SLOC. */
1784 static void
1785 expand_sloc (Node_Id gnat_node, tree *filename, tree *line, tree *col)
1787 const char *str;
1788 int line_number, column_number;
1790 if (Debug_Flag_NN || Exception_Locations_Suppressed)
1792 str = "";
1793 line_number = 0;
1794 column_number = 0;
1796 else if (Present (gnat_node) && Sloc (gnat_node) != No_Location)
1798 str = Get_Name_String
1799 (Debug_Source_Name (Get_Source_File_Index (Sloc (gnat_node))));
1800 line_number = Get_Logical_Line_Number (Sloc (gnat_node));
1801 column_number = Get_Column_Number (Sloc (gnat_node));
1803 else
1805 str = lbasename (LOCATION_FILE (input_location));
1806 line_number = LOCATION_LINE (input_location);
1807 column_number = LOCATION_COLUMN (input_location);
1810 const int len = strlen (str);
1811 *filename = build_string (len, str);
1812 TREE_TYPE (*filename) = build_array_type (char_type_node,
1813 build_index_type (size_int (len)));
1814 *line = build_int_cst (NULL_TREE, line_number);
1815 if (col)
1816 *col = build_int_cst (NULL_TREE, column_number);
1819 /* Build a call to a function that raises an exception and passes file name
1820 and line number, if requested. MSG says which exception function to call.
1821 GNAT_NODE is the node conveying the source location for which the error
1822 should be signaled, or Empty in which case the error is signaled for the
1823 current location. KIND says which kind of exception node this is for,
1824 among N_Raise_{Constraint,Storage,Program}_Error. */
1826 tree
1827 build_call_raise (int msg, Node_Id gnat_node, char kind)
1829 tree fndecl = gnat_raise_decls[msg];
1830 tree label = get_exception_label (kind);
1831 tree filename, line;
1833 /* If this is to be done as a goto, handle that case. */
1834 if (label)
1835 return build_goto_raise (label, msg);
1837 expand_sloc (gnat_node, &filename, &line, NULL);
1839 return
1840 build_call_n_expr (fndecl, 2,
1841 build1 (ADDR_EXPR,
1842 build_pointer_type (char_type_node),
1843 filename),
1844 line);
1847 /* Similar to build_call_raise, with extra information about the column
1848 where the check failed. */
1850 tree
1851 build_call_raise_column (int msg, Node_Id gnat_node, char kind)
1853 tree fndecl = gnat_raise_decls_ext[msg];
1854 tree label = get_exception_label (kind);
1855 tree filename, line, col;
1857 /* If this is to be done as a goto, handle that case. */
1858 if (label)
1859 return build_goto_raise (label, msg);
1861 expand_sloc (gnat_node, &filename, &line, &col);
1863 return
1864 build_call_n_expr (fndecl, 3,
1865 build1 (ADDR_EXPR,
1866 build_pointer_type (char_type_node),
1867 filename),
1868 line, col);
1871 /* Similar to build_call_raise_column, for an index or range check exception,
1872 with extra information of the form "INDEX out of range FIRST..LAST". */
1874 tree
1875 build_call_raise_range (int msg, Node_Id gnat_node, char kind,
1876 tree index, tree first, tree last)
1878 tree fndecl = gnat_raise_decls_ext[msg];
1879 tree label = get_exception_label (kind);
1880 tree filename, line, col;
1882 /* If this is to be done as a goto, handle that case. */
1883 if (label)
1884 return build_goto_raise (label, msg);
1886 expand_sloc (gnat_node, &filename, &line, &col);
1888 return
1889 build_call_n_expr (fndecl, 6,
1890 build1 (ADDR_EXPR,
1891 build_pointer_type (char_type_node),
1892 filename),
1893 line, col,
1894 convert (integer_type_node, index),
1895 convert (integer_type_node, first),
1896 convert (integer_type_node, last));
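/* Editor's note: an assumed sketch of the C profiles matching the three
   argument lists built by build_call_raise and its variants above; the
   names are hypothetical, as the real declarations come from
   gnat_raise_decls and gnat_raise_decls_ext.  */

extern void example_rcheck (char *file, int line);
extern void example_rcheck_column (char *file, int line, int col);
extern void example_rcheck_range (char *file, int line, int col,
                                  int index, int first, int last);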
1899 /* qsort comparator for the bit positions of two constructor elements
1900 of record components. */
1902 static int
1903 compare_elmt_bitpos (const PTR rt1, const PTR rt2)
1905 const constructor_elt * const elmt1 = (const constructor_elt * const) rt1;
1906 const constructor_elt * const elmt2 = (const constructor_elt * const) rt2;
1907 const_tree const field1 = elmt1->index;
1908 const_tree const field2 = elmt2->index;
1909 const int ret
1910 = tree_int_cst_compare (bit_position (field1), bit_position (field2));
1912 return ret ? ret : (int) (DECL_UID (field1) - DECL_UID (field2));
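/* Editor's note: a standalone analogue of the comparator above, with
   hypothetical types.  The secondary key turns the comparison into a
   total order, so qsort yields a deterministic result even for fields
   that share a bit position.  */

struct example_elt
{
  int bitpos;  /* primary key, like bit_position */
  int uid;     /* tiebreak, like DECL_UID */
};

static int
example_elt_cmp (const void *p1, const void *p2)
{
  const struct example_elt *a = (const struct example_elt *) p1;
  const struct example_elt *b = (const struct example_elt *) p2;

  if (a->bitpos != b->bitpos)
    return a->bitpos < b->bitpos ? -1 : 1;

  return a->uid - b->uid;
}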
1915 /* Return a CONSTRUCTOR of TYPE whose elements are V. */
1917 tree
1918 gnat_build_constructor (tree type, vec<constructor_elt, va_gc> *v)
1920 bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
1921 bool read_only = true;
1922 bool side_effects = false;
1923 tree result, obj, val;
1924 unsigned int n_elmts;
1926 /* Scan the elements to see if they are all constant or if any has side
1927 effects, to let us set global flags on the resulting constructor. Count
1928 the elements along the way for possible sorting purposes below. */
1929 FOR_EACH_CONSTRUCTOR_ELT (v, n_elmts, obj, val)
1931 /* The predicate must be in keeping with output_constructor. */
1932 if ((!TREE_CONSTANT (val) && !TREE_STATIC (val))
1933 || (TREE_CODE (type) == RECORD_TYPE
1934 && CONSTRUCTOR_BITFIELD_P (obj)
1935 && !initializer_constant_valid_for_bitfield_p (val))
1936 || !initializer_constant_valid_p (val,
1937 TREE_TYPE (val),
1938 TYPE_REVERSE_STORAGE_ORDER (type)))
1939 allconstant = false;
1941 if (!TREE_READONLY (val))
1942 read_only = false;
1944 if (TREE_SIDE_EFFECTS (val))
1945 side_effects = true;
1948 /* For record types with constant components only, sort field list
1949 by increasing bit position. This is necessary to ensure the
1950 constructor can be output as static data. */
1951 if (allconstant && TREE_CODE (type) == RECORD_TYPE && n_elmts > 1)
1952 v->qsort (compare_elmt_bitpos);
1954 result = build_constructor (type, v);
1955 CONSTRUCTOR_NO_CLEARING (result) = 1;
1956 TREE_CONSTANT (result) = TREE_STATIC (result) = allconstant;
1957 TREE_SIDE_EFFECTS (result) = side_effects;
1958 TREE_READONLY (result) = TYPE_READONLY (type) || read_only || allconstant;
1959 return result;
1962 /* Return a COMPONENT_REF to access FIELD in RECORD, or NULL_TREE if the field
1963 is not found in the record. Don't fold the result if NO_FOLD is true. */
1965 static tree
1966 build_simple_component_ref (tree record, tree field, bool no_fold)
1968 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (record));
1969 tree ref;
1971 gcc_assert (RECORD_OR_UNION_TYPE_P (type) && COMPLETE_TYPE_P (type));
1973 /* Try to fold a conversion from another record or union type unless the type
1974 contains a placeholder, as it might be needed for a later substitution. */
1975 if (TREE_CODE (record) == VIEW_CONVERT_EXPR
1976 && RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (record, 0)))
1977 && !type_contains_placeholder_p (type))
1979 tree op = TREE_OPERAND (record, 0);
1981 /* If this is an unpadding operation, convert the underlying object to
1982 the unpadded type directly. */
1983 if (TYPE_IS_PADDING_P (type) && field == TYPE_FIELDS (type))
1984 return convert (TREE_TYPE (field), op);
1986 /* Otherwise try to access FIELD directly in the underlying type, but
1987 make sure that the form of the reference doesn't change too much;
1988 this can happen for an unconstrained bit-packed array type whose
1989 constrained form can be an integer type. */
1990 ref = build_simple_component_ref (op, field, no_fold);
1991 if (ref && TREE_CODE (TREE_TYPE (ref)) == TREE_CODE (TREE_TYPE (field)))
1992 return ref;
1995 /* If this field is not in the specified record, see if we can find a field
1996 in the specified record whose original field is the same as this one. */
1997 if (DECL_CONTEXT (field) != type)
1999 tree new_field;
2001 /* First loop through normal components. */
2002 for (new_field = TYPE_FIELDS (type);
2003 new_field;
2004 new_field = DECL_CHAIN (new_field))
2005 if (SAME_FIELD_P (field, new_field))
2006 break;
2008 /* Next, loop through DECL_INTERNAL_P components if we haven't found the
2009 component in the first search. Doing this search in two steps is
2010 required to avoid hidden homonymous fields in the _Parent field. */
2011 if (!new_field)
2012 for (new_field = TYPE_FIELDS (type);
2013 new_field;
2014 new_field = DECL_CHAIN (new_field))
2015 if (DECL_INTERNAL_P (new_field)
2016 && RECORD_OR_UNION_TYPE_P (TREE_TYPE (new_field)))
2018 tree field_ref
2019 = build_simple_component_ref (record, new_field, no_fold);
2020 ref = build_simple_component_ref (field_ref, field, no_fold);
2021 if (ref)
2022 return ref;
2025 field = new_field;
2028 if (!field)
2029 return NULL_TREE;
2031 /* If the field's offset has overflowed, do not try to access it, as doing
2032 so may trigger sanity checks deeper in the back-end. Note that we don't
2033 need to warn since this will be done on trying to declare the object. */
2034 if (TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST
2035 && TREE_OVERFLOW (DECL_FIELD_OFFSET (field)))
2036 return NULL_TREE;
2038 ref = build3 (COMPONENT_REF, TREE_TYPE (field), record, field, NULL_TREE);
2040 if (TREE_READONLY (record)
2041 || TREE_READONLY (field)
2042 || TYPE_READONLY (type))
2043 TREE_READONLY (ref) = 1;
2045 if (TREE_THIS_VOLATILE (record)
2046 || TREE_THIS_VOLATILE (field)
2047 || TYPE_VOLATILE (type))
2048 TREE_THIS_VOLATILE (ref) = 1;
2050 if (no_fold)
2051 return ref;
2053 /* The generic folder may punt in this case because the inner array type
2054 can be self-referential, but folding is in fact not problematic. */
2055 if (TREE_CODE (record) == CONSTRUCTOR
2056 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (record)))
2058 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (record);
2059 unsigned HOST_WIDE_INT idx;
2060 tree index, value;
2061 FOR_EACH_CONSTRUCTOR_ELT (elts, idx, index, value)
2062 if (index == field)
2063 return value;
2064 return ref;
2067 return fold (ref);
2070 /* Likewise, but return NULL_EXPR and generate a Constraint_Error if the
2071 field is not found in the record. */
2073 tree
2074 build_component_ref (tree record, tree field, bool no_fold)
2076 tree ref = build_simple_component_ref (record, field, no_fold);
2077 if (ref)
2078 return ref;
2080 /* Assume this is an invalid user field so raise Constraint_Error. */
2081 return build1 (NULL_EXPR, TREE_TYPE (field),
2082 build_call_raise (CE_Discriminant_Check_Failed, Empty,
2083 N_Raise_Constraint_Error));
2086 /* Helper for build_call_alloc_dealloc, with arguments to be interpreted
2087 identically. Process the case where a GNAT_PROC to call is provided. */
2089 static inline tree
2090 build_call_alloc_dealloc_proc (tree gnu_obj, tree gnu_size, tree gnu_type,
2091 Entity_Id gnat_proc, Entity_Id gnat_pool)
2093 tree gnu_proc = gnat_to_gnu (gnat_proc);
2094 tree gnu_call;
2096 /* A storage pool's underlying type is a record type (for both predefined
2097 storage pools and GNAT simple storage pools). The secondary stack uses
2098 the same mechanism, but its pool object (SS_Pool) is an integer. */
2099 if (Is_Record_Type (Underlying_Type (Etype (gnat_pool))))
2101 /* The size is the third parameter; the alignment parameter
2102 has the same type. */
2103 Entity_Id gnat_size_type
2104 = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
2105 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2107 tree gnu_pool = gnat_to_gnu (gnat_pool);
2108 tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
2109 tree gnu_align = size_int (TYPE_ALIGN (gnu_type) / BITS_PER_UNIT);
2111 gnu_size = convert (gnu_size_type, gnu_size);
2112 gnu_align = convert (gnu_size_type, gnu_align);
2114 /* The first arg is always the address of the storage pool; next
2115 comes the address of the object, for a deallocator, then the
2116 size and alignment. */
2117 if (gnu_obj)
2118 gnu_call = build_call_n_expr (gnu_proc, 4, gnu_pool_addr, gnu_obj,
2119 gnu_size, gnu_align);
2120 else
2121 gnu_call = build_call_n_expr (gnu_proc, 3, gnu_pool_addr,
2122 gnu_size, gnu_align);
2125 /* Secondary stack case. */
2126 else
2128 /* The size is the second parameter. */
2129 Entity_Id gnat_size_type
2130 = Etype (Next_Formal (First_Formal (gnat_proc)));
2131 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2133 gnu_size = convert (gnu_size_type, gnu_size);
2135 /* For a deallocator, the first arg is the address of the
2136 object, followed by the size; otherwise, just the size. */
2137 if (gnu_obj)
2138 gnu_call = build_call_n_expr (gnu_proc, 2, gnu_obj, gnu_size);
2139 else
2140 gnu_call = build_call_n_expr (gnu_proc, 1, gnu_size);
2143 return gnu_call;
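/* Editor's note: an assumed sketch of the two argument-list conventions
   materialized above, with hypothetical names and types; the real
   profiles come from GNAT_PROC's formals, and return conventions are
   omitted.  For a storage pool, the pool address comes first and an
   alignment is passed; for the secondary stack, neither is present.
   The object address only appears for deallocation.  */

extern void example_pool_alloc (void *pool, long size, long align);
extern void example_pool_free (void *pool, void *obj, long size, long align);
extern void example_ss_alloc (long size);
extern void example_ss_free (void *obj, long size);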
2146 /* Helper for build_call_alloc_dealloc, to build and return an allocator for
2147 DATA_SIZE bytes aimed at containing a DATA_TYPE object, using the default
2148 __gnat_malloc allocator. Honor DATA_TYPE alignments greater than what the
2149 latter offers. */
2151 static inline tree
2152 maybe_wrap_malloc (tree data_size, tree data_type, Node_Id gnat_node)
2154 /* When the DATA_TYPE alignment is stricter than what malloc offers
2155 (super-aligned case), we allocate an "aligning" wrapper type and return
2156 the address of its single data field with the malloc's return value
2157 stored just in front. */
2159 unsigned int data_align = TYPE_ALIGN (data_type);
2160 unsigned int system_allocator_alignment
2161 = get_target_system_allocator_alignment () * BITS_PER_UNIT;
2163 tree aligning_type
2164 = ((data_align > system_allocator_alignment)
2165 ? make_aligning_type (data_type, data_align, data_size,
2166 system_allocator_alignment,
2167 POINTER_SIZE / BITS_PER_UNIT,
2168 gnat_node)
2169 : NULL_TREE);
2171 tree size_to_malloc
2172 = aligning_type ? TYPE_SIZE_UNIT (aligning_type) : data_size;
2174 tree malloc_ptr = build_call_n_expr (malloc_decl, 1, size_to_malloc);
2176 if (aligning_type)
2178 /* Latch malloc's return value and get a pointer to the aligning field
2179 first. */
2180 tree storage_ptr = gnat_protect_expr (malloc_ptr);
2182 tree aligning_record_addr
2183 = convert (build_pointer_type (aligning_type), storage_ptr);
2185 tree aligning_record
2186 = build_unary_op (INDIRECT_REF, NULL_TREE, aligning_record_addr);
2188 tree aligning_field
2189 = build_component_ref (aligning_record, TYPE_FIELDS (aligning_type),
2190 false);
2192 tree aligning_field_addr
2193 = build_unary_op (ADDR_EXPR, NULL_TREE, aligning_field);
2195 /* Then arrange to store the allocator's return value just in front
2196 of the data and return the address of the data field. */
2197 tree storage_ptr_slot_addr
2198 = build_binary_op (POINTER_PLUS_EXPR, ptr_type_node,
2199 convert (ptr_type_node, aligning_field_addr),
2200 size_int (-(HOST_WIDE_INT) POINTER_SIZE
2201 / BITS_PER_UNIT));
2203 tree storage_ptr_slot
2204 = build_unary_op (INDIRECT_REF, NULL_TREE,
2205 convert (build_pointer_type (ptr_type_node),
2206 storage_ptr_slot_addr));
2208 return
2209 build2 (COMPOUND_EXPR, TREE_TYPE (aligning_field_addr),
2210 build_binary_op (INIT_EXPR, NULL_TREE,
2211 storage_ptr_slot, storage_ptr),
2212 aligning_field_addr);
2214 else
2215 return malloc_ptr;
2218 /* Helper for build_call_alloc_dealloc, to release a DATA_TYPE object
2219 designated by DATA_PTR using the __gnat_free entry point. */
2221 static inline tree
2222 maybe_wrap_free (tree data_ptr, tree data_type)
2224 /* In the regular alignment case, we pass the data pointer straight to free.
2225 In the super-aligned case, we need to retrieve the initial allocator
2226 return value, stored in front of the data block at allocation time. */
2228 unsigned int data_align = TYPE_ALIGN (data_type);
2229 unsigned int system_allocator_alignment
2230 = get_target_system_allocator_alignment () * BITS_PER_UNIT;
2232 tree free_ptr;
2234 if (data_align > system_allocator_alignment)
2236 /* DATA_FRONT_PTR (void *)
2237 = (void *)DATA_PTR - sizeof (void *) */
2238 tree data_front_ptr
2239 = build_binary_op
2240 (POINTER_PLUS_EXPR, ptr_type_node,
2241 convert (ptr_type_node, data_ptr),
2242 size_int (-(HOST_WIDE_INT) POINTER_SIZE / BITS_PER_UNIT));
2244 /* FREE_PTR (void *) = *(void **)DATA_FRONT_PTR */
2245 free_ptr
2246 = build_unary_op
2247 (INDIRECT_REF, NULL_TREE,
2248 convert (build_pointer_type (ptr_type_node), data_front_ptr));
2250 else
2251 free_ptr = data_ptr;
2253 return build_call_n_expr (free_decl, 1, free_ptr);
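/* Editor's note: a self-contained C sketch, with hypothetical names and
   not the GNAT runtime, of the scheme implemented by maybe_wrap_malloc
   and maybe_wrap_free for a power-of-two ALIGN: over-allocate, round the
   data address up, latch malloc's raw return value in the pointer-sized
   slot just in front of the data, and retrieve it at free time.  */

#include <stdint.h>
#include <stdlib.h>

static void *
example_aligned_malloc (size_t size, size_t align)
{
  char *raw = (char *) malloc (size + align + sizeof (void *));
  uintptr_t data;

  if (!raw)
    return NULL;

  /* Align the address past the slot reserved for the raw pointer.  */
  data = ((uintptr_t) raw + sizeof (void *) + align - 1)
         & ~(uintptr_t) (align - 1);

  /* Store the allocator's return value just in front of the data.  */
  ((void **) data)[-1] = raw;

  return (void *) data;
}

static void
example_aligned_free (void *data)
{
  if (data)
    free (((void **) data)[-1]);
}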
2256 /* Build a GCC tree to call an allocation or deallocation function.
2257 If GNU_OBJ is nonzero, it is an object to deallocate. Otherwise,
2258 generate an allocator.
2260 GNU_SIZE is the number of bytes to allocate and GNU_TYPE is the contained
2261 object type, used to determine the to-be-honored address alignment.
2262 GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the storage
2263 pool to use. If not present, malloc and free are used. GNAT_NODE is used
2264 to provide an error location for restriction violation messages. */
2266 tree
2267 build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, tree gnu_type,
2268 Entity_Id gnat_proc, Entity_Id gnat_pool,
2269 Node_Id gnat_node)
2271 gnu_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (gnu_size, gnu_obj);
2273 /* Explicit procedure to call? It is assumed to deal with the type
2274 alignment constraints. */
2275 if (Present (gnat_proc))
2276 return build_call_alloc_dealloc_proc (gnu_obj, gnu_size, gnu_type,
2277 gnat_proc, gnat_pool);
2279 /* Otherwise, object to "free" or "malloc" with possible special processing
2280 for alignments stricter than what the default allocator honors. */
2281 else if (gnu_obj)
2282 return maybe_wrap_free (gnu_obj, gnu_type);
2283 else
2285 /* Assert that we can no longer be called with this special pool. */
2286 gcc_assert (gnat_pool != -1);
2288 /* Check that we aren't violating the associated restriction. */
2289 if (!(Nkind (gnat_node) == N_Allocator && Comes_From_Source (gnat_node)))
2291 Check_No_Implicit_Heap_Alloc (gnat_node);
2292 if (Has_Task (Etype (gnat_node)))
2293 Check_No_Implicit_Task_Alloc (gnat_node);
2294 if (Has_Protected (Etype (gnat_node)))
2295 Check_No_Implicit_Protected_Alloc (gnat_node);
2297 return maybe_wrap_malloc (gnu_size, gnu_type, gnat_node);
2301 /* Build a GCC tree that corresponds to allocating an object of TYPE whose
2302 initial value is INIT, if INIT is nonzero. Convert the expression to
2303 RESULT_TYPE, which must be some pointer type, and return the result.
2305 GNAT_PROC and GNAT_POOL optionally give the procedure to call and
2306 the storage pool to use. GNAT_NODE is used to provide an error
2307 location for restriction violation messages. If IGNORE_INIT_TYPE is
2308 true, ignore the type of INIT for the purpose of determining the size;
2309 this will cause the maximum size to be allocated if TYPE is of
2310 self-referential size. */
2312 tree
2313 build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
2314 Entity_Id gnat_pool, Node_Id gnat_node, bool ignore_init_type)
2316 tree size, storage, storage_deref, storage_init;
2318 /* If the initializer is present and is a NULL_EXPR, just return a new one. */
2319 if (init && TREE_CODE (init) == NULL_EXPR)
2320 return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));
2322 /* If the initializer is present and is a COND_EXPR, deal with each branch. */
2323 else if (init && TREE_CODE (init) == COND_EXPR)
2324 return build3 (COND_EXPR, result_type, TREE_OPERAND (init, 0),
2325 build_allocator (type, TREE_OPERAND (init, 1), result_type,
2326 gnat_proc, gnat_pool, gnat_node,
2327 ignore_init_type),
2328 build_allocator (type, TREE_OPERAND (init, 2), result_type,
2329 gnat_proc, gnat_pool, gnat_node,
2330 ignore_init_type));
2332 /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
2333 sizes of the object and its template. Allocate the whole thing and
2334 fill in the parts that are known. */
2335 else if (TYPE_IS_FAT_OR_THIN_POINTER_P (result_type))
2337 tree storage_type
2338 = build_unc_object_type_from_ptr (result_type, type,
2339 get_identifier ("ALLOC"), false);
2340 tree template_type = TREE_TYPE (TYPE_FIELDS (storage_type));
2341 tree storage_ptr_type = build_pointer_type (storage_type);
2343 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
2344 init);
2346 /* If the size overflows, pass -1 so Storage_Error will be raised. */
2347 if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
2348 size = size_int (-1);
2350 storage = build_call_alloc_dealloc (NULL_TREE, size, storage_type,
2351 gnat_proc, gnat_pool, gnat_node);
2352 storage = convert (storage_ptr_type, gnat_protect_expr (storage));
2353 storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2354 TREE_THIS_NOTRAP (storage_deref) = 1;
2356 /* If there is an initializing expression, then make a constructor for
2357 the entire object including the bounds and copy it into the object.
2358 If there is no initializing expression, just set the bounds. */
2359 if (init)
2361 vec<constructor_elt, va_gc> *v;
2362 vec_alloc (v, 2);
2364 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (storage_type),
2365 build_template (template_type, type, init));
2366 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (storage_type)),
2367 init);
2368 storage_init
2369 = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref,
2370 gnat_build_constructor (storage_type, v));
2372 else
2373 storage_init
2374 = build_binary_op (INIT_EXPR, NULL_TREE,
2375 build_component_ref (storage_deref,
2376 TYPE_FIELDS (storage_type),
2377 false),
2378 build_template (template_type, type, NULL_TREE));
2380 return build2 (COMPOUND_EXPR, result_type,
2381 storage_init, convert (result_type, storage));
2384 size = TYPE_SIZE_UNIT (type);
2386 /* If we have an initializing expression, see if its size is simpler
2387 than the size from the type. */
2388 if (!ignore_init_type && init && TYPE_SIZE_UNIT (TREE_TYPE (init))
2389 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
2390 || CONTAINS_PLACEHOLDER_P (size)))
2391 size = TYPE_SIZE_UNIT (TREE_TYPE (init));
2393 /* If the size is still self-referential, reference the initializing
2394 expression, if it is present. If not, this must have been a
2395 call to allocate a library-level object, in which case we use
2396 the maximum size. */
2397 if (CONTAINS_PLACEHOLDER_P (size))
2399 if (!ignore_init_type && init)
2400 size = substitute_placeholder_in_expr (size, init);
2401 else
2402 size = max_size (size, true);
2405 /* If the size overflows, pass -1 so Storage_Error will be raised. */
2406 if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
2407 size = size_int (-1);
2409 storage = convert (result_type,
2410 build_call_alloc_dealloc (NULL_TREE, size, type,
2411 gnat_proc, gnat_pool,
2412 gnat_node));
2414 /* If we have an initial value, protect the new address, assign the value
2415 and return the address with a COMPOUND_EXPR. */
2416 if (init)
2418 storage = gnat_protect_expr (storage);
2419 storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2420 TREE_THIS_NOTRAP (storage_deref) = 1;
2421 storage_init
2422 = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref, init);
2423 return build2 (COMPOUND_EXPR, result_type, storage_init, storage);
2426 return storage;
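/* Editor's note: for the fat-pointer branch of build_allocator above,
   the storage object laid out for an unconstrained array corresponds,
   rendered in C with hypothetical names and bound types, to roughly the
   following; the constructor fills both fields when an initializer is
   present, otherwise only the bounds are set.  */

struct example_template
{
  int lb;  /* lower bound */
  int ub;  /* upper bound */
};

struct example_unc_storage
{
  struct example_template bounds;  /* build_template's value */
  char data[];                     /* the array component proper */
};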
2429 /* Indicate that we need to take the address of T and that it therefore
2430 should not be allocated in a register. Return true if successful. */
2432 bool
2433 gnat_mark_addressable (tree t)
2435 while (true)
2436 switch (TREE_CODE (t))
2438 case ADDR_EXPR:
2439 case COMPONENT_REF:
2440 case ARRAY_REF:
2441 case ARRAY_RANGE_REF:
2442 case REALPART_EXPR:
2443 case IMAGPART_EXPR:
2444 case VIEW_CONVERT_EXPR:
2445 case NON_LVALUE_EXPR:
2446 CASE_CONVERT:
2447 t = TREE_OPERAND (t, 0);
2448 break;
2450 case COMPOUND_EXPR:
2451 t = TREE_OPERAND (t, 1);
2452 break;
2454 case CONSTRUCTOR:
2455 TREE_ADDRESSABLE (t) = 1;
2456 return true;
2458 case VAR_DECL:
2459 case PARM_DECL:
2460 case RESULT_DECL:
2461 TREE_ADDRESSABLE (t) = 1;
2462 return true;
2464 case FUNCTION_DECL:
2465 TREE_ADDRESSABLE (t) = 1;
2466 return true;
2468 case CONST_DECL:
2469 return DECL_CONST_CORRESPONDING_VAR (t)
2470 && gnat_mark_addressable (DECL_CONST_CORRESPONDING_VAR (t));
2472 default:
2473 return true;
2477 /* Return true if EXP is a stable expression for the purpose of the functions
2478 below and, therefore, can be returned unmodified by them. We accept things
2479 that are actual constants or that have already been handled. */
2481 static bool
2482 gnat_stable_expr_p (tree exp)
2484 enum tree_code code = TREE_CODE (exp);
2485 return TREE_CONSTANT (exp) || code == NULL_EXPR || code == SAVE_EXPR;
2488 /* Save EXP for later use or reuse. This is equivalent to save_expr in tree.c
2489 but we know how to handle our own nodes. */
2491 tree
2492 gnat_save_expr (tree exp)
2494 tree type = TREE_TYPE (exp);
2495 enum tree_code code = TREE_CODE (exp);
2497 if (gnat_stable_expr_p (exp))
2498 return exp;
2500 if (code == UNCONSTRAINED_ARRAY_REF)
2502 tree t = build1 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)));
2503 TREE_READONLY (t) = TYPE_READONLY (type);
2504 return t;
2507 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2508 This may be more efficient, but will also allow us to more easily find
2509 the match for the PLACEHOLDER_EXPR. */
2510 if (code == COMPONENT_REF
2511 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2512 return build3 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)),
2513 TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));
2515 return save_expr (exp);
2518 /* Protect EXP for immediate reuse. This is a variant of gnat_save_expr that
2519 is optimized under the assumption that EXP's value doesn't change before
2520 its subsequent reuse(s) except through its potential reevaluation. */
2522 tree
2523 gnat_protect_expr (tree exp)
2525 tree type = TREE_TYPE (exp);
2526 enum tree_code code = TREE_CODE (exp);
2528 if (gnat_stable_expr_p (exp))
2529 return exp;
2531 /* If EXP has no side effects, we theoretically don't need to do anything.
2532 However, we may be recursively passed more and more complex expressions
2533 involving checks which will be reused multiple times and eventually be
2534 unshared for gimplification; in order to avoid a complexity explosion
2535 at that point, we protect any expressions more complex than a simple
2536 arithmetic expression. */
2537 if (!TREE_SIDE_EFFECTS (exp))
2539 tree inner = skip_simple_arithmetic (exp);
2540 if (!EXPR_P (inner) || REFERENCE_CLASS_P (inner))
2541 return exp;
2544 /* If this is a conversion, protect what's inside the conversion. */
2545 if (code == NON_LVALUE_EXPR
2546 || CONVERT_EXPR_CODE_P (code)
2547 || code == VIEW_CONVERT_EXPR)
2548 return build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2550 /* If we're indirectly referencing something, we only need to protect the
2551 address since the data itself can't change in these situations. */
2552 if (code == INDIRECT_REF || code == UNCONSTRAINED_ARRAY_REF)
2554 tree t = build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2555 TREE_READONLY (t) = TYPE_READONLY (type);
2556 return t;
2559 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2560 This may be more efficient, but will also allow us to more easily find
2561 the match for the PLACEHOLDER_EXPR. */
2562 if (code == COMPONENT_REF
2563 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2564 return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
2565 TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));
2567 /* If this is a fat pointer or a scalar, just make a SAVE_EXPR. Likewise
2568 for a CALL_EXPR, since large objects are returned via invisible reference
2569 in most ABIs, so the temporary will be filled directly by the callee. */
2570 if (TYPE_IS_FAT_POINTER_P (type)
2571 || !AGGREGATE_TYPE_P (type)
2572 || code == CALL_EXPR)
2573 return save_expr (exp);
2575 /* Otherwise, take a reference: protect the address and dereference it. */
2576 return
2577 build_unary_op (INDIRECT_REF, type,
2578 save_expr (build_unary_op (ADDR_EXPR,
2579 build_reference_type (type),
2580 exp)));
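/* Editor's note: the fallback above corresponds, in C terms, to saving
   the address once and re-dereferencing it at every use, as in this
   illustrative sketch with hypothetical names.  */

static int
example_protect_via_address (int *obj)
{
  int *const saved = obj;  /* SAVE_EXPR of the ADDR_EXPR */
  return *saved + *saved;  /* each reuse is an INDIRECT_REF */
}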
2583 /* This is equivalent to stabilize_reference_1 in tree.c but we take an extra
2584 argument to force evaluation of everything. */
2586 static tree
2587 gnat_stabilize_reference_1 (tree e, void *data)
2589 const bool force = *(bool *)data;
2590 enum tree_code code = TREE_CODE (e);
2591 tree type = TREE_TYPE (e);
2592 tree result;
2594 if (gnat_stable_expr_p (e))
2595 return e;
2597 switch (TREE_CODE_CLASS (code))
2599 case tcc_exceptional:
2600 case tcc_declaration:
2601 case tcc_comparison:
2602 case tcc_expression:
2603 case tcc_reference:
2604 case tcc_vl_exp:
2605 /* If this is a COMPONENT_REF of a fat pointer, save the entire
2606 fat pointer. This may be more efficient, but will also allow
2607 us to more easily find the match for the PLACEHOLDER_EXPR. */
2608 if (code == COMPONENT_REF
2609 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (e, 0))))
2610 result
2611 = build3 (code, type,
2612 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data),
2613 TREE_OPERAND (e, 1), TREE_OPERAND (e, 2));
2614 /* If the expression has side-effects, then encase it in a SAVE_EXPR
2615 so that it will only be evaluated once. */
2616 /* The tcc_reference and tcc_comparison classes could be handled as
2617 below, but it is generally faster to only evaluate them once. */
2618 else if (TREE_SIDE_EFFECTS (e) || force)
2619 return save_expr (e);
2620 else
2621 return e;
2622 break;
2624 case tcc_binary:
2625 /* Recursively stabilize each operand. */
2626 result
2627 = build2 (code, type,
2628 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data),
2629 gnat_stabilize_reference_1 (TREE_OPERAND (e, 1), data));
2630 break;
2632 case tcc_unary:
2633 /* Recursively stabilize each operand. */
2634 result
2635 = build1 (code, type,
2636 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data));
2637 break;
2639 default:
2640 gcc_unreachable ();
2643 TREE_READONLY (result) = TREE_READONLY (e);
2644 TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (e);
2645 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
2647 return result;
2650 /* This is equivalent to stabilize_reference in tree.c but we know how to
2651 handle our own nodes and we take extra arguments. FORCE says whether to
2652 force evaluation of everything in REF. INIT is set to the first arm of
2653 a COMPOUND_EXPR present in REF, if any. */
2655 tree
2656 gnat_stabilize_reference (tree ref, bool force, tree *init)
2658 return
2659 gnat_rewrite_reference (ref, gnat_stabilize_reference_1, &force, init);
2662 /* Rewrite reference REF and call FUNC on each expression within REF in the
2663 process. DATA is passed unmodified to FUNC. INIT is set to the first
2664 arm of a COMPOUND_EXPR present in REF, if any. */
2666 tree
2667 gnat_rewrite_reference (tree ref, rewrite_fn func, void *data, tree *init)
2669 tree type = TREE_TYPE (ref);
2670 enum tree_code code = TREE_CODE (ref);
2671 tree result;
2673 switch (code)
2675 case CONST_DECL:
2676 case VAR_DECL:
2677 case PARM_DECL:
2678 case RESULT_DECL:
2679 /* No action is needed in this case. */
2680 return ref;
2682 CASE_CONVERT:
2683 case FLOAT_EXPR:
2684 case FIX_TRUNC_EXPR:
2685 case REALPART_EXPR:
2686 case IMAGPART_EXPR:
2687 case VIEW_CONVERT_EXPR:
2688 result
2689 = build1 (code, type,
2690 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func, data,
2691 init));
2692 break;
2694 case INDIRECT_REF:
2695 case UNCONSTRAINED_ARRAY_REF:
2696 result = build1 (code, type, func (TREE_OPERAND (ref, 0), data));
2697 break;
2699 case COMPONENT_REF:
2700 result = build3 (COMPONENT_REF, type,
2701 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func,
2702 data, init),
2703 TREE_OPERAND (ref, 1), NULL_TREE);
2704 break;
2706 case BIT_FIELD_REF:
2707 result = build3 (BIT_FIELD_REF, type,
2708 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func,
2709 data, init),
2710 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
2711 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
2712 break;
2714 case ARRAY_REF:
2715 case ARRAY_RANGE_REF:
2716 result
2717 = build4 (code, type,
2718 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func, data,
2719 init),
2720 func (TREE_OPERAND (ref, 1), data),
2721 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
2722 break;
2724 case COMPOUND_EXPR:
2725 gcc_assert (!*init);
2726 *init = TREE_OPERAND (ref, 0);
2727 /* We expect only the pattern built in Call_to_gnu. */
2728 gcc_assert (DECL_P (TREE_OPERAND (ref, 1))
2729 || (TREE_CODE (TREE_OPERAND (ref, 1)) == COMPONENT_REF
2730 && DECL_P (TREE_OPERAND (TREE_OPERAND (ref, 1), 0))));
2731 return TREE_OPERAND (ref, 1);
2733 case CALL_EXPR:
2735 /* This can only be an atomic load. */
2736 gcc_assert (call_is_atomic_load (ref));
2738 /* An atomic load is an INDIRECT_REF of its first argument. */
2739 tree t = CALL_EXPR_ARG (ref, 0);
2740 if (TREE_CODE (t) == NOP_EXPR)
2741 t = TREE_OPERAND (t, 0);
2742 if (TREE_CODE (t) == ADDR_EXPR)
2743 t = build1 (ADDR_EXPR, TREE_TYPE (t),
2744 gnat_rewrite_reference (TREE_OPERAND (t, 0), func, data,
2745 init));
2746 else
2747 t = func (t, data);
2748 t = fold_convert (TREE_TYPE (CALL_EXPR_ARG (ref, 0)), t);
2750 result = build_call_expr (TREE_OPERAND (CALL_EXPR_FN (ref), 0), 2,
2751 t, CALL_EXPR_ARG (ref, 1));
2753 break;
2755 case ERROR_MARK:
2756 case NULL_EXPR:
2757 return ref;
2759 default:
2760 gcc_unreachable ();
2763 /* TREE_THIS_VOLATILE and TREE_SIDE_EFFECTS set on the initial expression
2764 may not be sustained across some paths, such as the path through build1 for
2765 INDIRECT_REF. We reset those flags here in the general case, which is
2766 consistent with the GCC version of this routine.
2768 Special care should be taken regarding TREE_SIDE_EFFECTS, because some
2769 paths introduce side-effects where there were none initially (e.g. if a
2770 SAVE_EXPR is built) and we also want to keep track of that. */
2771 TREE_READONLY (result) = TREE_READONLY (ref);
2772 TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (ref);
2773 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
2775 if (code == INDIRECT_REF
2776 || code == UNCONSTRAINED_ARRAY_REF
2777 || code == ARRAY_REF
2778 || code == ARRAY_RANGE_REF)
2779 TREE_THIS_NOTRAP (result) = TREE_THIS_NOTRAP (ref);
2781 return result;
2784 /* This is equivalent to get_inner_reference in expr.c but it returns the
2785 ultimate containing object only if the reference (lvalue) is constant,
2786 i.e. if it doesn't depend on the context in which it is evaluated. */
2788 tree
2789 get_inner_constant_reference (tree exp)
2791 while (true)
2793 switch (TREE_CODE (exp))
2795 case BIT_FIELD_REF:
2796 break;
2798 case COMPONENT_REF:
2799 if (TREE_OPERAND (exp, 2))
2800 return NULL_TREE;
2802 if (!TREE_CONSTANT (DECL_FIELD_OFFSET (TREE_OPERAND (exp, 1))))
2803 return NULL_TREE;
2804 break;
2806 case ARRAY_REF:
2807 case ARRAY_RANGE_REF:
2809 if (TREE_OPERAND (exp, 2) || TREE_OPERAND (exp, 3))
2810 return NULL_TREE;
2812 tree array_type = TREE_TYPE (TREE_OPERAND (exp, 0));
2813 if (!TREE_CONSTANT (TREE_OPERAND (exp, 1))
2814 || !TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (array_type)))
2815 || !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (array_type))))
2816 return NULL_TREE;
2818 break;
2820 case REALPART_EXPR:
2821 case IMAGPART_EXPR:
2822 case VIEW_CONVERT_EXPR:
2823 break;
2825 default:
2826 goto done;
2829 exp = TREE_OPERAND (exp, 0);
2832 done:
2833 return exp;
2836 /* Return true if EXPR is the addition or the subtraction of a constant and,
2837 if so, set *ADD to the addend, *CST to the constant and *MINUS_P to true
2838 if this is a subtraction. */
2840 bool
2841 is_simple_additive_expression (tree expr, tree *add, tree *cst, bool *minus_p)
2843 /* Skip overflow checks. */
2844 if (TREE_CODE (expr) == COND_EXPR
2845 && TREE_CODE (COND_EXPR_THEN (expr)) == COMPOUND_EXPR
2846 && TREE_CODE (TREE_OPERAND (COND_EXPR_THEN (expr), 0)) == CALL_EXPR
2847 && get_callee_fndecl (TREE_OPERAND (COND_EXPR_THEN (expr), 0))
2848 == gnat_raise_decls[CE_Overflow_Check_Failed])
2849 expr = COND_EXPR_ELSE (expr);
2851 if (TREE_CODE (expr) == PLUS_EXPR)
2853 if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
2855 *add = TREE_OPERAND (expr, 1);
2856 *cst = TREE_OPERAND (expr, 0);
2857 *minus_p = false;
2858 return true;
2860 else if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
2862 *add = TREE_OPERAND (expr, 0);
2863 *cst = TREE_OPERAND (expr, 1);
2864 *minus_p = false;
2865 return true;
2868 else if (TREE_CODE (expr) == MINUS_EXPR)
2870 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
2872 *add = TREE_OPERAND (expr, 0);
2873 *cst = TREE_OPERAND (expr, 1);
2874 *minus_p = true;
2875 return true;
2879 return false;
2882 /* If EXPR is an expression that is invariant in the current function, in the
2883 sense that it can be evaluated anywhere in the function and any number of
2884 times, return EXPR or an equivalent expression. Otherwise return NULL. */
2886 tree
2887 gnat_invariant_expr (tree expr)
2889 const tree type = TREE_TYPE (expr);
2890 tree add, cst;
2891 bool minus_p;
2893 expr = remove_conversions (expr, false);
2895 /* Look through temporaries created to capture values. */
2896 while ((TREE_CODE (expr) == CONST_DECL
2897 || (TREE_CODE (expr) == VAR_DECL && TREE_READONLY (expr)))
2898 && decl_function_context (expr) == current_function_decl
2899 && DECL_INITIAL (expr))
2901 expr = DECL_INITIAL (expr);
2902 /* Look into CONSTRUCTORs built to initialize padded types. */
2903 if (TYPE_IS_PADDING_P (TREE_TYPE (expr)))
2904 expr = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (expr))), expr);
2905 expr = remove_conversions (expr, false);
2908 /* We are only interested in scalar types at the moment and, even though we
2909 may have gone through padding types in the above loop, we must be back to a
2910 scalar value at this point. */
2911 if (AGGREGATE_TYPE_P (TREE_TYPE (expr)))
2912 return NULL_TREE;
2914 if (TREE_CONSTANT (expr))
2915 return fold_convert (type, expr);
2917 /* Deal with addition or subtraction of constants. */
2918 if (is_simple_additive_expression (expr, &add, &cst, &minus_p))
2920 add = gnat_invariant_expr (add);
2921 if (add)
2922 return
2923 fold_build2 (minus_p ? MINUS_EXPR : PLUS_EXPR, type,
2924 fold_convert (type, add), fold_convert (type, cst));
2925 else
2926 return NULL_TREE;
2929 bool invariant_p = false;
2930 tree t = expr;
2932 while (true)
2934 switch (TREE_CODE (t))
2936 case COMPONENT_REF:
2937 if (TREE_OPERAND (t, 2))
2938 return NULL_TREE;
2939 invariant_p |= DECL_INVARIANT_P (TREE_OPERAND (t, 1));
2940 break;
2942 case ARRAY_REF:
2943 case ARRAY_RANGE_REF:
2944 if (!TREE_CONSTANT (TREE_OPERAND (t, 1))
2945 || TREE_OPERAND (t, 2)
2946 || TREE_OPERAND (t, 3))
2947 return NULL_TREE;
2948 break;
2950 case BIT_FIELD_REF:
2951 case REALPART_EXPR:
2952 case IMAGPART_EXPR:
2953 case VIEW_CONVERT_EXPR:
2954 CASE_CONVERT:
2955 break;
2957 case INDIRECT_REF:
2958 if ((!invariant_p && !TREE_READONLY (t)) || TREE_SIDE_EFFECTS (t))
2959 return NULL_TREE;
2960 invariant_p = false;
2961 break;
2963 default:
2964 goto object;
2967 t = TREE_OPERAND (t, 0);
2970 object:
2971 if (TREE_SIDE_EFFECTS (t))
2972 return NULL_TREE;
2974 if (TREE_CODE (t) == CONST_DECL
2975 && (DECL_EXTERNAL (t)
2976 || decl_function_context (t) != current_function_decl))
2977 return fold_convert (type, expr);
2979 if (!invariant_p && !TREE_READONLY (t))
2980 return NULL_TREE;
2982 if (TREE_CODE (t) == PARM_DECL)
2983 return fold_convert (type, expr);
2985 if (TREE_CODE (t) == VAR_DECL
2986 && (DECL_EXTERNAL (t)
2987 || decl_function_context (t) != current_function_decl))
2988 return fold_convert (type, expr);
2990 return NULL_TREE;