/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                              U T I L S 2                                 *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2016, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software; you can redistribute it and/or modify it under   *
 * terms of the GNU General Public License as published by the Free Soft-  *
 * ware Foundation; either version 3, or (at your option) any later ver-   *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY  *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for more details.  You should have received a copy of the GNU General   *
 * Public License along with GCC; see the file COPYING3.  If not see       *
 * <http://www.gnu.org/licenses/>.                                         *
 *                                                                          *
 * GNAT was originally developed by the GNAT team at New York University.  *
 * Extensive contributions were provided by Ada Core Technologies Inc.     *
 *                                                                          *
 ****************************************************************************/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "memmodel.h"
#include "tm.h"
#include "vec.h"
#include "alias.h"
#include "tree.h"
#include "inchash.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "stringpool.h"
#include "varasm.h"
#include "flags.h"
#include "toplev.h"
#include "ggc.h"
#include "tree-inline.h"

#include "ada.h"
#include "types.h"
#include "atree.h"
#include "elists.h"
#include "namet.h"
#include "nlists.h"
#include "snames.h"
#include "stringt.h"
#include "uintp.h"
#include "fe.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"

/* Return the base type of TYPE.  */
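/* For an integer or floating-point subtype, this follows the TREE_TYPE
   chain of the subtype nodes down to the underlying base type; e.g. for
   a subtype of Integer it returns the GCC tree for Integer itself.  */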

tree
get_base_type (tree type)
{
  if (TREE_CODE (type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (type))
    type = TREE_TYPE (TYPE_FIELDS (type));

  while (TREE_TYPE (type)
         && (TREE_CODE (type) == INTEGER_TYPE
             || TREE_CODE (type) == REAL_TYPE))
    type = TREE_TYPE (type);

  return type;
}

/* EXP is a GCC tree representing an address.  See if we can find how strictly
   the object at this address is aligned and, if so, return the alignment of
   the object in bits.  Otherwise return 0.  */

unsigned int
known_alignment (tree exp)
{
  unsigned int this_alignment;
  unsigned int lhs, rhs;

  switch (TREE_CODE (exp))
    {
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Conversions between pointers and integers don't change the alignment
         of the underlying object.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 0));
      break;

    case COMPOUND_EXPR:
      /* The value of a COMPOUND_EXPR is that of its second operand.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* If two addresses are added, the alignment of the result is the
         minimum of the two alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));
      this_alignment = MIN (lhs, rhs);
      break;

    case POINTER_PLUS_EXPR:
      /* If this is the pattern built for aligning types, decode it.  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)) == NEGATE_EXPR)
        {
          tree op = TREE_OPERAND (TREE_OPERAND (exp, 1), 1);
          return
            known_alignment (fold_build1 (BIT_NOT_EXPR, TREE_TYPE (op), op));
        }

      /* If we don't know the alignment of the offset, we assume that
         of the base.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));

      if (rhs == 0)
        this_alignment = lhs;
      else
        this_alignment = MIN (lhs, rhs);
      break;

    case COND_EXPR:
      /* If there is a choice between two values, use the smaller one.  */
      lhs = known_alignment (TREE_OPERAND (exp, 1));
      rhs = known_alignment (TREE_OPERAND (exp, 2));
      this_alignment = MIN (lhs, rhs);
      break;

    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT c = TREE_INT_CST_LOW (exp);
        /* The first part of this represents the lowest bit in the constant,
           but it is originally in bytes, not bits.  */
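        /* E.g. c = 24 = 0b11000 gives c & -c = 8: the expression isolates
           the lowest set bit, so a byte offset of 24 is known to preserve
           a 64-bit alignment.  */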
        this_alignment = (c & -c) * BITS_PER_UNIT;
      }
      break;

    case MULT_EXPR:
      /* If we know the alignment of just one side, use it.  Otherwise,
         use the product of the alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));

      if (lhs == 0)
        this_alignment = rhs;
      else if (rhs == 0)
        this_alignment = lhs;
      else
        this_alignment = MIN (lhs * rhs, BIGGEST_ALIGNMENT);
      break;

    case BIT_AND_EXPR:
      /* A bit-and expression is as aligned as the maximum alignment of the
         operands.  We typically get here for a complex lhs and a constant
         negative power of two on the rhs to force an explicit alignment, so
         don't bother looking at the lhs.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case ADDR_EXPR:
      this_alignment = expr_align (TREE_OPERAND (exp, 0));
      break;

    case CALL_EXPR:
      {
        tree fndecl = get_callee_fndecl (exp);
        if (fndecl == malloc_decl || fndecl == realloc_decl)
          return get_target_system_allocator_alignment () * BITS_PER_UNIT;

        tree t = maybe_inline_call_in_expr (exp);
        if (t)
          return known_alignment (t);
      }

      /* ... fall through ... */

    default:
      /* For other pointer expressions, we assume that the pointed-to object
         is at least as aligned as the pointed-to type.  Beware that we can
         have a dummy type here (e.g. a Taft Amendment type), for which the
         alignment is meaningless and should be ignored.  */
      if (POINTER_TYPE_P (TREE_TYPE (exp))
          && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp)))
          && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (exp))))
        this_alignment = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
      else
        this_alignment = 0;
      break;
    }

  return this_alignment;
}

/* We have a comparison or assignment operation on two types, T1 and T2, which
   are either both array types or both record types.  T1 is assumed to be for
   the left-hand side operand, and T2 for the right-hand side.  Return the
   type that both operands should be converted to for the operation, if any.
   Otherwise return zero.  */

static tree
find_common_type (tree t1, tree t2)
{
  /* ??? As of today, various constructs lead here with types of different
     sizes even when both are constant (e.g. tagged types, packable vs regular
     component types, padded vs unpadded types, ...).  While some of these
     would better be handled upstream (types should be made consistent before
     calling into build_binary_op), some others are really expected and we
     have to be careful.  */

  const bool variable_record_on_lhs
    = (TREE_CODE (t1) == RECORD_TYPE
       && TREE_CODE (t2) == RECORD_TYPE
       && get_variant_part (t1)
       && !get_variant_part (t2));

  const bool variable_array_on_lhs
    = (TREE_CODE (t1) == ARRAY_TYPE
       && TREE_CODE (t2) == ARRAY_TYPE
       && !TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)))
       && TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (t2))));

  /* We must avoid writing more than what the target can hold if this is for
     an assignment (the case of tagged types is handled in build_binary_op),
     so we use the lhs type if it is known to be smaller or of constant size
     and the rhs type is not, whatever the modes.  We also force t1 in case of
     constant size equality to minimize occurrences of view conversions on the
     lhs of an assignment, except for the case of types with a variable part
     on the lhs but not on the rhs, to make the conversion simpler.  */
  if (TREE_CONSTANT (TYPE_SIZE (t1))
      && (!TREE_CONSTANT (TYPE_SIZE (t2))
          || tree_int_cst_lt (TYPE_SIZE (t1), TYPE_SIZE (t2))
          || (TYPE_SIZE (t1) == TYPE_SIZE (t2)
              && !variable_record_on_lhs
              && !variable_array_on_lhs)))
    return t1;

  /* Otherwise, if the lhs type is non-BLKmode, use it, except for the case of
     a non-BLKmode rhs and array types with a variable part on the lhs but not
     on the rhs, to make sure the conversion is preserved during gimplification.
     Note that we know that we will not have any alignment problems since, if
     we did, the non-BLKmode type could not have been used.  */
  if (TYPE_MODE (t1) != BLKmode
      && (TYPE_MODE (t2) == BLKmode || !variable_array_on_lhs))
    return t1;

  /* If the rhs type is of constant size, use it whatever the modes.  At
     this point it is known to be smaller, or of constant size and the
     lhs type is not.  */
  if (TREE_CONSTANT (TYPE_SIZE (t2)))
    return t2;

  /* Otherwise, if the rhs type is non-BLKmode, use it.  */
  if (TYPE_MODE (t2) != BLKmode)
    return t2;

  /* In this case, both types have variable size and BLKmode.  It's
     probably best to leave the "type mismatch" because changing it
     could cause a bad self-referential reference.  */
  return NULL_TREE;
}

/* Return an expression tree representing an equality comparison of A1 and A2,
   two objects of type ARRAY_TYPE.  The result should be of type RESULT_TYPE.

   Two arrays are equal in one of two ways: (1) if both have zero length in
   some dimension (not necessarily the same dimension) or (2) if the lengths
   in each dimension are equal and the data is equal.  We perform the length
   tests in as efficient a manner as possible.  */
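
/* As a sketch, for a one-dimensional comparison the expression built below
   has the overall shape

     (length1 == length2 && (data of) A1 == A2)
       || (A1_is_null && A2_is_null)

   with the length tests specialized whenever one of the lengths is a
   compile-time constant.  */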

static tree
compare_arrays (location_t loc, tree result_type, tree a1, tree a2)
{
  tree result = convert (result_type, boolean_true_node);
  tree a1_is_null = convert (result_type, boolean_false_node);
  tree a2_is_null = convert (result_type, boolean_false_node);
  tree t1 = TREE_TYPE (a1);
  tree t2 = TREE_TYPE (a2);
  bool a1_side_effects_p = TREE_SIDE_EFFECTS (a1);
  bool a2_side_effects_p = TREE_SIDE_EFFECTS (a2);
  bool length_zero_p = false;

  /* If the operands have side-effects, they need to be evaluated only once
     in spite of the multiple references in the comparison.  */
  if (a1_side_effects_p)
    a1 = gnat_protect_expr (a1);

  if (a2_side_effects_p)
    a2 = gnat_protect_expr (a2);

  /* Process each dimension separately and compare the lengths.  If any
     dimension has a length known to be zero, set LENGTH_ZERO_P to true
     in order to suppress the comparison of the data at the end.  */
  while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
    {
      tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
      tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
      tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
      tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
      tree length1 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub1, lb1),
                                 size_one_node);
      tree length2 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub2, lb2),
                                 size_one_node);
      tree comparison, this_a1_is_null, this_a2_is_null;

      /* If the length of the first array is a constant, swap our operands
         unless the length of the second array is the constant zero.  */
      if (TREE_CODE (length1) == INTEGER_CST && !integer_zerop (length2))
        {
          tree tem;
          bool btem;

          tem = a1, a1 = a2, a2 = tem;
          tem = t1, t1 = t2, t2 = tem;
          tem = lb1, lb1 = lb2, lb2 = tem;
          tem = ub1, ub1 = ub2, ub2 = tem;
          tem = length1, length1 = length2, length2 = tem;
          tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
          btem = a1_side_effects_p, a1_side_effects_p = a2_side_effects_p,
          a2_side_effects_p = btem;
        }

      /* If the length of the second array is the constant zero, we can just
         use the original stored bounds for the first array and see whether
         last < first holds.  */
      if (integer_zerop (length2))
        {
          tree b = get_base_type (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));

          length_zero_p = true;

          ub1
            = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
          lb1
            = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));

          comparison = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
          comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
          if (EXPR_P (comparison))
            SET_EXPR_LOCATION (comparison, loc);

          this_a1_is_null = comparison;
          this_a2_is_null = convert (result_type, boolean_true_node);
        }

      /* Otherwise, if the length is some other constant value, we know that
         this dimension in the second array cannot be superflat, so we can
         just use its length computed from the actual stored bounds.  */
      else if (TREE_CODE (length2) == INTEGER_CST)
        {
          tree b = get_base_type (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));

          ub1
            = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
          lb1
            = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
          /* Note that we know that UB2 and LB2 are constant and hence
             cannot contain a PLACEHOLDER_EXPR.  */
          ub2
            = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2))));
          lb2
            = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2))));

          comparison
            = fold_build2_loc (loc, EQ_EXPR, result_type,
                               build_binary_op (MINUS_EXPR, b, ub1, lb1),
                               build_binary_op (MINUS_EXPR, b, ub2, lb2));
          comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
          if (EXPR_P (comparison))
            SET_EXPR_LOCATION (comparison, loc);

          this_a1_is_null
            = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);

          this_a2_is_null = convert (result_type, boolean_false_node);
        }

      /* Otherwise, compare the computed lengths.  */
      else
        {
          length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
          length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);

          comparison
            = fold_build2_loc (loc, EQ_EXPR, result_type, length1, length2);

          /* If the length expression is of the form (cond ? val : 0), assume
             that cond is equivalent to (length != 0).  That's guaranteed by
             construction of the array types in gnat_to_gnu_entity.  */
          if (TREE_CODE (length1) == COND_EXPR
              && integer_zerop (TREE_OPERAND (length1, 2)))
            this_a1_is_null
              = invert_truthvalue_loc (loc, TREE_OPERAND (length1, 0));
          else
            this_a1_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
                                               length1, size_zero_node);

          /* Likewise for the second array.  */
          if (TREE_CODE (length2) == COND_EXPR
              && integer_zerop (TREE_OPERAND (length2, 2)))
            this_a2_is_null
              = invert_truthvalue_loc (loc, TREE_OPERAND (length2, 0));
          else
            this_a2_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
                                               length2, size_zero_node);
        }

      /* Append expressions for this dimension to the final expressions.  */
      result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
                                result, comparison);

      a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                    this_a1_is_null, a1_is_null);

      a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                    this_a2_is_null, a2_is_null);

      t1 = TREE_TYPE (t1);
      t2 = TREE_TYPE (t2);
    }

  /* Unless the length of some dimension is known to be zero, compare the
     data in the array.  */
  if (!length_zero_p)
    {
      tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));
      tree comparison;

      if (type)
        {
          a1 = convert (type, a1),
          a2 = convert (type, a2);
        }

      comparison = fold_build2_loc (loc, EQ_EXPR, result_type, a1, a2);

      result
        = build_binary_op (TRUTH_ANDIF_EXPR, result_type, result, comparison);
    }

  /* The result is also true if both sizes are zero.  */
  result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                            build_binary_op (TRUTH_ANDIF_EXPR, result_type,
                                             a1_is_null, a2_is_null),
                            result);

  /* If the operands have side-effects, they need to be evaluated before
     doing the tests above since the place they otherwise would end up
     being evaluated at run time could be wrong.  */
  if (a1_side_effects_p)
    result = build2 (COMPOUND_EXPR, result_type, a1, result);

  if (a2_side_effects_p)
    result = build2 (COMPOUND_EXPR, result_type, a2, result);

  return result;
}

/* Return an expression tree representing an equality comparison of P1 and P2,
   two objects of fat pointer type.  The result should be of type RESULT_TYPE.

   Two fat pointers are equal in one of two ways: (1) if both have a null
   pointer to the array or (2) if they contain the same couple of pointers.
   We perform the comparison in as efficient a manner as possible.  */

static tree
compare_fat_pointers (location_t loc, tree result_type, tree p1, tree p2)
{
  tree p1_array, p2_array, p1_bounds, p2_bounds, same_array, same_bounds;
  tree p1_array_is_null, p2_array_is_null;

  /* If either operand has side-effects, they have to be evaluated only once
     in spite of the multiple references to the operand in the comparison.  */
  p1 = gnat_protect_expr (p1);
  p2 = gnat_protect_expr (p2);

  /* The constant folder doesn't fold fat pointer types so we do it here.  */
  if (TREE_CODE (p1) == CONSTRUCTOR)
    p1_array = CONSTRUCTOR_ELT (p1, 0)->value;
  else
    p1_array = build_component_ref (p1, TYPE_FIELDS (TREE_TYPE (p1)), true);

  p1_array_is_null
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array,
                       fold_convert_loc (loc, TREE_TYPE (p1_array),
                                         null_pointer_node));

  if (TREE_CODE (p2) == CONSTRUCTOR)
    p2_array = CONSTRUCTOR_ELT (p2, 0)->value;
  else
    p2_array = build_component_ref (p2, TYPE_FIELDS (TREE_TYPE (p2)), true);

  p2_array_is_null
    = fold_build2_loc (loc, EQ_EXPR, result_type, p2_array,
                       fold_convert_loc (loc, TREE_TYPE (p2_array),
                                         null_pointer_node));

  /* If one of the pointers to the array is null, just compare the other.  */
  if (integer_zerop (p1_array))
    return p2_array_is_null;
  else if (integer_zerop (p2_array))
    return p1_array_is_null;

  /* Otherwise, do the fully-fledged comparison.  */
  same_array
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array, p2_array);

  if (TREE_CODE (p1) == CONSTRUCTOR)
    p1_bounds = CONSTRUCTOR_ELT (p1, 1)->value;
  else
    p1_bounds
      = build_component_ref (p1, DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p1))),
                             true);

  if (TREE_CODE (p2) == CONSTRUCTOR)
    p2_bounds = CONSTRUCTOR_ELT (p2, 1)->value;
  else
    p2_bounds
      = build_component_ref (p2, DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p2))),
                             true);

  same_bounds
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_bounds, p2_bounds);

  /* P1_ARRAY == P2_ARRAY && (P1_ARRAY == NULL || P1_BOUNDS == P2_BOUNDS).  */
  return build_binary_op (TRUTH_ANDIF_EXPR, result_type, same_array,
                          build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                           p1_array_is_null, same_bounds));
}

/* Compute the result of applying OP_CODE to LHS and RHS, where both are of
   type TYPE.  We know that TYPE is a modular type with a nonbinary
   modulus.  */
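
/* For instance, for a modulus of 7, NEEDED_PRECISION is 3 and an addition
   is carried out in at least 4 bits: 5 + 6 yields 11, which the fixup code
   below reduces to 11 - 7 = 4.  */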

static tree
nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
                             tree rhs)
{
  tree modulus = TYPE_MODULUS (type);
  unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
  unsigned int precision;
  bool unsignedp = true;
  tree op_type = type;
  tree result;

  /* If this is an addition of a constant, convert it to a subtraction
     of a constant since we can do that faster.  */
  if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
    {
      rhs = fold_build2 (MINUS_EXPR, type, modulus, rhs);
      op_code = MINUS_EXPR;
    }

  /* For the logical operations, we only need PRECISION bits.  For
     addition and subtraction, we need one more and for multiplication we
     need twice as many.  But we never want to make a size smaller than
     our size.  */
  if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
    needed_precision += 1;
  else if (op_code == MULT_EXPR)
    needed_precision *= 2;

  precision = MAX (needed_precision, TYPE_PRECISION (op_type));

  /* Unsigned will do for everything but subtraction.  */
  if (op_code == MINUS_EXPR)
    unsignedp = false;

  /* If our type is the wrong signedness or isn't wide enough, make a new
     type and convert both our operands to it.  */
  if (TYPE_PRECISION (op_type) < precision
      || TYPE_UNSIGNED (op_type) != unsignedp)
    {
      /* Copy the type so we ensure it can be modified to make it modular.  */
      op_type = copy_type (gnat_type_for_size (precision, unsignedp));
      modulus = convert (op_type, modulus);
      SET_TYPE_MODULUS (op_type, modulus);
      TYPE_MODULAR_P (op_type) = 1;
      lhs = convert (op_type, lhs);
      rhs = convert (op_type, rhs);
    }

  /* Do the operation, then we'll fix it up.  */
  result = fold_build2 (op_code, op_type, lhs, rhs);

  /* For multiplication, we have no choice but to do a full modulus
     operation.  However, we want to do this in the narrowest
     possible size.  */
  if (op_code == MULT_EXPR)
    {
      /* Copy the type so we ensure it can be modified to make it modular.  */
      tree div_type = copy_type (gnat_type_for_size (needed_precision, 1));
      modulus = convert (div_type, modulus);
      SET_TYPE_MODULUS (div_type, modulus);
      TYPE_MODULAR_P (div_type) = 1;
      result = convert (op_type,
                        fold_build2 (TRUNC_MOD_EXPR, div_type,
                                     convert (div_type, result), modulus));
    }

  /* For subtraction, add the modulus back if we are negative.  */
  else if (op_code == MINUS_EXPR)
    {
      result = gnat_protect_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
                            fold_build2 (LT_EXPR, boolean_type_node, result,
                                         build_int_cst (op_type, 0)),
                            fold_build2 (PLUS_EXPR, op_type, result, modulus),
                            result);
    }

  /* For the other operations, subtract the modulus if we are >= it.  */
  else
    {
      result = gnat_protect_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
                            fold_build2 (GE_EXPR, boolean_type_node,
                                         result, modulus),
                            fold_build2 (MINUS_EXPR, op_type,
                                         result, modulus),
                            result);
    }

  return convert (type, result);
}

/* This page contains routines that implement the Ada semantics with regard
   to atomic objects.  They are fully piggybacked on the middle-end support
   for atomic loads and stores.

   *** Memory barriers and volatile objects ***

   We implement the weakened form of the C.6(16) clause that was introduced
   in Ada 2012 (AI05-117).  Earlier forms of this clause wouldn't have been
   implementable without significant performance hits on modern platforms.

   We also take advantage of the requirements imposed on shared variables by
   9.10 (conditions for sequential actions) to have non-erroneous execution
   and consider that C.6(16) and C.6(17) only prescribe a uniform order of
   volatile updates with regard to sequential actions, i.e. with regard to
   reads or updates of atomic objects.

   As such, an update of an atomic object by a task requires that all earlier
   accesses to volatile objects have completed.  Similarly, later accesses to
   volatile objects cannot be reordered before the update of the atomic object.
   So, memory barriers both before and after the atomic update are needed.

   For a read of an atomic object, to avoid seeing writes of volatile objects
   by a task earlier than by the other tasks, a memory barrier is needed before
   the atomic read.  Finally, to avoid reordering later reads or updates of
   volatile objects to before the atomic read, a barrier is needed after the
   atomic read.

   So, memory barriers are needed before and after atomic reads and updates.
   And, in order to simplify the implementation, we use full memory barriers
   in all cases, i.e. we enforce sequential consistency for atomic accesses.  */
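
/* Concretely, this means that on a typical target a synchronized read of a
   4-byte atomic object is expanded below into a call to __atomic_load_4 with
   the SEQ_CST memory model, which subsumes the barriers described above.  */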

/* Return the size of TYPE, which must be a positive power of 2.  */

static unsigned int
resolve_atomic_size (tree type)
{
  unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE_UNIT (type));

  if (size == 1 || size == 2 || size == 4 || size == 8 || size == 16)
    return size;

  /* We shouldn't reach here without having already detected that the size
     isn't compatible with an atomic access.  */
  gcc_assert (Serious_Errors_Detected);

  return 0;
}

/* Build an atomic load for the underlying atomic object in SRC.  SYNC is
   true if the load requires synchronization.  */

tree
build_atomic_load (tree src, bool sync)
{
  tree ptr_type
    = build_pointer_type
      (build_qualified_type (void_type_node,
                             TYPE_QUAL_ATOMIC | TYPE_QUAL_VOLATILE));
  tree mem_model
    = build_int_cst (integer_type_node,
                     sync ? MEMMODEL_SEQ_CST : MEMMODEL_RELAXED);
  tree orig_src = src;
  tree t, addr, val;
  unsigned int size;
  int fncode;

  /* Remove conversions to get the address of the underlying object.  */
  src = remove_conversions (src, false);
  size = resolve_atomic_size (TREE_TYPE (src));
  if (size == 0)
    return orig_src;
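
  /* The sized variants immediately follow BUILT_IN_ATOMIC_LOAD_N in the
     enumeration of builtins, so this selects e.g. BUILT_IN_ATOMIC_LOAD_4,
     i.e. a call to __atomic_load_4, for a 4-byte object.  */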
  fncode = (int) BUILT_IN_ATOMIC_LOAD_N + exact_log2 (size) + 1;
  t = builtin_decl_implicit ((enum built_in_function) fncode);

  addr = build_unary_op (ADDR_EXPR, ptr_type, src);
  val = build_call_expr (t, 2, addr, mem_model);

  /* First reinterpret the loaded bits in the original type of the load,
     then convert to the expected result type.  */
  t = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (src), val);
  return convert (TREE_TYPE (orig_src), t);
}

/* Build an atomic store from SRC to the underlying atomic object in DEST.
   SYNC is true if the store requires synchronization.  */

tree
build_atomic_store (tree dest, tree src, bool sync)
{
  tree ptr_type
    = build_pointer_type
      (build_qualified_type (void_type_node,
                             TYPE_QUAL_ATOMIC | TYPE_QUAL_VOLATILE));
  tree mem_model
    = build_int_cst (integer_type_node,
                     sync ? MEMMODEL_SEQ_CST : MEMMODEL_RELAXED);
  tree orig_dest = dest;
  tree t, int_type, addr;
  unsigned int size;
  int fncode;

  /* Remove conversions to get the address of the underlying object.  */
  dest = remove_conversions (dest, false);
  size = resolve_atomic_size (TREE_TYPE (dest));
  if (size == 0)
    return build_binary_op (MODIFY_EXPR, NULL_TREE, orig_dest, src);

  fncode = (int) BUILT_IN_ATOMIC_STORE_N + exact_log2 (size) + 1;
  t = builtin_decl_implicit ((enum built_in_function) fncode);
  int_type = gnat_type_for_size (BITS_PER_UNIT * size, 1);

  /* First convert the bits to be stored to the original type of the store,
     then reinterpret them in the effective type.  But if the original type
     is a padded type with the same size, convert to the inner type instead,
     as we don't want to artificially introduce a CONSTRUCTOR here.  */
  if (TYPE_IS_PADDING_P (TREE_TYPE (dest))
      && TYPE_SIZE (TREE_TYPE (dest))
         == TYPE_SIZE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (dest)))))
    src = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (dest))), src);
  else
    src = convert (TREE_TYPE (dest), src);
  src = fold_build1 (VIEW_CONVERT_EXPR, int_type, src);
  addr = build_unary_op (ADDR_EXPR, ptr_type, dest);

  return build_call_expr (t, 3, addr, src, mem_model);
}

/* Build a load-modify-store sequence from SRC to DEST.  GNAT_NODE is used for
   the location of the sequence.  Note that, even though the load and the store
   are both atomic, the sequence itself is not atomic.  */
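
/* In other words, for a destination OBJ.FIELD the sequence built below is
   essentially

     TEMP = __atomic_load (&OBJ);
     TEMP.FIELD = SRC;
     __atomic_store (&OBJ, TEMP);

   with TEMP materialized as a SAVE_EXPR holding the loaded value.  */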

tree
build_load_modify_store (tree dest, tree src, Node_Id gnat_node)
{
  /* We will be modifying DEST below so we build a copy.  */
  dest = copy_node (dest);
  tree ref = dest;

  while (handled_component_p (ref))
    {
      /* The load should already have been generated during the translation
         of the GNAT destination tree; find it out in the GNU tree.  */
      if (TREE_CODE (TREE_OPERAND (ref, 0)) == VIEW_CONVERT_EXPR)
        {
          tree op = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
          if (TREE_CODE (op) == CALL_EXPR && call_is_atomic_load (op))
            {
              tree type = TREE_TYPE (TREE_OPERAND (ref, 0));
              tree t = CALL_EXPR_ARG (op, 0);
              tree obj, temp, stmt;

              /* Find out the loaded object.  */
              if (TREE_CODE (t) == NOP_EXPR)
                t = TREE_OPERAND (t, 0);
              if (TREE_CODE (t) == ADDR_EXPR)
                obj = TREE_OPERAND (t, 0);
              else
                obj = build1 (INDIRECT_REF, type, t);

              /* Drop atomic and volatile qualifiers for the temporary.  */
              type = TYPE_MAIN_VARIANT (type);

              /* And drop BLKmode, if need be, to put it into a register.  */
              if (TYPE_MODE (type) == BLKmode)
                {
                  unsigned int size = tree_to_uhwi (TYPE_SIZE (type));
                  type = copy_type (type);
                  SET_TYPE_MODE (type, mode_for_size (size, MODE_INT, 0));
                }

              /* Create the temporary by inserting a SAVE_EXPR.  */
              temp = build1 (SAVE_EXPR, type,
                             build1 (VIEW_CONVERT_EXPR, type, op));
              TREE_OPERAND (ref, 0) = temp;

              start_stmt_group ();

              /* Build the modify of the temporary.  */
              stmt = build_binary_op (MODIFY_EXPR, NULL_TREE, dest, src);
              add_stmt_with_node (stmt, gnat_node);

              /* Build the store to the object.  */
              stmt = build_atomic_store (obj, temp, false);
              add_stmt_with_node (stmt, gnat_node);

              return end_stmt_group ();
            }
        }

      TREE_OPERAND (ref, 0) = copy_node (TREE_OPERAND (ref, 0));
      ref = TREE_OPERAND (ref, 0);
    }

  /* Something went wrong earlier if we have not found the atomic load.  */
  gcc_unreachable ();
}

/* Make a binary operation of kind OP_CODE.  RESULT_TYPE is the type
   desired for the result.  Usually the operation is to be performed
   in that type.  For INIT_EXPR and MODIFY_EXPR, RESULT_TYPE must be
   NULL_TREE.  For ARRAY_REF, RESULT_TYPE may be NULL_TREE, in which
   case the type to be used will be derived from the operands.
   Don't fold the result if NO_FOLD is true.

   This function is very much unlike the ones for C and C++ since we
   have already done any type conversion and matching required.  All we
   have to do here is validate the work done by SEM and handle subtypes.  */
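
/* For instance, an Ada indexed component A (I) reaches here as an ARRAY_REF,
   possibly with a NULL_TREE RESULT_TYPE, in which case the type of the
   result is derived from that of the array operand.  */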

tree
build_binary_op (enum tree_code op_code, tree result_type,
                 tree left_operand, tree right_operand,
                 bool no_fold)
{
  tree left_type = TREE_TYPE (left_operand);
  tree right_type = TREE_TYPE (right_operand);
  tree left_base_type = get_base_type (left_type);
  tree right_base_type = get_base_type (right_type);
  tree operation_type = result_type;
  tree best_type = NULL_TREE;
  tree modulus, result;
  bool has_side_effects = false;

  if (operation_type
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  if (operation_type
      && TREE_CODE (operation_type) == INTEGER_TYPE
      && TYPE_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  modulus = (operation_type
             && TREE_CODE (operation_type) == INTEGER_TYPE
             && TYPE_MODULAR_P (operation_type)
             ? TYPE_MODULUS (operation_type) : NULL_TREE);

  switch (op_code)
    {
    case INIT_EXPR:
    case MODIFY_EXPR:
      gcc_checking_assert (!result_type);

      /* If there were integral or pointer conversions on the LHS, remove
         them; we'll be putting them back below if needed.  Likewise for
         conversions between array and record types, except for justified
         modular types.  But don't do this if the right operand is not
         BLKmode (for packed arrays) unless we are not changing the mode.  */
      while ((CONVERT_EXPR_P (left_operand)
              || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
             && (((INTEGRAL_TYPE_P (left_type)
                   || POINTER_TYPE_P (left_type))
                  && (INTEGRAL_TYPE_P (TREE_TYPE
                                       (TREE_OPERAND (left_operand, 0)))
                      || POINTER_TYPE_P (TREE_TYPE
                                         (TREE_OPERAND (left_operand, 0)))))
                 || (((TREE_CODE (left_type) == RECORD_TYPE
                       && !TYPE_JUSTIFIED_MODULAR_P (left_type))
                      || TREE_CODE (left_type) == ARRAY_TYPE)
                     && ((TREE_CODE (TREE_TYPE
                                     (TREE_OPERAND (left_operand, 0)))
                          == RECORD_TYPE)
                         || (TREE_CODE (TREE_TYPE
                                        (TREE_OPERAND (left_operand, 0)))
                             == ARRAY_TYPE))
                     && (TYPE_MODE (right_type) == BLKmode
                         || (TYPE_MODE (left_type)
                             == TYPE_MODE (TREE_TYPE
                                           (TREE_OPERAND
                                            (left_operand, 0))))))))
        {
          left_operand = TREE_OPERAND (left_operand, 0);
          left_type = TREE_TYPE (left_operand);
        }

      /* If a class-wide type may be involved, force use of the RHS type.  */
      if ((TREE_CODE (right_type) == RECORD_TYPE
           || TREE_CODE (right_type) == UNION_TYPE)
          && TYPE_ALIGN_OK (right_type))
        operation_type = right_type;

      /* If we are copying between padded objects with compatible types, use
         the padded view of the objects; this is very likely more efficient.
         Likewise for a padded object that is assigned a constructor, if we
         can convert the constructor to the inner type, to avoid putting a
         VIEW_CONVERT_EXPR on the LHS.  But don't do so if we wouldn't have
         actually copied anything.  */
      else if (TYPE_IS_PADDING_P (left_type)
               && TREE_CONSTANT (TYPE_SIZE (left_type))
               && ((TREE_CODE (right_operand) == COMPONENT_REF
                    && TYPE_MAIN_VARIANT (left_type)
                       == TYPE_MAIN_VARIANT
                          (TREE_TYPE (TREE_OPERAND (right_operand, 0))))
                   || (TREE_CODE (right_operand) == CONSTRUCTOR
                       && !CONTAINS_PLACEHOLDER_P
                           (DECL_SIZE (TYPE_FIELDS (left_type)))))
               && !integer_zerop (TYPE_SIZE (right_type)))
        {
          /* We make an exception for a BLKmode type padding a non-BLKmode
             inner type and do the conversion of the LHS right away, since
             unchecked_convert wouldn't do it properly.  */
          if (TYPE_MODE (left_type) == BLKmode
              && TYPE_MODE (right_type) != BLKmode
              && TREE_CODE (right_operand) != CONSTRUCTOR)
            {
              operation_type = right_type;
              left_operand = convert (operation_type, left_operand);
              left_type = operation_type;
            }
          else
            operation_type = left_type;
        }

      /* If we have a call to a function that returns with variable size, use
         the RHS type in case we want to use the return slot optimization.  */
      else if (TREE_CODE (right_operand) == CALL_EXPR
               && return_type_with_variable_size_p (right_type))
        operation_type = right_type;

      /* Find the best type to use for copying between aggregate types.  */
      else if (((TREE_CODE (left_type) == ARRAY_TYPE
                 && TREE_CODE (right_type) == ARRAY_TYPE)
                || (TREE_CODE (left_type) == RECORD_TYPE
                    && TREE_CODE (right_type) == RECORD_TYPE))
               && (best_type = find_common_type (left_type, right_type)))
        operation_type = best_type;

      /* Otherwise use the LHS type.  */
      else
        operation_type = left_type;

      /* Ensure everything on the LHS is valid.  If we have a field reference,
         strip anything that get_inner_reference can handle.  Then remove any
         conversions between types having the same code and mode.  And mark
         VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE.  When done, we must have
         either an INDIRECT_REF, a NULL_EXPR, a SAVE_EXPR or a DECL node.  */
      result = left_operand;
      while (true)
        {
          tree restype = TREE_TYPE (result);

          if (TREE_CODE (result) == COMPONENT_REF
              || TREE_CODE (result) == ARRAY_REF
              || TREE_CODE (result) == ARRAY_RANGE_REF)
            while (handled_component_p (result))
              result = TREE_OPERAND (result, 0);
          else if (TREE_CODE (result) == REALPART_EXPR
                   || TREE_CODE (result) == IMAGPART_EXPR
                   || (CONVERT_EXPR_P (result)
                       && (((TREE_CODE (restype)
                             == TREE_CODE (TREE_TYPE
                                           (TREE_OPERAND (result, 0))))
                            && (TYPE_MODE (TREE_TYPE
                                           (TREE_OPERAND (result, 0)))
                                == TYPE_MODE (restype)))
                           || TYPE_ALIGN_OK (restype))))
            result = TREE_OPERAND (result, 0);
          else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
            {
              TREE_ADDRESSABLE (result) = 1;
              result = TREE_OPERAND (result, 0);
            }
          else
            break;
        }

      gcc_assert (TREE_CODE (result) == INDIRECT_REF
                  || TREE_CODE (result) == NULL_EXPR
                  || TREE_CODE (result) == SAVE_EXPR
                  || DECL_P (result));

      /* Convert the right operand to the operation type unless it is
         either already of the correct type or if the type involves a
         placeholder, since the RHS may not have the same record type.  */
      if (operation_type != right_type
          && !CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type)))
        {
          right_operand = convert (operation_type, right_operand);
          right_type = operation_type;
        }

      /* If the left operand is not of the same type as the operation
         type, wrap it up in a VIEW_CONVERT_EXPR.  */
      if (left_type != operation_type)
        left_operand = unchecked_convert (operation_type, left_operand, false);

      has_side_effects = true;
      modulus = NULL_TREE;
      break;

    case ARRAY_REF:
      if (!operation_type)
        operation_type = TREE_TYPE (left_type);

      /* ... fall through ... */

    case ARRAY_RANGE_REF:
      /* First look through conversion between type variants.  Note that
         this changes neither the operation type nor the type domain.  */
      if (TREE_CODE (left_operand) == VIEW_CONVERT_EXPR
          && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (left_operand, 0)))
             == TYPE_MAIN_VARIANT (left_type))
        {
          left_operand = TREE_OPERAND (left_operand, 0);
          left_type = TREE_TYPE (left_operand);
        }

      /* For a range, make sure the element type is consistent.  */
      if (op_code == ARRAY_RANGE_REF
          && TREE_TYPE (operation_type) != TREE_TYPE (left_type))
        operation_type = build_array_type (TREE_TYPE (left_type),
                                           TYPE_DOMAIN (operation_type));

      /* Then convert the right operand to its base type.  This will prevent
         unneeded sign conversions when sizetype is wider than integer.  */
      right_operand = convert (right_base_type, right_operand);
      right_operand = convert_to_index_type (right_operand);
      modulus = NULL_TREE;
      break;

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      gcc_checking_assert
        (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
      operation_type = left_base_type;
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);
      break;

    case GE_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
      gcc_checking_assert
        (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);

      /* If either operand is a NULL_EXPR, just return a new one.  */
      if (TREE_CODE (left_operand) == NULL_EXPR)
        return build2 (op_code, result_type,
                       build1 (NULL_EXPR, integer_type_node,
                               TREE_OPERAND (left_operand, 0)),
                       integer_zero_node);

      else if (TREE_CODE (right_operand) == NULL_EXPR)
        return build2 (op_code, result_type,
                       build1 (NULL_EXPR, integer_type_node,
                               TREE_OPERAND (right_operand, 0)),
                       integer_zero_node);

      /* If either object is a justified modular type, get the
         fields from within.  */
      if (TREE_CODE (left_type) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (left_type))
        {
          left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
                                  left_operand);
          left_type = TREE_TYPE (left_operand);
          left_base_type = get_base_type (left_type);
        }

      if (TREE_CODE (right_type) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (right_type))
        {
          right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
                                   right_operand);
          right_type = TREE_TYPE (right_operand);
          right_base_type = get_base_type (right_type);
        }

      /* If both objects are arrays, compare them specially.  */
      if ((TREE_CODE (left_type) == ARRAY_TYPE
           || (TREE_CODE (left_type) == INTEGER_TYPE
               && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
          && (TREE_CODE (right_type) == ARRAY_TYPE
              || (TREE_CODE (right_type) == INTEGER_TYPE
                  && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
        {
          result = compare_arrays (input_location,
                                   result_type, left_operand, right_operand);
          if (op_code == NE_EXPR)
            result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
          else
            gcc_assert (op_code == EQ_EXPR);

          return result;
        }

      /* Otherwise, the base types must be the same, unless they are both fat
         pointer types or record types.  In the latter case, use the best type
         and convert both operands to that type.  */
      if (left_base_type != right_base_type)
        {
          if (TYPE_IS_FAT_POINTER_P (left_base_type)
              && TYPE_IS_FAT_POINTER_P (right_base_type))
            {
              gcc_assert (TYPE_MAIN_VARIANT (left_base_type)
                          == TYPE_MAIN_VARIANT (right_base_type));
              best_type = left_base_type;
            }

          else if (TREE_CODE (left_base_type) == RECORD_TYPE
                   && TREE_CODE (right_base_type) == RECORD_TYPE)
            {
              /* The only way this is permitted is if both types have the same
                 name.  In that case, one of them must not be self-referential.
                 Use it as the best type.  Even better with a fixed size.  */
              gcc_assert (TYPE_NAME (left_base_type)
                          && TYPE_NAME (left_base_type)
                             == TYPE_NAME (right_base_type));

              if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
                best_type = left_base_type;
              else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
                best_type = right_base_type;
              else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
                best_type = left_base_type;
              else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
                best_type = right_base_type;
              else
                gcc_unreachable ();
            }

          else if (POINTER_TYPE_P (left_base_type)
                   && POINTER_TYPE_P (right_base_type))
            {
              gcc_assert (TREE_TYPE (left_base_type)
                          == TREE_TYPE (right_base_type));
              best_type = left_base_type;
            }
          else
            gcc_unreachable ();

          left_operand = convert (best_type, left_operand);
          right_operand = convert (best_type, right_operand);
        }
      else
        {
          left_operand = convert (left_base_type, left_operand);
          right_operand = convert (right_base_type, right_operand);
        }

      /* If both objects are fat pointers, compare them specially.  */
      if (TYPE_IS_FAT_POINTER_P (left_base_type))
        {
          result
            = compare_fat_pointers (input_location,
                                    result_type, left_operand, right_operand);
          if (op_code == NE_EXPR)
            result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
          else
            gcc_assert (op_code == EQ_EXPR);

          return result;
        }

      modulus = NULL_TREE;
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* The RHS of a shift can be any type.  Also, ignore any modulus
         (we used to abort, but this is needed for unchecked conversion
         to modular types).  Otherwise, processing is the same as normal.  */
      gcc_assert (operation_type == left_base_type);
      modulus = NULL_TREE;
      left_operand = convert (operation_type, left_operand);
      break;

    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      /* For binary modulus, if the inputs are in range, so are the
         outputs.  */
      if (modulus && integer_pow2p (modulus))
        modulus = NULL_TREE;
      goto common;

    case COMPLEX_EXPR:
      gcc_assert (TREE_TYPE (result_type) == left_base_type
                  && TREE_TYPE (result_type) == right_base_type);
      left_operand = convert (left_base_type, left_operand);
      right_operand = convert (right_base_type, right_operand);
      break;

    case TRUNC_DIV_EXPR:   case TRUNC_MOD_EXPR:
    case CEIL_DIV_EXPR:    case CEIL_MOD_EXPR:
    case FLOOR_DIV_EXPR:   case FLOOR_MOD_EXPR:
    case ROUND_DIV_EXPR:   case ROUND_MOD_EXPR:
      /* These always produce results lower than either operand.  */
      modulus = NULL_TREE;
      goto common;

    case POINTER_PLUS_EXPR:
      gcc_assert (operation_type == left_base_type
                  && sizetype == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (sizetype, right_operand);
      break;

    case PLUS_NOMOD_EXPR:
    case MINUS_NOMOD_EXPR:
      if (op_code == PLUS_NOMOD_EXPR)
        op_code = PLUS_EXPR;
      else
        op_code = MINUS_EXPR;
      modulus = NULL_TREE;

      /* ... fall through ... */

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* Avoid doing arithmetics in ENUMERAL_TYPE or BOOLEAN_TYPE like the
         other compilers.  Contrary to C, Ada doesn't allow arithmetics in
         these types but can generate addition/subtraction for Succ/Pred.  */
      if (operation_type
          && (TREE_CODE (operation_type) == ENUMERAL_TYPE
              || TREE_CODE (operation_type) == BOOLEAN_TYPE))
        operation_type = left_base_type = right_base_type
          = gnat_type_for_mode (TYPE_MODE (operation_type),
                                TYPE_UNSIGNED (operation_type));

      /* ... fall through ... */

    default:
    common:
      /* The result type should be the same as the base types of both
         operands (and they should be the same).  Convert everything to
         the result type.  */
      gcc_assert (operation_type == left_base_type
                  && left_base_type == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);
    }

  if (modulus && !integer_pow2p (modulus))
    {
      result = nonbinary_modular_operation (op_code, operation_type,
                                            left_operand, right_operand);
      modulus = NULL_TREE;
    }
  /* If either operand is a NULL_EXPR, just return a new one.  */
  else if (TREE_CODE (left_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
  else if (TREE_CODE (right_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
  else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
    {
      result = build4 (op_code, operation_type, left_operand, right_operand,
                       NULL_TREE, NULL_TREE);
      if (!no_fold)
        result = fold (result);
    }
  else if (op_code == INIT_EXPR || op_code == MODIFY_EXPR)
    result = build2 (op_code, void_type_node, left_operand, right_operand);
  else if (no_fold)
    result = build2 (op_code, operation_type, left_operand, right_operand);
  else
    result
      = fold_build2 (op_code, operation_type, left_operand, right_operand);

  if (TREE_CONSTANT (result))
    ;
  else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
    {
      if (TYPE_VOLATILE (operation_type))
        TREE_THIS_VOLATILE (result) = 1;
    }
  else
    TREE_CONSTANT (result)
      |= (TREE_CONSTANT (left_operand) && TREE_CONSTANT (right_operand));

  TREE_SIDE_EFFECTS (result) |= has_side_effects;

  /* If we are working with modular types, perform the MOD operation
     if something above hasn't eliminated the need for it.  */
  if (modulus)
    {
      modulus = convert (operation_type, modulus);
      if (no_fold)
        result = build2 (FLOOR_MOD_EXPR, operation_type, result, modulus);
      else
        result = fold_build2 (FLOOR_MOD_EXPR, operation_type, result, modulus);
    }

  if (result_type && result_type != operation_type)
    result = convert (result_type, result);

  return result;
}

/* Similar, but for unary operations.  */

tree
build_unary_op (enum tree_code op_code, tree result_type, tree operand)
{
  tree type = TREE_TYPE (operand);
  tree base_type = get_base_type (type);
  tree operation_type = result_type;
  tree result;

  if (operation_type
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  if (operation_type
      && TREE_CODE (operation_type) == INTEGER_TYPE
      && TYPE_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  switch (op_code)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      if (!operation_type)
        result_type = operation_type = TREE_TYPE (type);
      else
        gcc_assert (result_type == TREE_TYPE (type));

      result = fold_build1 (op_code, operation_type, operand);
      break;

    case TRUTH_NOT_EXPR:
      gcc_checking_assert
        (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
      result = invert_truthvalue_loc (EXPR_LOCATION (operand), operand);
      /* When not optimizing, fold the result as invert_truthvalue_loc
         doesn't fold the result of comparisons.  This is intended to undo
         the trick used for boolean rvalues in gnat_to_gnu.  */
      if (!optimize)
        result = fold (result);
      break;

    case ATTR_ADDR_EXPR:
    case ADDR_EXPR:
      switch (TREE_CODE (operand))
        {
        case INDIRECT_REF:
        case UNCONSTRAINED_ARRAY_REF:
          result = TREE_OPERAND (operand, 0);

          /* Make sure the type here is a pointer, not a reference.
             GCC wants pointer types for function addresses.  */
          if (!result_type)
            result_type = build_pointer_type (type);

          /* If the underlying object can alias everything, propagate the
             property since we are effectively retrieving the object.  */
          if (POINTER_TYPE_P (TREE_TYPE (result))
              && TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (result)))
            {
              if (TREE_CODE (result_type) == POINTER_TYPE
                  && !TYPE_REF_CAN_ALIAS_ALL (result_type))
                result_type
                  = build_pointer_type_for_mode (TREE_TYPE (result_type),
                                                 TYPE_MODE (result_type),
                                                 true);
              else if (TREE_CODE (result_type) == REFERENCE_TYPE
                       && !TYPE_REF_CAN_ALIAS_ALL (result_type))
                result_type
                  = build_reference_type_for_mode (TREE_TYPE (result_type),
                                                   TYPE_MODE (result_type),
                                                   true);
            }
          break;

        case NULL_EXPR:
          result = operand;
          TREE_TYPE (result) = type = build_pointer_type (type);
          break;

        case COMPOUND_EXPR:
          /* Fold a compound expression if it has unconstrained array type
             since the middle-end cannot handle it.  But we don't do it in
             the general case because it may introduce aliasing issues if the
             first operand is an indirect assignment and the second operand
             the corresponding address, e.g. for an allocator.  However, do
             it for a return value to expose it for later recognition.  */
          if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE
              || (TREE_CODE (TREE_OPERAND (operand, 1)) == VAR_DECL
                  && DECL_RETURN_VALUE_P (TREE_OPERAND (operand, 1))))
            {
              result = build_unary_op (ADDR_EXPR, result_type,
                                       TREE_OPERAND (operand, 1));
              result = build2 (COMPOUND_EXPR, TREE_TYPE (result),
                               TREE_OPERAND (operand, 0), result);
              break;
            }
          goto common;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
        case COMPONENT_REF:
        case BIT_FIELD_REF:
          /* If this is for 'Address, find the address of the prefix and add
             the offset to the field.  Otherwise, do this the normal way.  */
          if (op_code == ATTR_ADDR_EXPR)
            {
              HOST_WIDE_INT bitsize;
              HOST_WIDE_INT bitpos;
              tree offset, inner;
              machine_mode mode;
              int unsignedp, reversep, volatilep;

              inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
                                           &mode, &unsignedp, &reversep,
                                           &volatilep);

              /* If INNER is a padding type whose field has a self-referential
                 size, convert to that inner type.  We know the offset is zero
                 and we need to have that type visible.  */
              if (type_is_padding_self_referential (TREE_TYPE (inner)))
                inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
                                 inner);

              /* Compute the offset as a byte offset from INNER.  */
              if (!offset)
                offset = size_zero_node;

              offset = size_binop (PLUS_EXPR, offset,
                                   size_int (bitpos / BITS_PER_UNIT));

              /* Take the address of INNER, convert it to a pointer to our
                 type and add the offset.  */
              inner = build_unary_op (ADDR_EXPR,
                                      build_pointer_type (TREE_TYPE (operand)),
                                      inner);
              result = build_binary_op (POINTER_PLUS_EXPR, TREE_TYPE (inner),
                                        inner, offset);
              break;
            }
          goto common;

        case CONSTRUCTOR:
          /* If this is just a constructor for a padded record, we can
             just take the address of the single field and convert it to
             a pointer to our type.  */
          if (TYPE_IS_PADDING_P (type))
            {
              result
                = build_unary_op (ADDR_EXPR,
                                  build_pointer_type (TREE_TYPE (operand)),
                                  CONSTRUCTOR_ELT (operand, 0)->value);
              break;
            }
          goto common;

        case NOP_EXPR:
          if (AGGREGATE_TYPE_P (type)
              && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
            return build_unary_op (ADDR_EXPR, result_type,
                                   TREE_OPERAND (operand, 0));

          /* ... fallthru ... */

        case VIEW_CONVERT_EXPR:
          /* If this is just a variant conversion or if the conversion doesn't
             change the mode, get the result type from this type and go down.
             This is needed for conversions of CONST_DECLs, to eventually get
             to the address of their CORRESPONDING_VARs.  */
          if ((TYPE_MAIN_VARIANT (type)
               == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (operand, 0))))
              || (TYPE_MODE (type) != BLKmode
                  && (TYPE_MODE (type)
                      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0))))))
            return build_unary_op (ADDR_EXPR,
                                   (result_type ? result_type
                                    : build_pointer_type (type)),
                                   TREE_OPERAND (operand, 0));
          goto common;

        case CONST_DECL:
          operand = DECL_CONST_CORRESPONDING_VAR (operand);

          /* ... fall through ... */

        common:

          /* If we are taking the address of a padded record whose field
             contains a template, take the address of the field.  */
          if (TYPE_IS_PADDING_P (type)
              && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
              && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
            {
              type = TREE_TYPE (TYPE_FIELDS (type));
              operand = convert (type, operand);
            }

          gnat_mark_addressable (operand);
          result = build_fold_addr_expr (operand);
        }

      TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
      break;

    case INDIRECT_REF:
      {
        tree t = remove_conversions (operand, false);
        bool can_never_be_null = DECL_P (t) && DECL_CAN_NEVER_BE_NULL_P (t);

        /* If TYPE is a thin pointer, either first retrieve the base if this
           is an expression with an offset built for the initialization of an
           object with an unconstrained nominal subtype, or else convert to
           the fat pointer.  */
        if (TYPE_IS_THIN_POINTER_P (type))
          {
            tree rec_type = TREE_TYPE (type);

            if (TREE_CODE (operand) == POINTER_PLUS_EXPR
                && TREE_OPERAND (operand, 1)
                   == byte_position (DECL_CHAIN (TYPE_FIELDS (rec_type)))
                && TREE_CODE (TREE_OPERAND (operand, 0)) == NOP_EXPR)
              {
                operand = TREE_OPERAND (TREE_OPERAND (operand, 0), 0);
                type = TREE_TYPE (operand);
              }
            else if (TYPE_UNCONSTRAINED_ARRAY (rec_type))
              {
                operand
                  = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (rec_type)),
                             operand);
                type = TREE_TYPE (operand);
              }
          }

        /* If we want to refer to an unconstrained array, use the appropriate
           expression.  But this will never survive down to the back-end.  */
        if (TYPE_IS_FAT_POINTER_P (type))
          {
            result = build1 (UNCONSTRAINED_ARRAY_REF,
                             TYPE_UNCONSTRAINED_ARRAY (type), operand);
            TREE_READONLY (result)
              = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
          }

        /* If we are dereferencing an ADDR_EXPR, return its operand.  */
        else if (TREE_CODE (operand) == ADDR_EXPR)
          result = TREE_OPERAND (operand, 0);

        /* Otherwise, build and fold the indirect reference.  */
        else
          {
            result = build_fold_indirect_ref (operand);
            TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
          }

        if (!TYPE_IS_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)))
          {
            TREE_SIDE_EFFECTS (result) = 1;
            if (TREE_CODE (result) == INDIRECT_REF)
              TREE_THIS_VOLATILE (result)
                = TYPE_VOLATILE (TREE_TYPE (result));
          }

        if ((TREE_CODE (result) == INDIRECT_REF
             || TREE_CODE (result) == UNCONSTRAINED_ARRAY_REF)
            && can_never_be_null)
          TREE_THIS_NOTRAP (result) = 1;

        break;
      }

    case NEGATE_EXPR:
    case BIT_NOT_EXPR:
      {
        tree modulus = ((operation_type
                         && TREE_CODE (operation_type) == INTEGER_TYPE
                         && TYPE_MODULAR_P (operation_type))
                        ? TYPE_MODULUS (operation_type) : NULL_TREE);
        int mod_pow2 = modulus && integer_pow2p (modulus);

        /* If this is a modular type, there are various possibilities
           depending on the operation and whether the modulus is a
           power of two or not.  */
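        /* E.g. for modulus 2**8, a NEGATE_EXPR becomes (-operand) mod 256,
           which folds to a mere AND; for modulus 255 = 2**8 - 1, it becomes
           an XOR with 255 guarded by a test for a zero operand.  */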
1618 if (modulus)
1620 gcc_assert (operation_type == base_type);
1621 operand = convert (operation_type, operand);
1623 /* The fastest in the negate case for binary modulus is
1624 the straightforward code; the TRUNC_MOD_EXPR below
1625 is an AND operation. */
1626 if (op_code == NEGATE_EXPR && mod_pow2)
1627 result = fold_build2 (TRUNC_MOD_EXPR, operation_type,
1628 fold_build1 (NEGATE_EXPR, operation_type,
1629 operand),
1630 modulus);
1632 /* For nonbinary negate case, return zero for zero operand,
1633 else return the modulus minus the operand. If the modulus
1634 is a power of two minus one, we can do the subtraction
1635 as an XOR since it is equivalent and faster on most machines. */
1636 else if (op_code == NEGATE_EXPR && !mod_pow2)
1638 if (integer_pow2p (fold_build2 (PLUS_EXPR, operation_type,
1639 modulus,
1640 build_int_cst (operation_type,
1641 1))))
1642 result = fold_build2 (BIT_XOR_EXPR, operation_type,
1643 operand, modulus);
1644 else
1645 result = fold_build2 (MINUS_EXPR, operation_type,
1646 modulus, operand);
1648 result = fold_build3 (COND_EXPR, operation_type,
1649 fold_build2 (NE_EXPR,
1650 boolean_type_node,
1651 operand,
1652 build_int_cst
1653 (operation_type, 0)),
1654 result, operand);
1656 else
1658 /* For the NOT cases, we need a constant equal to
1659 the modulus minus one. For a binary modulus, we
1660 XOR the operand against this constant; for a nonbinary
1661 modulus, we subtract the operand from it. */
1663 tree cnst = fold_build2 (MINUS_EXPR, operation_type, modulus,
1664 build_int_cst (operation_type, 1));
1666 if (mod_pow2)
1667 result = fold_build2 (BIT_XOR_EXPR, operation_type,
1668 operand, cnst);
1669 else
1670 result = fold_build2 (MINUS_EXPR, operation_type,
1671 cnst, operand);
1674 break;
1678 /* ... fall through ... */
1680 default:
1681 gcc_assert (operation_type == base_type);
1682 result = fold_build1 (op_code, operation_type,
1683 convert (operation_type, operand));
1686 if (result_type && TREE_TYPE (result) != result_type)
1687 result = convert (result_type, result);
1689 return result;
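/* The modular NEGATE_EXPR/BIT_NOT_EXPR transformations above rely on a few
   arithmetic identities.  A standalone C sketch checking them (compiled out
   below; the moduli are illustrative): for a binary modulus 2**N, negation
   is negate-then-mask and NOT is an XOR with 2**N - 1; for a nonbinary
   modulus M, negation of a nonzero X is M - X, which reduces to an XOR when
   M is a power of two minus one, and NOT X is (M - 1) - X.  */
#if 0
#include <assert.h>
#include <stdio.h>

#define MOD_POW2 16u   /* binary modulus 2**4 */
#define MOD_MERS 15u   /* nonbinary modulus 2**4 - 1 */

static unsigned neg_pow2 (unsigned x) { return (0u - x) & (MOD_POW2 - 1); }
static unsigned not_pow2 (unsigned x) { return x ^ (MOD_POW2 - 1); }
static unsigned neg_mers (unsigned x) { return x == 0 ? 0 : MOD_MERS ^ x; }
static unsigned not_mers (unsigned x) { return (MOD_MERS - 1) - x; }

int main (void)
{
  for (unsigned x = 0; x < MOD_POW2; x++)
    {
      assert ((neg_pow2 (x) + x) % MOD_POW2 == 0);
      assert ((not_pow2 (x) + x) == MOD_POW2 - 1);
    }
  for (unsigned x = 0; x < MOD_MERS; x++)
    {
      assert ((neg_mers (x) + x) % MOD_MERS == 0);
      assert ((not_mers (x) + x) == MOD_MERS - 1);
    }
  puts ("modular identities hold");
  return 0;
}
#endif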
1692 /* Similar, but for COND_EXPR. */
1694 tree
1695 build_cond_expr (tree result_type, tree condition_operand,
1696 tree true_operand, tree false_operand)
1698 bool addr_p = false;
1699 tree result;
1701 /* The front-end verified that the result, true and false operands have
1702 the same base type. Convert everything to the result type. */
1703 true_operand = convert (result_type, true_operand);
1704 false_operand = convert (result_type, false_operand);
1706 /* If the result type is unconstrained, take the address of the operands and
1707 then dereference the result. Likewise if the result type is passed by
1708 reference, because creating a temporary of this type is not allowed. */
1709 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1710 || TYPE_IS_BY_REFERENCE_P (result_type)
1711 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1713 result_type = build_pointer_type (result_type);
1714 true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
1715 false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
1716 addr_p = true;
1719 result = fold_build3 (COND_EXPR, result_type, condition_operand,
1720 true_operand, false_operand);
1722 /* If we have a common SAVE_EXPR (possibly surrounded by arithmetic)
1723 in both arms, make sure it gets evaluated by moving it ahead of the
1724 conditional expression. This is necessary because it is evaluated
1725 in only one place at run time and would otherwise be uninitialized
1726 in one of the arms. */
1727 true_operand = skip_simple_arithmetic (true_operand);
1728 false_operand = skip_simple_arithmetic (false_operand);
1730 if (true_operand == false_operand && TREE_CODE (true_operand) == SAVE_EXPR)
1731 result = build2 (COMPOUND_EXPR, result_type, true_operand, result);
1733 if (addr_p)
1734 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1736 return result;
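/* The address/dereference trick used above for by-reference result types
   has a direct analogue in plain C.  A standalone, purely illustrative
   sketch (compiled out below): selecting between the addresses of two
   large objects and dereferencing the result avoids the temporary that a
   value-level conditional would need, and yields an lvalue.  */
#if 0
#include <stdio.h>
#include <string.h>

struct big { char payload[1024]; };

static struct big a, b;

int main (void)
{
  int c = 1;
  strcpy (a.payload, "first");
  strcpy (b.payload, "second");

  /* (c ? a : b) would conceptually materialize a struct big temporary;
     selecting between &a and &b and dereferencing copies nothing.  */
  struct big *p = c ? &a : &b;
  puts (p->payload);

  /* Being an lvalue, the dereferenced conditional can be assigned to.  */
  (*(c ? &a : &b)).payload[0] = 'F';
  puts (a.payload);   /* prints "First" */
  return 0;
}
#endif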
1739 /* Similar, but for COMPOUND_EXPR. */
1741 tree
1742 build_compound_expr (tree result_type, tree stmt_operand, tree expr_operand)
1744 bool addr_p = false;
1745 tree result;
1747 /* If the result type is unconstrained, take the address of the operand and
1748 then dereference the result. Likewise if the result type is passed by
1749 reference, but this is natively handled in the gimplifier. */
1750 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1751 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1753 result_type = build_pointer_type (result_type);
1754 expr_operand = build_unary_op (ADDR_EXPR, result_type, expr_operand);
1755 addr_p = true;
1758 result = fold_build2 (COMPOUND_EXPR, result_type, stmt_operand,
1759 expr_operand);
1761 if (addr_p)
1762 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1764 return result;
1767 /* Conveniently construct a function call expression. FNDECL names the
1768 function to be called, N is the number of arguments, and the "..."
1769 parameters are the argument expressions. Unlike build_call_expr,
1770 this doesn't fold the call, hence it always returns a CALL_EXPR. */
1772 tree
1773 build_call_n_expr (tree fndecl, int n, ...)
1775 va_list ap;
1776 tree fntype = TREE_TYPE (fndecl);
1777 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
1779 va_start (ap, n);
1780 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
1781 va_end (ap);
1782 return fn;
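/* build_call_n_expr collects its N argument trees through a va_list, the
   classic "count first, then the arguments" C idiom.  A standalone sketch
   of the same pattern (compiled out below; names are illustrative):  */
#if 0
#include <stdarg.h>
#include <stdio.h>

/* Sum N ints passed after the count.  */
static long sum_n (int n, ...)
{
  va_list ap;
  long total = 0;

  va_start (ap, n);
  for (int i = 0; i < n; i++)
    total += va_arg (ap, int);
  va_end (ap);

  return total;
}

int main (void)
{
  printf ("%ld\n", sum_n (3, 10, 20, 12));  /* prints 42 */
  return 0;
}
#endif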
1785 /* Build a goto to LABEL for a raise, with an optional call to Local_Raise.
1786 MSG gives the exception's identity for the call to Local_Raise, if any. */
1788 static tree
1789 build_goto_raise (tree label, int msg)
1791 tree gnu_result = build1 (GOTO_EXPR, void_type_node, label);
1792 Entity_Id local_raise = Get_Local_Raise_Call_Entity ();
1794 /* If Local_Raise is present, build Local_Raise (Exception'Identity). */
1795 if (Present (local_raise))
1797 tree gnu_local_raise
1798 = gnat_to_gnu_entity (local_raise, NULL_TREE, false);
1799 tree gnu_exception_entity
1800 = gnat_to_gnu_entity (Get_RT_Exception_Entity (msg), NULL_TREE, false);
1801 tree gnu_call
1802 = build_call_n_expr (gnu_local_raise, 1,
1803 build_unary_op (ADDR_EXPR, NULL_TREE,
1804 gnu_exception_entity));
1805 gnu_result
1806 = build2 (COMPOUND_EXPR, void_type_node, gnu_call, gnu_result);
1809 return gnu_result;
1812 /* Expand the SLOC of GNAT_NODE, if present, into tree location information
1813 pointed to by FILENAME, LINE and COL. Fall back to the current location
1814 if GNAT_NODE is absent or has no SLOC. */
1816 static void
1817 expand_sloc (Node_Id gnat_node, tree *filename, tree *line, tree *col)
1819 const char *str;
1820 int line_number, column_number;
1822 if (Debug_Flag_NN || Exception_Locations_Suppressed)
1824 str = "";
1825 line_number = 0;
1826 column_number = 0;
1828 else if (Present (gnat_node) && Sloc (gnat_node) != No_Location)
1830 str = Get_Name_String
1831 (Debug_Source_Name (Get_Source_File_Index (Sloc (gnat_node))));
1832 line_number = Get_Logical_Line_Number (Sloc (gnat_node));
1833 column_number = Get_Column_Number (Sloc (gnat_node));
1835 else
1837 str = lbasename (LOCATION_FILE (input_location));
1838 line_number = LOCATION_LINE (input_location);
1839 column_number = LOCATION_COLUMN (input_location);
1842 const int len = strlen (str);
1843 *filename = build_string (len, str);
1844 TREE_TYPE (*filename) = build_array_type (char_type_node,
1845 build_index_type (size_int (len)));
1846 *line = build_int_cst (NULL_TREE, line_number);
1847 if (col)
1848 *col = build_int_cst (NULL_TREE, column_number);
1851 /* Build a call to a function that raises an exception and passes file name
1852 and line number, if requested. MSG says which exception function to call.
1853 GNAT_NODE is the node conveying the source location for which the error
1854 should be signaled, or Empty in which case the error is signaled for the
1855 current location. KIND says which kind of exception node this is for,
1856 among N_Raise_{Constraint,Storage,Program}_Error. */
1858 tree
1859 build_call_raise (int msg, Node_Id gnat_node, char kind)
1861 tree fndecl = gnat_raise_decls[msg];
1862 tree label = get_exception_label (kind);
1863 tree filename, line;
1865 /* If this is to be done as a goto, handle that case. */
1866 if (label)
1867 return build_goto_raise (label, msg);
1869 expand_sloc (gnat_node, &filename, &line, NULL);
1871 return
1872 build_call_n_expr (fndecl, 2,
1873 build1 (ADDR_EXPR,
1874 build_pointer_type (char_type_node),
1875 filename),
1876 line);
1879 /* Similar to build_call_raise, with extra information about the column
1880 where the check failed. */
1882 tree
1883 build_call_raise_column (int msg, Node_Id gnat_node, char kind)
1885 tree fndecl = gnat_raise_decls_ext[msg];
1886 tree label = get_exception_label (kind);
1887 tree filename, line, col;
1889 /* If this is to be done as a goto, handle that case. */
1890 if (label)
1891 return build_goto_raise (label, msg);
1893 expand_sloc (gnat_node, &filename, &line, &col);
1895 return
1896 build_call_n_expr (fndecl, 3,
1897 build1 (ADDR_EXPR,
1898 build_pointer_type (char_type_node),
1899 filename),
1900 line, col);
1903 /* Similar to build_call_raise_column, for an index or range check exception,
1904 with extra information of the form "INDEX out of range FIRST..LAST". */
1906 tree
1907 build_call_raise_range (int msg, Node_Id gnat_node, char kind,
1908 tree index, tree first, tree last)
1910 tree fndecl = gnat_raise_decls_ext[msg];
1911 tree label = get_exception_label (kind);
1912 tree filename, line, col;
1914 /* If this is to be done as a goto, handle that case. */
1915 if (label)
1916 return build_goto_raise (label, msg);
1918 expand_sloc (gnat_node, &filename, &line, &col);
1920 return
1921 build_call_n_expr (fndecl, 6,
1922 build1 (ADDR_EXPR,
1923 build_pointer_type (char_type_node),
1924 filename),
1925 line, col,
1926 convert (integer_type_node, index),
1927 convert (integer_type_node, first),
1928 convert (integer_type_node, last));
1931 /* qsort comparator for the bit positions of two constructor elements
1932 for record components. */
1934 static int
1935 compare_elmt_bitpos (const PTR rt1, const PTR rt2)
1937 const constructor_elt * const elmt1 = (const constructor_elt *) rt1;
1938 const constructor_elt * const elmt2 = (const constructor_elt *) rt2;
1939 const_tree const field1 = elmt1->index;
1940 const_tree const field2 = elmt2->index;
1941 const int ret
1942 = tree_int_cst_compare (bit_position (field1), bit_position (field2));
1944 return ret ? ret : (int) (DECL_UID (field1) - DECL_UID (field2));
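/* The comparator above orders fields by bit position and breaks ties with
   the unique DECL_UID, so the result never depends on qsort's unstable
   internals.  A standalone sketch of the same pattern (compiled out below;
   names are illustrative):  */
#if 0
#include <stdio.h>
#include <stdlib.h>

struct elt { long bitpos; int uid; };

/* Order by bit position, resolving ties with the unique id.  */
static int
compare_elt (const void *p1, const void *p2)
{
  const struct elt *e1 = (const struct elt *) p1;
  const struct elt *e2 = (const struct elt *) p2;

  if (e1->bitpos != e2->bitpos)
    return e1->bitpos < e2->bitpos ? -1 : 1;
  return e1->uid - e2->uid;
}

int main (void)
{
  struct elt v[] = { { 64, 3 }, { 0, 2 }, { 64, 1 }, { 32, 4 } };

  qsort (v, sizeof v / sizeof v[0], sizeof v[0], compare_elt);
  for (size_t i = 0; i < sizeof v / sizeof v[0]; i++)
    printf ("bitpos=%ld uid=%d\n", v[i].bitpos, v[i].uid);
  return 0;
}
#endif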
1947 /* Return a CONSTRUCTOR of TYPE whose elements are V. */
1949 tree
1950 gnat_build_constructor (tree type, vec<constructor_elt, va_gc> *v)
1952 bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
1953 bool read_only = true;
1954 bool side_effects = false;
1955 tree result, obj, val;
1956 unsigned int n_elmts;
1958 /* Scan the elements to see if they are all constant or if any has side
1959 effects, to let us set global flags on the resulting constructor. Count
1960 the elements along the way for possible sorting purposes below. */
1961 FOR_EACH_CONSTRUCTOR_ELT (v, n_elmts, obj, val)
1963 /* The predicate must be in keeping with output_constructor. */
1964 if ((!TREE_CONSTANT (val) && !TREE_STATIC (val))
1965 || (TREE_CODE (type) == RECORD_TYPE
1966 && CONSTRUCTOR_BITFIELD_P (obj)
1967 && !initializer_constant_valid_for_bitfield_p (val))
1968 || !initializer_constant_valid_p (val,
1969 TREE_TYPE (val),
1970 TYPE_REVERSE_STORAGE_ORDER (type)))
1971 allconstant = false;
1973 if (!TREE_READONLY (val))
1974 read_only = false;
1976 if (TREE_SIDE_EFFECTS (val))
1977 side_effects = true;
1980 /* For record types with constant components only, sort the field list
1981 by increasing bit position. This is necessary to ensure the
1982 constructor can be output as static data. */
1983 if (allconstant && TREE_CODE (type) == RECORD_TYPE && n_elmts > 1)
1984 v->qsort (compare_elmt_bitpos);
1986 result = build_constructor (type, v);
1987 CONSTRUCTOR_NO_CLEARING (result) = 1;
1988 TREE_CONSTANT (result) = TREE_STATIC (result) = allconstant;
1989 TREE_SIDE_EFFECTS (result) = side_effects;
1990 TREE_READONLY (result) = TYPE_READONLY (type) || read_only || allconstant;
1991 return result;
1994 /* Return a COMPONENT_REF to access FIELD in RECORD, or NULL_TREE if the field
1995 is not found in the record. Don't fold the result if NO_FOLD is true. */
1997 static tree
1998 build_simple_component_ref (tree record, tree field, bool no_fold)
2000 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (record));
2001 tree ref;
2003 gcc_assert (RECORD_OR_UNION_TYPE_P (type) && COMPLETE_TYPE_P (type));
2005 /* Try to fold a conversion from another record or union type unless the type
2006 contains a placeholder as it might be needed for a later substitution. */
2007 if (TREE_CODE (record) == VIEW_CONVERT_EXPR
2008 && RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (record, 0)))
2009 && !type_contains_placeholder_p (type))
2011 tree op = TREE_OPERAND (record, 0);
2013 /* If this is an unpadding operation, convert the underlying object to
2014 the unpadded type directly. */
2015 if (TYPE_IS_PADDING_P (type) && field == TYPE_FIELDS (type))
2016 return convert (TREE_TYPE (field), op);
2018 /* Otherwise try to access FIELD directly in the underlying type, but
2019 make sure that the form of the reference doesn't change too much;
2020 this can happen for an unconstrained bit-packed array type whose
2021 constrained form can be an integer type. */
2022 ref = build_simple_component_ref (op, field, no_fold);
2023 if (ref && TREE_CODE (TREE_TYPE (ref)) == TREE_CODE (TREE_TYPE (field)))
2024 return ref;
2027 /* If this field is not in the specified record, see if we can find a field
2028 in the specified record whose original field is the same as this one. */
2029 if (DECL_CONTEXT (field) != type)
2031 tree new_field;
2033 /* First loop through normal components. */
2034 for (new_field = TYPE_FIELDS (type);
2035 new_field;
2036 new_field = DECL_CHAIN (new_field))
2037 if (SAME_FIELD_P (field, new_field))
2038 break;
2040 /* Next, loop through DECL_INTERNAL_P components if we haven't found the
2041 component in the first search. Doing this search in two steps is
2042 required to avoid hidden homonymous fields in the _Parent field. */
2043 if (!new_field)
2044 for (new_field = TYPE_FIELDS (type);
2045 new_field;
2046 new_field = DECL_CHAIN (new_field))
2047 if (DECL_INTERNAL_P (new_field)
2048 && RECORD_OR_UNION_TYPE_P (TREE_TYPE (new_field)))
2050 tree field_ref
2051 = build_simple_component_ref (record, new_field, no_fold);
2052 ref = build_simple_component_ref (field_ref, field, no_fold);
2053 if (ref)
2054 return ref;
2057 field = new_field;
2060 if (!field)
2061 return NULL_TREE;
2063 /* If the field's offset has overflowed, do not try to access it, as doing
2064 so may trigger sanity checks deeper in the back-end. Note that we don't
2065 need to warn since this will be done on trying to declare the object. */
2066 if (TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST
2067 && TREE_OVERFLOW (DECL_FIELD_OFFSET (field)))
2068 return NULL_TREE;
2070 ref = build3 (COMPONENT_REF, TREE_TYPE (field), record, field, NULL_TREE);
2072 if (TREE_READONLY (record)
2073 || TREE_READONLY (field)
2074 || TYPE_READONLY (type))
2075 TREE_READONLY (ref) = 1;
2077 if (TREE_THIS_VOLATILE (record)
2078 || TREE_THIS_VOLATILE (field)
2079 || TYPE_VOLATILE (type))
2080 TREE_THIS_VOLATILE (ref) = 1;
2082 if (no_fold)
2083 return ref;
2085 /* The generic folder may punt in this case because the inner array type
2086 can be self-referential, but folding is in fact not problematic. */
2087 if (TREE_CODE (record) == CONSTRUCTOR
2088 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (record)))
2090 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (record);
2091 unsigned HOST_WIDE_INT idx;
2092 tree index, value;
2093 FOR_EACH_CONSTRUCTOR_ELT (elts, idx, index, value)
2094 if (index == field)
2095 return value;
2096 return ref;
2099 return fold (ref);
2102 /* Likewise, but return NULL_EXPR and generate a Constraint_Error if the
2103 field is not found in the record. */
2105 tree
2106 build_component_ref (tree record, tree field, bool no_fold)
2108 tree ref = build_simple_component_ref (record, field, no_fold);
2109 if (ref)
2110 return ref;
2112 /* Assume this is an invalid user field so raise Constraint_Error. */
2113 return build1 (NULL_EXPR, TREE_TYPE (field),
2114 build_call_raise (CE_Discriminant_Check_Failed, Empty,
2115 N_Raise_Constraint_Error));
2118 /* Helper for build_call_alloc_dealloc, with arguments to be interpreted
2119 identically. Process the case where a GNAT_PROC to call is provided. */
2121 static inline tree
2122 build_call_alloc_dealloc_proc (tree gnu_obj, tree gnu_size, tree gnu_type,
2123 Entity_Id gnat_proc, Entity_Id gnat_pool)
2125 tree gnu_proc = gnat_to_gnu (gnat_proc);
2126 tree gnu_call;
2128 /* A storage pool's underlying type is a record type (for both predefined
2129 storage pools and GNAT simple storage pools). The secondary stack uses
2130 the same mechanism, but its pool object (SS_Pool) is an integer. */
2131 if (Is_Record_Type (Underlying_Type (Etype (gnat_pool))))
2133 /* The size is the third parameter; the alignment
2134 parameter has the same type. */
2135 Entity_Id gnat_size_type
2136 = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
2137 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2139 tree gnu_pool = gnat_to_gnu (gnat_pool);
2140 tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
2141 tree gnu_align = size_int (TYPE_ALIGN (gnu_type) / BITS_PER_UNIT);
2143 gnu_size = convert (gnu_size_type, gnu_size);
2144 gnu_align = convert (gnu_size_type, gnu_align);
2146 /* The first arg is always the address of the storage pool; next
2147 comes the address of the object, for a deallocator, then the
2148 size and alignment. */
2149 if (gnu_obj)
2150 gnu_call = build_call_n_expr (gnu_proc, 4, gnu_pool_addr, gnu_obj,
2151 gnu_size, gnu_align);
2152 else
2153 gnu_call = build_call_n_expr (gnu_proc, 3, gnu_pool_addr,
2154 gnu_size, gnu_align);
2157 /* Secondary stack case. */
2158 else
2160 /* The size is the second parameter. */
2161 Entity_Id gnat_size_type
2162 = Etype (Next_Formal (First_Formal (gnat_proc)));
2163 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2165 gnu_size = convert (gnu_size_type, gnu_size);
2167 /* The first arg is the address of the object, for a deallocator,
2168 then the size. */
2169 if (gnu_obj)
2170 gnu_call = build_call_n_expr (gnu_proc, 2, gnu_obj, gnu_size);
2171 else
2172 gnu_call = build_call_n_expr (gnu_proc, 1, gnu_size);
2175 return gnu_call;
2178 /* Helper for build_call_alloc_dealloc, to build and return an allocator for
2179 DATA_SIZE bytes aimed at containing a DATA_TYPE object, using the default
2180 __gnat_malloc allocator. Honor DATA_TYPE alignments greater than what the
2181 latter offers. */
2183 static inline tree
2184 maybe_wrap_malloc (tree data_size, tree data_type, Node_Id gnat_node)
2186 /* When the DATA_TYPE alignment is stricter than what malloc offers
2187 (super-aligned case), we allocate an "aligning" wrapper type and return
2188 the address of its single data field with the malloc's return value
2189 stored just in front. */
2191 unsigned int data_align = TYPE_ALIGN (data_type);
2192 unsigned int system_allocator_alignment
2193 = get_target_system_allocator_alignment () * BITS_PER_UNIT;
2195 tree aligning_type
2196 = ((data_align > system_allocator_alignment)
2197 ? make_aligning_type (data_type, data_align, data_size,
2198 system_allocator_alignment,
2199 POINTER_SIZE / BITS_PER_UNIT,
2200 gnat_node)
2201 : NULL_TREE);
2203 tree size_to_malloc
2204 = aligning_type ? TYPE_SIZE_UNIT (aligning_type) : data_size;
2206 tree malloc_ptr = build_call_n_expr (malloc_decl, 1, size_to_malloc);
2208 if (aligning_type)
2210 /* Latch malloc's return value and get a pointer to the aligning field
2211 first. */
2212 tree storage_ptr = gnat_protect_expr (malloc_ptr);
2214 tree aligning_record_addr
2215 = convert (build_pointer_type (aligning_type), storage_ptr);
2217 tree aligning_record
2218 = build_unary_op (INDIRECT_REF, NULL_TREE, aligning_record_addr);
2220 tree aligning_field
2221 = build_component_ref (aligning_record, TYPE_FIELDS (aligning_type),
2222 false);
2224 tree aligning_field_addr
2225 = build_unary_op (ADDR_EXPR, NULL_TREE, aligning_field);
2227 /* Then arrange to store the allocator's return value just in front,
2228 and return the field address. */
2229 tree storage_ptr_slot_addr
2230 = build_binary_op (POINTER_PLUS_EXPR, ptr_type_node,
2231 convert (ptr_type_node, aligning_field_addr),
2232 size_int (-(HOST_WIDE_INT) POINTER_SIZE
2233 / BITS_PER_UNIT));
2235 tree storage_ptr_slot
2236 = build_unary_op (INDIRECT_REF, NULL_TREE,
2237 convert (build_pointer_type (ptr_type_node),
2238 storage_ptr_slot_addr));
2240 return
2241 build2 (COMPOUND_EXPR, TREE_TYPE (aligning_field_addr),
2242 build_binary_op (INIT_EXPR, NULL_TREE,
2243 storage_ptr_slot, storage_ptr),
2244 aligning_field_addr);
2246 else
2247 return malloc_ptr;
2250 /* Helper for build_call_alloc_dealloc, to release a DATA_TYPE object
2251 designated by DATA_PTR using the __gnat_free entry point. */
2253 static inline tree
2254 maybe_wrap_free (tree data_ptr, tree data_type)
2256 /* In the regular alignment case, we pass the data pointer straight to free.
2257 In the super-aligned case, we need to retrieve the initial allocator
2258 return value, stored in front of the data block at allocation time. */
2260 unsigned int data_align = TYPE_ALIGN (data_type);
2261 unsigned int system_allocator_alignment
2262 = get_target_system_allocator_alignment () * BITS_PER_UNIT;
2264 tree free_ptr;
2266 if (data_align > system_allocator_alignment)
2268 /* DATA_FRONT_PTR (void *)
2269 = (void *)DATA_PTR - sizeof (void *) */
2270 tree data_front_ptr
2271 = build_binary_op
2272 (POINTER_PLUS_EXPR, ptr_type_node,
2273 convert (ptr_type_node, data_ptr),
2274 size_int (-(HOST_WIDE_INT) POINTER_SIZE / BITS_PER_UNIT));
2276 /* FREE_PTR (void *) = *(void **)DATA_FRONT_PTR */
2277 free_ptr
2278 = build_unary_op
2279 (INDIRECT_REF, NULL_TREE,
2280 convert (build_pointer_type (ptr_type_node), data_front_ptr));
2282 else
2283 free_ptr = data_ptr;
2285 return build_call_n_expr (free_decl, 1, free_ptr);
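/* maybe_wrap_malloc and maybe_wrap_free implement the classic over-align
   scheme: over-allocate, round the address up, and stash the raw malloc
   return value in the word just in front of the block so that free can
   recover it.  A standalone C sketch (compiled out below; names are
   illustrative and ALIGN is assumed to be a power of two stricter than
   malloc's guarantee):  */
#if 0
#include <assert.h>
#include <stdint.h>
#include <stdlib.h>

static void *
superaligned_malloc (size_t size, size_t align)
{
  /* Over-allocate: enough slack for the rounding plus one pointer.  */
  void *raw = malloc (size + align + sizeof (void *));
  if (!raw)
    return NULL;

  /* Round up past the slot reserved for the raw pointer.  */
  uintptr_t aligned
    = ((uintptr_t) raw + sizeof (void *) + align - 1)
      & ~(uintptr_t) (align - 1);

  /* Stash malloc's return value just in front of the block.  */
  ((void **) aligned)[-1] = raw;
  return (void *) aligned;
}

static void
superaligned_free (void *ptr)
{
  /* Recover the raw pointer stored at allocation time.  */
  if (ptr)
    free (((void **) ptr)[-1]);
}

int main (void)
{
  void *p = superaligned_malloc (100, 4096);
  assert (p && ((uintptr_t) p & 4095) == 0);
  superaligned_free (p);
  return 0;
}
#endif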
2288 /* Build a GCC tree to call an allocation or deallocation function.
2289 If GNU_OBJ is nonzero, it is an object to deallocate. Otherwise,
2290 generate an allocator.
2292 GNU_SIZE is the number of bytes to allocate and GNU_TYPE is the contained
2293 object type, used to determine the to-be-honored address alignment.
2294 GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the storage
2295 pool to use. If not present, malloc and free are used. GNAT_NODE is used
2296 to provide an error location for restriction violation messages. */
2298 tree
2299 build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, tree gnu_type,
2300 Entity_Id gnat_proc, Entity_Id gnat_pool,
2301 Node_Id gnat_node)
2303 /* Explicit procedure to call?  It is assumed to deal with the type's
2304 alignment constraints. */
2305 if (Present (gnat_proc))
2306 return build_call_alloc_dealloc_proc (gnu_obj, gnu_size, gnu_type,
2307 gnat_proc, gnat_pool);
2309 /* Otherwise, object to "free" or "malloc" with possible special processing
2310 for alignments stricter than what the default allocator honors. */
2311 else if (gnu_obj)
2312 return maybe_wrap_free (gnu_obj, gnu_type);
2313 else
2315 /* Assert that we can no longer be called with this special pool. */
2316 gcc_assert (gnat_pool != -1);
2318 /* Check that we aren't violating the associated restriction. */
2319 if (!(Nkind (gnat_node) == N_Allocator && Comes_From_Source (gnat_node)))
2321 Check_No_Implicit_Heap_Alloc (gnat_node);
2322 if (Has_Task (Etype (gnat_node)))
2323 Check_No_Implicit_Task_Alloc (gnat_node);
2324 if (Has_Protected (Etype (gnat_node)))
2325 Check_No_Implicit_Protected_Alloc (gnat_node);
2327 return maybe_wrap_malloc (gnu_size, gnu_type, gnat_node);
2331 /* Build a GCC tree that corresponds to allocating an object of TYPE whose
2332 initial value is INIT, if INIT is nonzero. Convert the expression to
2333 RESULT_TYPE, which must be some pointer type, and return the result.
2335 GNAT_PROC and GNAT_POOL optionally give the procedure to call and
2336 the storage pool to use. GNAT_NODE is used to provide an error
2337 location for restriction violation messages. If IGNORE_INIT_TYPE is
2338 true, ignore the type of INIT for the purpose of determining the size;
2339 this will cause the maximum size to be allocated if TYPE is of
2340 self-referential size. */
2342 tree
2343 build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
2344 Entity_Id gnat_pool, Node_Id gnat_node, bool ignore_init_type)
2346 tree size, storage, storage_deref, storage_init;
2348 /* If there is an initializer and it is a NULL_EXPR, just return a new one. */
2349 if (init && TREE_CODE (init) == NULL_EXPR)
2350 return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));
2352 /* If there is an initializer and it is a COND_EXPR, deal with each branch. */
2353 else if (init && TREE_CODE (init) == COND_EXPR)
2354 return build3 (COND_EXPR, result_type, TREE_OPERAND (init, 0),
2355 build_allocator (type, TREE_OPERAND (init, 1), result_type,
2356 gnat_proc, gnat_pool, gnat_node,
2357 ignore_init_type),
2358 build_allocator (type, TREE_OPERAND (init, 2), result_type,
2359 gnat_proc, gnat_pool, gnat_node,
2360 ignore_init_type));
2362 /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
2363 sizes of the object and its template. Allocate the whole thing and
2364 fill in the parts that are known. */
2365 else if (TYPE_IS_FAT_OR_THIN_POINTER_P (result_type))
2367 tree storage_type
2368 = build_unc_object_type_from_ptr (result_type, type,
2369 get_identifier ("ALLOC"), false);
2370 tree template_type = TREE_TYPE (TYPE_FIELDS (storage_type));
2371 tree storage_ptr_type = build_pointer_type (storage_type);
2373 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
2374 init);
2376 /* If the size overflows, pass -1 so Storage_Error will be raised. */
2377 if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
2378 size = size_int (-1);
2380 storage = build_call_alloc_dealloc (NULL_TREE, size, storage_type,
2381 gnat_proc, gnat_pool, gnat_node);
2382 storage = convert (storage_ptr_type, gnat_protect_expr (storage));
2383 storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2384 TREE_THIS_NOTRAP (storage_deref) = 1;
2386 /* If there is an initializing expression, then make a constructor for
2387 the entire object including the bounds and copy it into the object.
2388 If there is no initializing expression, just set the bounds. */
2389 if (init)
2391 vec<constructor_elt, va_gc> *v;
2392 vec_alloc (v, 2);
2394 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (storage_type),
2395 build_template (template_type, type, init));
2396 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (storage_type)),
2397 init);
2398 storage_init
2399 = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref,
2400 gnat_build_constructor (storage_type, v));
2402 else
2403 storage_init
2404 = build_binary_op (INIT_EXPR, NULL_TREE,
2405 build_component_ref (storage_deref,
2406 TYPE_FIELDS (storage_type),
2407 false),
2408 build_template (template_type, type, NULL_TREE));
2410 return build2 (COMPOUND_EXPR, result_type,
2411 storage_init, convert (result_type, storage));
2414 size = TYPE_SIZE_UNIT (type);
2416 /* If we have an initializing expression, see if its size is simpler
2417 than the size from the type. */
2418 if (!ignore_init_type && init && TYPE_SIZE_UNIT (TREE_TYPE (init))
2419 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
2420 || CONTAINS_PLACEHOLDER_P (size)))
2421 size = TYPE_SIZE_UNIT (TREE_TYPE (init));
2423 /* If the size is still self-referential, reference the initializing
2424 expression, if it is present. If not, this must have been a
2425 call to allocate a library-level object, in which case we use
2426 the maximum size. */
2427 if (CONTAINS_PLACEHOLDER_P (size))
2429 if (!ignore_init_type && init)
2430 size = substitute_placeholder_in_expr (size, init);
2431 else
2432 size = max_size (size, true);
2435 /* If the size overflows, pass -1 so Storage_Error will be raised. */
2436 if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
2437 size = size_int (-1);
2439 storage = convert (result_type,
2440 build_call_alloc_dealloc (NULL_TREE, size, type,
2441 gnat_proc, gnat_pool,
2442 gnat_node));
2444 /* If we have an initial value, protect the new address, assign the value
2445 and return the address with a COMPOUND_EXPR. */
2446 if (init)
2448 storage = gnat_protect_expr (storage);
2449 storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2450 TREE_THIS_NOTRAP (storage_deref) = 1;
2451 storage_init
2452 = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref, init);
2453 return build2 (COMPOUND_EXPR, result_type, storage_init, storage);
2456 return storage;
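/* For fat or thin result pointers, build_allocator lays the bounds
   template and the data out in a single block, as sketched below in
   standalone C using a flexible array member (compiled out; names are
   illustrative, not the actual GNAT layout):  */
#if 0
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

struct bounds { int first, last; };

/* One allocation holds both the template (bounds) and the data.  */
struct unc_storage { struct bounds b; char data[]; };

static struct unc_storage *
alloc_unconstrained (int first, int last, const char *init)
{
  size_t len = (size_t) (last - first + 1);
  struct unc_storage *s = malloc (sizeof (struct bounds) + len);

  if (!s)
    return NULL;

  /* Fill in the parts that are known: bounds always, data if given.  */
  s->b.first = first;
  s->b.last = last;
  if (init)
    memcpy (s->data, init, len);
  return s;
}

int main (void)
{
  struct unc_storage *s = alloc_unconstrained (1, 5, "Hello");

  printf ("%d..%d: %.5s\n", s->b.first, s->b.last, s->data);
  free (s);
  return 0;
}
#endif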
2459 /* Indicate that we need to take the address of T and that it therefore
2460 should not be allocated in a register. Return true if successful. */
2462 bool
2463 gnat_mark_addressable (tree t)
2465 while (true)
2466 switch (TREE_CODE (t))
2468 case ADDR_EXPR:
2469 case COMPONENT_REF:
2470 case ARRAY_REF:
2471 case ARRAY_RANGE_REF:
2472 case REALPART_EXPR:
2473 case IMAGPART_EXPR:
2474 case VIEW_CONVERT_EXPR:
2475 case NON_LVALUE_EXPR:
2476 CASE_CONVERT:
2477 t = TREE_OPERAND (t, 0);
2478 break;
2480 case COMPOUND_EXPR:
2481 t = TREE_OPERAND (t, 1);
2482 break;
2484 case CONSTRUCTOR:
2485 TREE_ADDRESSABLE (t) = 1;
2486 return true;
2488 case VAR_DECL:
2489 case PARM_DECL:
2490 case RESULT_DECL:
2491 TREE_ADDRESSABLE (t) = 1;
2492 return true;
2494 case FUNCTION_DECL:
2495 TREE_ADDRESSABLE (t) = 1;
2496 return true;
2498 case CONST_DECL:
2499 return DECL_CONST_CORRESPONDING_VAR (t)
2500 && gnat_mark_addressable (DECL_CONST_CORRESPONDING_VAR (t));
2502 default:
2503 return true;
2507 /* Return true if EXP is a stable expression for the purpose of the functions
2508 below and, therefore, can be returned unmodified by them. We accept things
2509 that are actual constants or that have already been handled. */
2511 static bool
2512 gnat_stable_expr_p (tree exp)
2514 enum tree_code code = TREE_CODE (exp);
2515 return TREE_CONSTANT (exp) || code == NULL_EXPR || code == SAVE_EXPR;
2518 /* Save EXP for later use or reuse. This is equivalent to save_expr in tree.c
2519 but we know how to handle our own nodes. */
2521 tree
2522 gnat_save_expr (tree exp)
2524 tree type = TREE_TYPE (exp);
2525 enum tree_code code = TREE_CODE (exp);
2527 if (gnat_stable_expr_p (exp))
2528 return exp;
2530 if (code == UNCONSTRAINED_ARRAY_REF)
2532 tree t = build1 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)));
2533 TREE_READONLY (t) = TYPE_READONLY (type);
2534 return t;
2537 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2538 This may be more efficient, but will also allow us to more easily find
2539 the match for the PLACEHOLDER_EXPR. */
2540 if (code == COMPONENT_REF
2541 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2542 return build3 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)),
2543 TREE_OPERAND (exp, 1), NULL_TREE);
2545 return save_expr (exp);
2548 /* Protect EXP for immediate reuse. This is a variant of gnat_save_expr that
2549 is optimized under the assumption that EXP's value doesn't change before
2550 its subsequent reuse(s) except through its potential reevaluation. */
2552 tree
2553 gnat_protect_expr (tree exp)
2555 tree type = TREE_TYPE (exp);
2556 enum tree_code code = TREE_CODE (exp);
2558 if (gnat_stable_expr_p (exp))
2559 return exp;
2561 /* If EXP has no side effects, we theoretically don't need to do anything.
2562 However, we may be recursively passed more and more complex expressions
2563 involving checks which will be reused multiple times and eventually be
2564 unshared for gimplification; in order to avoid a complexity explosion
2565 at that point, we protect any expressions more complex than a simple
2566 arithmetic expression. */
2567 if (!TREE_SIDE_EFFECTS (exp))
2569 tree inner = skip_simple_arithmetic (exp);
2570 if (!EXPR_P (inner) || REFERENCE_CLASS_P (inner))
2571 return exp;
2574 /* If this is a conversion, protect what's inside the conversion. */
2575 if (code == NON_LVALUE_EXPR
2576 || CONVERT_EXPR_CODE_P (code)
2577 || code == VIEW_CONVERT_EXPR)
2578 return build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2580 /* If we're indirectly referencing something, we only need to protect the
2581 address since the data itself can't change in these situations. */
2582 if (code == INDIRECT_REF || code == UNCONSTRAINED_ARRAY_REF)
2584 tree t = build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2585 TREE_READONLY (t) = TYPE_READONLY (type);
2586 return t;
2589 /* Likewise if we're indirectly referencing part of something. */
2590 if (code == COMPONENT_REF
2591 && TREE_CODE (TREE_OPERAND (exp, 0)) == INDIRECT_REF)
2592 return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
2593 TREE_OPERAND (exp, 1), NULL_TREE);
2595 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2596 This may be more efficient, but will also allow us to more easily find
2597 the match for the PLACEHOLDER_EXPR. */
2598 if (code == COMPONENT_REF
2599 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2600 return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
2601 TREE_OPERAND (exp, 1), NULL_TREE);
2603 /* If this is a fat pointer or a scalar, just make a SAVE_EXPR. Likewise
2604 for a CALL_EXPR as large objects are returned via invisible reference
2605 in most ABIs so the temporary will directly be filled by the callee. */
2606 if (TYPE_IS_FAT_POINTER_P (type)
2607 || !AGGREGATE_TYPE_P (type)
2608 || code == CALL_EXPR)
2609 return save_expr (exp);
2611 /* Otherwise take the address, protect it and dereference. */
2612 return
2613 build_unary_op (INDIRECT_REF, type,
2614 save_expr (build_unary_op (ADDR_EXPR, NULL_TREE, exp)));
2617 /* This is equivalent to stabilize_reference_1 in tree.c but we take an extra
2618 argument to force evaluation of everything. */
2620 static tree
2621 gnat_stabilize_reference_1 (tree e, void *data)
2623 const bool force = *(bool *)data;
2624 enum tree_code code = TREE_CODE (e);
2625 tree type = TREE_TYPE (e);
2626 tree result;
2628 if (gnat_stable_expr_p (e))
2629 return e;
2631 switch (TREE_CODE_CLASS (code))
2633 case tcc_exceptional:
2634 case tcc_declaration:
2635 case tcc_comparison:
2636 case tcc_expression:
2637 case tcc_reference:
2638 case tcc_vl_exp:
2639 /* If this is a COMPONENT_REF of a fat pointer, save the entire
2640 fat pointer. This may be more efficient, but will also allow
2641 us to more easily find the match for the PLACEHOLDER_EXPR. */
2642 if (code == COMPONENT_REF
2643 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (e, 0))))
2644 result
2645 = build3 (code, type,
2646 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data),
2647 TREE_OPERAND (e, 1), NULL_TREE);
2648 /* If the expression has side-effects, then encase it in a SAVE_EXPR
2649 so that it will only be evaluated once. */
2650 /* The tcc_reference and tcc_comparison classes could be handled as
2651 below, but it is generally faster to only evaluate them once. */
2652 else if (TREE_SIDE_EFFECTS (e) || force)
2653 return save_expr (e);
2654 else
2655 return e;
2656 break;
2658 case tcc_binary:
2659 /* Recursively stabilize each operand. */
2660 result
2661 = build2 (code, type,
2662 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data),
2663 gnat_stabilize_reference_1 (TREE_OPERAND (e, 1), data));
2664 break;
2666 case tcc_unary:
2667 /* Recursively stabilize each operand. */
2668 result
2669 = build1 (code, type,
2670 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data));
2671 break;
2673 default:
2674 gcc_unreachable ();
2677 TREE_READONLY (result) = TREE_READONLY (e);
2678 TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (e);
2679 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
2681 return result;
2684 /* This is equivalent to stabilize_reference in tree.c but we know how to
2685 handle our own nodes and we take extra arguments. FORCE says whether to
2686 force evaluation of everything in REF. INIT is set to the first arm of
2687 a COMPOUND_EXPR present in REF, if any. */
2689 tree
2690 gnat_stabilize_reference (tree ref, bool force, tree *init)
2692 return
2693 gnat_rewrite_reference (ref, gnat_stabilize_reference_1, &force, init);
2696 /* Rewrite reference REF and call FUNC on each expression within REF in the
2697 process. DATA is passed unmodified to FUNC. INIT is set to the first
2698 arm of a COMPOUND_EXPR present in REF, if any. */
2700 tree
2701 gnat_rewrite_reference (tree ref, rewrite_fn func, void *data, tree *init)
2703 tree type = TREE_TYPE (ref);
2704 enum tree_code code = TREE_CODE (ref);
2705 tree result;
2707 switch (code)
2709 case CONST_DECL:
2710 case VAR_DECL:
2711 case PARM_DECL:
2712 case RESULT_DECL:
2713 /* No action is needed in this case. */
2714 return ref;
2716 CASE_CONVERT:
2717 case FLOAT_EXPR:
2718 case FIX_TRUNC_EXPR:
2719 case REALPART_EXPR:
2720 case IMAGPART_EXPR:
2721 case VIEW_CONVERT_EXPR:
2722 result
2723 = build1 (code, type,
2724 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func, data,
2725 init));
2726 break;
2728 case INDIRECT_REF:
2729 case UNCONSTRAINED_ARRAY_REF:
2730 result = build1 (code, type, func (TREE_OPERAND (ref, 0), data));
2731 break;
2733 case COMPONENT_REF:
2734 result = build3 (COMPONENT_REF, type,
2735 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func,
2736 data, init),
2737 TREE_OPERAND (ref, 1), NULL_TREE);
2738 break;
2740 case BIT_FIELD_REF:
2741 result = build3 (BIT_FIELD_REF, type,
2742 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func,
2743 data, init),
2744 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
2745 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
2746 break;
2748 case ARRAY_REF:
2749 case ARRAY_RANGE_REF:
2750 result
2751 = build4 (code, type,
2752 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func, data,
2753 init),
2754 func (TREE_OPERAND (ref, 1), data),
2755 TREE_OPERAND (ref, 2), NULL_TREE);
2756 break;
2758 case COMPOUND_EXPR:
2759 gcc_assert (!*init);
2760 *init = TREE_OPERAND (ref, 0);
2761 /* We expect only the pattern built in Call_to_gnu. */
2762 gcc_assert (DECL_P (TREE_OPERAND (ref, 1))
2763 || (TREE_CODE (TREE_OPERAND (ref, 1)) == COMPONENT_REF
2764 && DECL_P (TREE_OPERAND (TREE_OPERAND (ref, 1), 0))));
2765 return TREE_OPERAND (ref, 1);
2767 case CALL_EXPR:
2769 /* This can only be an atomic load. */
2770 gcc_assert (call_is_atomic_load (ref));
2772 /* An atomic load is an INDIRECT_REF of its first argument. */
2773 tree t = CALL_EXPR_ARG (ref, 0);
2774 if (TREE_CODE (t) == NOP_EXPR)
2775 t = TREE_OPERAND (t, 0);
2776 if (TREE_CODE (t) == ADDR_EXPR)
2777 t = build1 (ADDR_EXPR, TREE_TYPE (t),
2778 gnat_rewrite_reference (TREE_OPERAND (t, 0), func, data,
2779 init));
2780 else
2781 t = func (t, data);
2782 t = fold_convert (TREE_TYPE (CALL_EXPR_ARG (ref, 0)), t);
2784 result = build_call_expr (TREE_OPERAND (CALL_EXPR_FN (ref), 0), 2,
2785 t, CALL_EXPR_ARG (ref, 1));
2787 break;
2789 case ERROR_MARK:
2790 case NULL_EXPR:
2791 return ref;
2793 default:
2794 gcc_unreachable ();
2797 /* TREE_THIS_VOLATILE and TREE_SIDE_EFFECTS set on the initial expression
2798 may not be sustained across some paths, such as the path via build1 for
2799 INDIRECT_REF. We reset those flags here in the general case, which is
2800 consistent with the GCC version of this routine.
2802 Special care should be taken regarding TREE_SIDE_EFFECTS, because some
2803 paths introduce side-effects where there was none initially (e.g. if a
2804 SAVE_EXPR is built) and we also want to keep track of that. */
2805 TREE_READONLY (result) = TREE_READONLY (ref);
2806 TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (ref);
2807 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
2809 if (code == INDIRECT_REF
2810 || code == UNCONSTRAINED_ARRAY_REF
2811 || code == ARRAY_REF
2812 || code == ARRAY_RANGE_REF)
2813 TREE_THIS_NOTRAP (result) = TREE_THIS_NOTRAP (ref);
2815 return result;
2818 /* This is equivalent to get_inner_reference in expr.c but it returns the
2819 ultimate containing object only if the reference (lvalue) is constant,
2820 i.e. if it doesn't depend on the context in which it is evaluated. */
2822 tree
2823 get_inner_constant_reference (tree exp)
2825 while (true)
2827 switch (TREE_CODE (exp))
2829 case BIT_FIELD_REF:
2830 break;
2832 case COMPONENT_REF:
2833 if (!TREE_CONSTANT (DECL_FIELD_OFFSET (TREE_OPERAND (exp, 1))))
2834 return NULL_TREE;
2835 break;
2837 case ARRAY_REF:
2838 case ARRAY_RANGE_REF:
2840 if (TREE_OPERAND (exp, 2))
2841 return NULL_TREE;
2843 tree array_type = TREE_TYPE (TREE_OPERAND (exp, 0));
2844 if (!TREE_CONSTANT (TREE_OPERAND (exp, 1))
2845 || !TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (array_type)))
2846 || !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (array_type))))
2847 return NULL_TREE;
2849 break;
2851 case REALPART_EXPR:
2852 case IMAGPART_EXPR:
2853 case VIEW_CONVERT_EXPR:
2854 break;
2856 default:
2857 goto done;
2860 exp = TREE_OPERAND (exp, 0);
2863 done:
2864 return exp;
2867 /* Return true if EXPR is the addition or the subtraction of a constant and,
2868 if so, set *ADD to the addend, *CST to the constant and *MINUS_P to true
2869 if this is a subtraction. */
2871 bool
2872 is_simple_additive_expression (tree expr, tree *add, tree *cst, bool *minus_p)
2874 /* Skip overflow checks. */
2875 if (TREE_CODE (expr) == COND_EXPR
2876 && TREE_CODE (COND_EXPR_THEN (expr)) == COMPOUND_EXPR
2877 && TREE_CODE (TREE_OPERAND (COND_EXPR_THEN (expr), 0)) == CALL_EXPR
2878 && get_callee_fndecl (TREE_OPERAND (COND_EXPR_THEN (expr), 0))
2879 == gnat_raise_decls[CE_Overflow_Check_Failed])
2880 expr = COND_EXPR_ELSE (expr);
2882 if (TREE_CODE (expr) == PLUS_EXPR)
2884 if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
2886 *add = TREE_OPERAND (expr, 1);
2887 *cst = TREE_OPERAND (expr, 0);
2888 *minus_p = false;
2889 return true;
2891 else if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
2893 *add = TREE_OPERAND (expr, 0);
2894 *cst = TREE_OPERAND (expr, 1);
2895 *minus_p = false;
2896 return true;
2899 else if (TREE_CODE (expr) == MINUS_EXPR)
2901 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
2903 *add = TREE_OPERAND (expr, 0);
2904 *cst = TREE_OPERAND (expr, 1);
2905 *minus_p = true;
2906 return true;
2910 return false;
2913 /* If EXPR is an expression that is invariant in the current function, in the
2914 sense that it can be evaluated anywhere in the function and any number of
2915 times, return EXPR or an equivalent expression. Otherwise return NULL. */
2917 tree
2918 gnat_invariant_expr (tree expr)
2920 const tree type = TREE_TYPE (expr);
2921 tree add, cst;
2922 bool minus_p;
2924 expr = remove_conversions (expr, false);
2926 /* Look through temporaries created to capture values. */
2927 while ((TREE_CODE (expr) == CONST_DECL
2928 || (TREE_CODE (expr) == VAR_DECL && TREE_READONLY (expr)))
2929 && decl_function_context (expr) == current_function_decl
2930 && DECL_INITIAL (expr))
2932 expr = DECL_INITIAL (expr);
2933 /* Look into CONSTRUCTORs built to initialize padded types. */
2934 if (TYPE_IS_PADDING_P (TREE_TYPE (expr)))
2935 expr = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (expr))), expr);
2936 expr = remove_conversions (expr, false);
2939 /* We are only interested in scalar types at the moment and, even though we may
2940 have gone through padding types in the above loop, we must be back to a
2941 scalar value at this point. */
2942 if (AGGREGATE_TYPE_P (TREE_TYPE (expr)))
2943 return NULL_TREE;
2945 if (TREE_CONSTANT (expr))
2946 return fold_convert (type, expr);
2948 /* Deal with addition or subtraction of constants. */
2949 if (is_simple_additive_expression (expr, &add, &cst, &minus_p))
2951 add = gnat_invariant_expr (add);
2952 if (add)
2953 return
2954 fold_build2 (minus_p ? MINUS_EXPR : PLUS_EXPR, type,
2955 fold_convert (type, add), fold_convert (type, cst));
2956 else
2957 return NULL_TREE;
2960 bool invariant_p = false;
2961 tree t = expr;
2963 while (true)
2965 switch (TREE_CODE (t))
2967 case COMPONENT_REF:
2968 invariant_p |= DECL_INVARIANT_P (TREE_OPERAND (t, 1));
2969 break;
2971 case ARRAY_REF:
2972 case ARRAY_RANGE_REF:
2973 if (!TREE_CONSTANT (TREE_OPERAND (t, 1)) || TREE_OPERAND (t, 2))
2974 return NULL_TREE;
2975 break;
2977 case BIT_FIELD_REF:
2978 case REALPART_EXPR:
2979 case IMAGPART_EXPR:
2980 case VIEW_CONVERT_EXPR:
2981 CASE_CONVERT:
2982 break;
2984 case INDIRECT_REF:
2985 if ((!invariant_p && !TREE_READONLY (t)) || TREE_SIDE_EFFECTS (t))
2986 return NULL_TREE;
2987 invariant_p = false;
2988 break;
2990 default:
2991 goto object;
2994 t = TREE_OPERAND (t, 0);
2997 object:
2998 if (TREE_SIDE_EFFECTS (t))
2999 return NULL_TREE;
3001 if (TREE_CODE (t) == CONST_DECL
3002 && (DECL_EXTERNAL (t)
3003 || decl_function_context (t) != current_function_decl))
3004 return fold_convert (type, expr);
3006 if (!invariant_p && !TREE_READONLY (t))
3007 return NULL_TREE;
3009 if (TREE_CODE (t) == PARM_DECL)
3010 return fold_convert (type, expr);
3012 if (TREE_CODE (t) == VAR_DECL
3013 && (DECL_EXTERNAL (t)
3014 || decl_function_context (t) != current_function_decl))
3015 return fold_convert (type, expr);
3017 return NULL_TREE;