1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S 2 *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2016, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "memmodel.h"
30 #include "tm.h"
31 #include "vec.h"
32 #include "alias.h"
33 #include "tree.h"
34 #include "inchash.h"
35 #include "fold-const.h"
36 #include "stor-layout.h"
37 #include "stringpool.h"
38 #include "varasm.h"
39 #include "flags.h"
40 #include "toplev.h"
41 #include "ggc.h"
42 #include "tree-inline.h"
44 #include "ada.h"
45 #include "types.h"
46 #include "atree.h"
47 #include "elists.h"
48 #include "namet.h"
49 #include "nlists.h"
50 #include "snames.h"
51 #include "stringt.h"
52 #include "uintp.h"
53 #include "fe.h"
54 #include "sinfo.h"
55 #include "einfo.h"
56 #include "ada-tree.h"
57 #include "gigi.h"
59 /* Return the base type of TYPE. */
61 tree
62 get_base_type (tree type)
64 if (TREE_CODE (type) == RECORD_TYPE
65 && TYPE_JUSTIFIED_MODULAR_P (type))
66 type = TREE_TYPE (TYPE_FIELDS (type));
68 while (TREE_TYPE (type)
69 && (TREE_CODE (type) == INTEGER_TYPE
70 || TREE_CODE (type) == REAL_TYPE))
71 type = TREE_TYPE (type);
73 return type;
76 /* EXP is a GCC tree representing an address. See if we can find how strictly
77 the object at this address is aligned and, if so, return the alignment of
78 the object in bits. Otherwise return 0. */
80 unsigned int
81 known_alignment (tree exp)
83 unsigned int this_alignment;
84 unsigned int lhs, rhs;
86 switch (TREE_CODE (exp))
88 CASE_CONVERT:
89 case VIEW_CONVERT_EXPR:
90 case NON_LVALUE_EXPR:
91 /* Conversions between pointers and integers don't change the alignment
92 of the underlying object. */
93 this_alignment = known_alignment (TREE_OPERAND (exp, 0));
94 break;
96 case COMPOUND_EXPR:
97 /* The value of a COMPOUND_EXPR is that of its second operand. */
98 this_alignment = known_alignment (TREE_OPERAND (exp, 1));
99 break;
101 case PLUS_EXPR:
102 case MINUS_EXPR:
103 /* If two addresses are added, the alignment of the result is the
104 minimum of the two alignments. */
105 lhs = known_alignment (TREE_OPERAND (exp, 0));
106 rhs = known_alignment (TREE_OPERAND (exp, 1));
107 this_alignment = MIN (lhs, rhs);
108 break;
110 case POINTER_PLUS_EXPR:
111 /* If this is the pattern built for aligning types, decode it. */
112 if (TREE_CODE (TREE_OPERAND (exp, 1)) == BIT_AND_EXPR
113 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)) == NEGATE_EXPR)
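/* The pattern decoded here is typically BASE + ((-BASE) & MASK); the
   alignment it enforces is that of ~MASK (e.g. MASK = align - 1 gives
   ~MASK = -align, whose lowest set bit is align), so it is computed
   from the complement of the mask below.  */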
115 tree op = TREE_OPERAND (TREE_OPERAND (exp, 1), 1);
116 return
117 known_alignment (fold_build1 (BIT_NOT_EXPR, TREE_TYPE (op), op));
120 /* If we don't know the alignment of the offset, we assume that
121 of the base. */
122 lhs = known_alignment (TREE_OPERAND (exp, 0));
123 rhs = known_alignment (TREE_OPERAND (exp, 1));
125 if (rhs == 0)
126 this_alignment = lhs;
127 else
128 this_alignment = MIN (lhs, rhs);
129 break;
131 case COND_EXPR:
132 /* If there is a choice between two values, use the smaller one. */
133 lhs = known_alignment (TREE_OPERAND (exp, 1));
134 rhs = known_alignment (TREE_OPERAND (exp, 2));
135 this_alignment = MIN (lhs, rhs);
136 break;
138 case INTEGER_CST:
140 unsigned HOST_WIDE_INT c = TREE_INT_CST_LOW (exp);
141 /* The first part of this represents the lowest bit in the constant,
142 but it is originally in bytes, not bits. */
143 this_alignment = (c & -c) * BITS_PER_UNIT;
145 break;
147 case MULT_EXPR:
148 /* If we know the alignment of just one side, use it. Otherwise,
149 use the product of the alignments. */
150 lhs = known_alignment (TREE_OPERAND (exp, 0));
151 rhs = known_alignment (TREE_OPERAND (exp, 1));
153 if (lhs == 0)
154 this_alignment = rhs;
155 else if (rhs == 0)
156 this_alignment = lhs;
157 else
158 this_alignment = MIN (lhs * rhs, BIGGEST_ALIGNMENT);
159 break;
161 case BIT_AND_EXPR:
162 /* A bit-and expression is as aligned as the maximum alignment of the
163 operands. We typically get here for a complex lhs and a constant
164 negative power of two on the rhs to force an explicit alignment, so
165 don't bother looking at the lhs. */
166 this_alignment = known_alignment (TREE_OPERAND (exp, 1));
167 break;
169 case ADDR_EXPR:
170 this_alignment = expr_align (TREE_OPERAND (exp, 0));
171 break;
173 case CALL_EXPR:
175 tree fndecl = get_callee_fndecl (exp);
176 if (fndecl == malloc_decl || fndecl == realloc_decl)
177 return get_target_system_allocator_alignment () * BITS_PER_UNIT;
179 tree t = maybe_inline_call_in_expr (exp);
180 if (t)
181 return known_alignment (t);
184 /* ... fall through ... */
186 default:
187 /* For other pointer expressions, we assume that the pointed-to object
188 is at least as aligned as the pointed-to type. Beware that we can
189 have a dummy type here (e.g. a Taft Amendment type), for which the
190 alignment is meaningless and should be ignored. */
191 if (POINTER_TYPE_P (TREE_TYPE (exp))
192 && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp)))
193 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (exp))))
194 this_alignment = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
195 else
196 this_alignment = 0;
197 break;
200 return this_alignment;
203 /* We have a comparison or assignment operation on two types, T1 and T2, which
204 are either both array types or both record types. T1 is assumed to be for
205 the left hand side operand, and T2 for the right hand side. Return the
206 type that both operands should be converted to for the operation, if any.
207 Otherwise return zero. */
209 static tree
210 find_common_type (tree t1, tree t2)
 212   /* ??? As of today, various constructs lead here with types of different
 213      sizes even when both are constant (e.g. tagged types, packable vs regular
214 component types, padded vs unpadded types, ...). While some of these
215 would better be handled upstream (types should be made consistent before
216 calling into build_binary_op), some others are really expected and we
217 have to be careful. */
219 const bool variable_record_on_lhs
220 = (TREE_CODE (t1) == RECORD_TYPE
221 && TREE_CODE (t2) == RECORD_TYPE
222 && get_variant_part (t1)
223 && !get_variant_part (t2));
225 const bool variable_array_on_lhs
226 = (TREE_CODE (t1) == ARRAY_TYPE
227 && TREE_CODE (t2) == ARRAY_TYPE
228 && !TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)))
229 && TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (t2))));
 231   /* We must avoid writing more than what the target can hold if this is for
 232      an assignment, and the case of tagged types is handled in build_binary_op,
 233      so we use the lhs type if it is known to be smaller or of constant size
 234      and the rhs type is not, whatever the modes.  We also force t1 in case of
 235      constant size equality, to minimize occurrences of view conversions on the
 236      lhs of an assignment, except for the case of types with a variable part
 237      on the lhs but not on the rhs, to make the conversion simpler.  */
238 if (TREE_CONSTANT (TYPE_SIZE (t1))
239 && (!TREE_CONSTANT (TYPE_SIZE (t2))
240 || tree_int_cst_lt (TYPE_SIZE (t1), TYPE_SIZE (t2))
241 || (TYPE_SIZE (t1) == TYPE_SIZE (t2)
242 && !variable_record_on_lhs
243 && !variable_array_on_lhs)))
244 return t1;
246 /* Otherwise, if the lhs type is non-BLKmode, use it, except for the case of
247 a non-BLKmode rhs and array types with a variable part on the lhs but not
248 on the rhs to make sure the conversion is preserved during gimplification.
249 Note that we know that we will not have any alignment problems since, if
250 we did, the non-BLKmode type could not have been used. */
251 if (TYPE_MODE (t1) != BLKmode
252 && (TYPE_MODE (t2) == BLKmode || !variable_array_on_lhs))
253 return t1;
255 /* If the rhs type is of constant size, use it whatever the modes. At
256 this point it is known to be smaller, or of constant size and the
257 lhs type is not. */
258 if (TREE_CONSTANT (TYPE_SIZE (t2)))
259 return t2;
261 /* Otherwise, if the rhs type is non-BLKmode, use it. */
262 if (TYPE_MODE (t2) != BLKmode)
263 return t2;
265 /* In this case, both types have variable size and BLKmode. It's
266 probably best to leave the "type mismatch" because changing it
267 could cause a bad self-referential reference. */
268 return NULL_TREE;
271 /* Return an expression tree representing an equality comparison of A1 and A2,
272 two objects of type ARRAY_TYPE. The result should be of type RESULT_TYPE.
274 Two arrays are equal in one of two ways: (1) if both have zero length in
275 some dimension (not necessarily the same dimension) or (2) if the lengths
276 in each dimension are equal and the data is equal. We perform the length
277 tests in as efficient a manner as possible. */
279 static tree
280 compare_arrays (location_t loc, tree result_type, tree a1, tree a2)
282 tree result = convert (result_type, boolean_true_node);
283 tree a1_is_null = convert (result_type, boolean_false_node);
284 tree a2_is_null = convert (result_type, boolean_false_node);
285 tree t1 = TREE_TYPE (a1);
286 tree t2 = TREE_TYPE (a2);
287 bool a1_side_effects_p = TREE_SIDE_EFFECTS (a1);
288 bool a2_side_effects_p = TREE_SIDE_EFFECTS (a2);
289 bool length_zero_p = false;
291 /* If the operands have side-effects, they need to be evaluated only once
292 in spite of the multiple references in the comparison. */
293 if (a1_side_effects_p)
294 a1 = gnat_protect_expr (a1);
296 if (a2_side_effects_p)
297 a2 = gnat_protect_expr (a2);
299 /* Process each dimension separately and compare the lengths. If any
300 dimension has a length known to be zero, set LENGTH_ZERO_P to true
301 in order to suppress the comparison of the data at the end. */
302 while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
304 tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
305 tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
306 tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
307 tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
308 tree length1 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub1, lb1),
309 size_one_node);
310 tree length2 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub2, lb2),
311 size_one_node);
312 tree comparison, this_a1_is_null, this_a2_is_null;
314 /* If the length of the first array is a constant, swap our operands
315 unless the length of the second array is the constant zero. */
316 if (TREE_CODE (length1) == INTEGER_CST && !integer_zerop (length2))
318 tree tem;
319 bool btem;
321 tem = a1, a1 = a2, a2 = tem;
322 tem = t1, t1 = t2, t2 = tem;
323 tem = lb1, lb1 = lb2, lb2 = tem;
324 tem = ub1, ub1 = ub2, ub2 = tem;
325 tem = length1, length1 = length2, length2 = tem;
326 tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
327 btem = a1_side_effects_p, a1_side_effects_p = a2_side_effects_p,
328 a2_side_effects_p = btem;
331 /* If the length of the second array is the constant zero, we can just
332 use the original stored bounds for the first array and see whether
333 last < first holds. */
334 if (integer_zerop (length2))
336 tree b = get_base_type (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
338 length_zero_p = true;
 340 	  ub1
 341 	    = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
 342 	  lb1
 343 	    = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
345 comparison = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
346 comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
347 if (EXPR_P (comparison))
348 SET_EXPR_LOCATION (comparison, loc);
350 this_a1_is_null = comparison;
351 this_a2_is_null = convert (result_type, boolean_true_node);
354 /* Otherwise, if the length is some other constant value, we know that
355 this dimension in the second array cannot be superflat, so we can
356 just use its length computed from the actual stored bounds. */
357 else if (TREE_CODE (length2) == INTEGER_CST)
359 tree b = get_base_type (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
 361 	  ub1
 362 	    = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
 363 	  lb1
 364 	    = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
 365 	  /* Note that we know that UB2 and LB2 are constant and hence
 366 	     cannot contain a PLACEHOLDER_EXPR.  */
 367 	  ub2
 368 	    = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2))));
 369 	  lb2
 370 	    = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2))));
372 comparison
373 = fold_build2_loc (loc, EQ_EXPR, result_type,
374 build_binary_op (MINUS_EXPR, b, ub1, lb1),
375 build_binary_op (MINUS_EXPR, b, ub2, lb2));
376 comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
377 if (EXPR_P (comparison))
378 SET_EXPR_LOCATION (comparison, loc);
380 this_a1_is_null
381 = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
383 this_a2_is_null = convert (result_type, boolean_false_node);
386 /* Otherwise, compare the computed lengths. */
387 else
389 length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
390 length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);
392 comparison
393 = fold_build2_loc (loc, EQ_EXPR, result_type, length1, length2);
395 /* If the length expression is of the form (cond ? val : 0), assume
396 that cond is equivalent to (length != 0). That's guaranteed by
397 construction of the array types in gnat_to_gnu_entity. */
398 if (TREE_CODE (length1) == COND_EXPR
399 && integer_zerop (TREE_OPERAND (length1, 2)))
400 this_a1_is_null
401 = invert_truthvalue_loc (loc, TREE_OPERAND (length1, 0));
402 else
403 this_a1_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
404 length1, size_zero_node);
406 /* Likewise for the second array. */
407 if (TREE_CODE (length2) == COND_EXPR
408 && integer_zerop (TREE_OPERAND (length2, 2)))
409 this_a2_is_null
410 = invert_truthvalue_loc (loc, TREE_OPERAND (length2, 0));
411 else
412 this_a2_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
413 length2, size_zero_node);
416 /* Append expressions for this dimension to the final expressions. */
417 result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
418 result, comparison);
420 a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
421 this_a1_is_null, a1_is_null);
423 a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
424 this_a2_is_null, a2_is_null);
426 t1 = TREE_TYPE (t1);
427 t2 = TREE_TYPE (t2);
430 /* Unless the length of some dimension is known to be zero, compare the
431 data in the array. */
432 if (!length_zero_p)
434 tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));
435 tree comparison;
437 if (type)
439 a1 = convert (type, a1),
440 a2 = convert (type, a2);
443 comparison = fold_build2_loc (loc, EQ_EXPR, result_type, a1, a2);
445 result
446 = build_binary_op (TRUTH_ANDIF_EXPR, result_type, result, comparison);
449 /* The result is also true if both sizes are zero. */
450 result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
451 build_binary_op (TRUTH_ANDIF_EXPR, result_type,
452 a1_is_null, a2_is_null),
453 result);
455 /* If the operands have side-effects, they need to be evaluated before
456 doing the tests above since the place they otherwise would end up
457 being evaluated at run time could be wrong. */
458 if (a1_side_effects_p)
459 result = build2 (COMPOUND_EXPR, result_type, a1, result);
461 if (a2_side_effects_p)
462 result = build2 (COMPOUND_EXPR, result_type, a2, result);
464 return result;
467 /* Return an expression tree representing an equality comparison of P1 and P2,
468 two objects of fat pointer type. The result should be of type RESULT_TYPE.
470 Two fat pointers are equal in one of two ways: (1) if both have a null
471 pointer to the array or (2) if they contain the same couple of pointers.
472 We perform the comparison in as efficient a manner as possible. */
474 static tree
475 compare_fat_pointers (location_t loc, tree result_type, tree p1, tree p2)
477 tree p1_array, p2_array, p1_bounds, p2_bounds, same_array, same_bounds;
478 tree p1_array_is_null, p2_array_is_null;
480 /* If either operand has side-effects, they have to be evaluated only once
481 in spite of the multiple references to the operand in the comparison. */
482 p1 = gnat_protect_expr (p1);
483 p2 = gnat_protect_expr (p2);
485 /* The constant folder doesn't fold fat pointer types so we do it here. */
486 if (TREE_CODE (p1) == CONSTRUCTOR)
487 p1_array = CONSTRUCTOR_ELT (p1, 0)->value;
488 else
489 p1_array = build_component_ref (p1, TYPE_FIELDS (TREE_TYPE (p1)), true);
491 p1_array_is_null
492 = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array,
493 fold_convert_loc (loc, TREE_TYPE (p1_array),
494 null_pointer_node));
496 if (TREE_CODE (p2) == CONSTRUCTOR)
497 p2_array = CONSTRUCTOR_ELT (p2, 0)->value;
498 else
499 p2_array = build_component_ref (p2, TYPE_FIELDS (TREE_TYPE (p2)), true);
501 p2_array_is_null
502 = fold_build2_loc (loc, EQ_EXPR, result_type, p2_array,
503 fold_convert_loc (loc, TREE_TYPE (p2_array),
504 null_pointer_node));
506 /* If one of the pointers to the array is null, just compare the other. */
507 if (integer_zerop (p1_array))
508 return p2_array_is_null;
509 else if (integer_zerop (p2_array))
510 return p1_array_is_null;
512 /* Otherwise, do the fully-fledged comparison. */
513 same_array
514 = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array, p2_array);
516 if (TREE_CODE (p1) == CONSTRUCTOR)
517 p1_bounds = CONSTRUCTOR_ELT (p1, 1)->value;
518 else
519 p1_bounds
520 = build_component_ref (p1, DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p1))),
521 true);
523 if (TREE_CODE (p2) == CONSTRUCTOR)
524 p2_bounds = CONSTRUCTOR_ELT (p2, 1)->value;
525 else
526 p2_bounds
527 = build_component_ref (p2, DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p2))),
528 true);
530 same_bounds
531 = fold_build2_loc (loc, EQ_EXPR, result_type, p1_bounds, p2_bounds);
533 /* P1_ARRAY == P2_ARRAY && (P1_ARRAY == NULL || P1_BOUNDS == P2_BOUNDS). */
534 return build_binary_op (TRUTH_ANDIF_EXPR, result_type, same_array,
535 build_binary_op (TRUTH_ORIF_EXPR, result_type,
536 p1_array_is_null, same_bounds));
539 /* Compute the result of applying OP_CODE to LHS and RHS, where both are of
540 type TYPE. We know that TYPE is a modular type with a nonbinary
541 modulus. */
543 static tree
544 nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
545 tree rhs)
547 tree modulus = TYPE_MODULUS (type);
548 unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
549 unsigned int precision;
550 bool unsignedp = true;
551 tree op_type = type;
552 tree result;
554 /* If this is an addition of a constant, convert it to a subtraction
555 of a constant since we can do that faster. */
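/* For instance, with a modulus of 7, LHS + 5 is turned into LHS - 2 below,
   which yields the same value modulo 7.  */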
556 if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
558 rhs = fold_build2 (MINUS_EXPR, type, modulus, rhs);
559 op_code = MINUS_EXPR;
562 /* For the logical operations, we only need PRECISION bits. For
563 addition and subtraction, we need one more and for multiplication we
564 need twice as many. But we never want to make a size smaller than
565 our size. */
566 if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
567 needed_precision += 1;
568 else if (op_code == MULT_EXPR)
569 needed_precision *= 2;
571 precision = MAX (needed_precision, TYPE_PRECISION (op_type));
573 /* Unsigned will do for everything but subtraction. */
574 if (op_code == MINUS_EXPR)
575 unsignedp = false;
577 /* If our type is the wrong signedness or isn't wide enough, make a new
578 type and convert both our operands to it. */
579 if (TYPE_PRECISION (op_type) < precision
580 || TYPE_UNSIGNED (op_type) != unsignedp)
582 /* Copy the type so we ensure it can be modified to make it modular. */
583 op_type = copy_type (gnat_type_for_size (precision, unsignedp));
584 modulus = convert (op_type, modulus);
585 SET_TYPE_MODULUS (op_type, modulus);
586 TYPE_MODULAR_P (op_type) = 1;
587 lhs = convert (op_type, lhs);
588 rhs = convert (op_type, rhs);
591 /* Do the operation, then we'll fix it up. */
592 result = fold_build2 (op_code, op_type, lhs, rhs);
594 /* For multiplication, we have no choice but to do a full modulus
595 operation. However, we want to do this in the narrowest
596 possible size. */
597 if (op_code == MULT_EXPR)
599 /* Copy the type so we ensure it can be modified to make it modular. */
600 tree div_type = copy_type (gnat_type_for_size (needed_precision, 1));
601 modulus = convert (div_type, modulus);
602 SET_TYPE_MODULUS (div_type, modulus);
603 TYPE_MODULAR_P (div_type) = 1;
604 result = convert (op_type,
605 fold_build2 (TRUNC_MOD_EXPR, div_type,
606 convert (div_type, result), modulus));
609 /* For subtraction, add the modulus back if we are negative. */
610 else if (op_code == MINUS_EXPR)
612 result = gnat_protect_expr (result);
613 result = fold_build3 (COND_EXPR, op_type,
614 fold_build2 (LT_EXPR, boolean_type_node, result,
615 build_int_cst (op_type, 0)),
616 fold_build2 (PLUS_EXPR, op_type, result, modulus),
617 result);
620 /* For the other operations, subtract the modulus if we are >= it. */
621 else
623 result = gnat_protect_expr (result);
624 result = fold_build3 (COND_EXPR, op_type,
625 fold_build2 (GE_EXPR, boolean_type_node,
626 result, modulus),
627 fold_build2 (MINUS_EXPR, op_type,
628 result, modulus),
629 result);
632 return convert (type, result);
635 /* This page contains routines that implement the Ada semantics with regard
636 to atomic objects. They are fully piggybacked on the middle-end support
637 for atomic loads and stores.
639 *** Memory barriers and volatile objects ***
641 We implement the weakened form of the C.6(16) clause that was introduced
642 in Ada 2012 (AI05-117). Earlier forms of this clause wouldn't have been
643 implementable without significant performance hits on modern platforms.
645 We also take advantage of the requirements imposed on shared variables by
646 9.10 (conditions for sequential actions) to have non-erroneous execution
 647    and consider that C.6(16) and C.6(17) only prescribe a uniform order of
648 volatile updates with regard to sequential actions, i.e. with regard to
649 reads or updates of atomic objects.
651 As such, an update of an atomic object by a task requires that all earlier
652 accesses to volatile objects have completed. Similarly, later accesses to
653 volatile objects cannot be reordered before the update of the atomic object.
654 So, memory barriers both before and after the atomic update are needed.
656 For a read of an atomic object, to avoid seeing writes of volatile objects
657 by a task earlier than by the other tasks, a memory barrier is needed before
658 the atomic read. Finally, to avoid reordering later reads or updates of
659 volatile objects to before the atomic read, a barrier is needed after the
660 atomic read.
662 So, memory barriers are needed before and after atomic reads and updates.
663 And, in order to simplify the implementation, we use full memory barriers
664 in all cases, i.e. we enforce sequential consistency for atomic accesses. */
666 /* Return the size of TYPE, which must be a positive power of 2. */
668 static unsigned int
669 resolve_atomic_size (tree type)
671 unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
673 if (size == 1 || size == 2 || size == 4 || size == 8 || size == 16)
674 return size;
676 /* We shouldn't reach here without having already detected that the size
677 isn't compatible with an atomic access. */
678 gcc_assert (Serious_Errors_Detected);
680 return 0;
683 /* Build an atomic load for the underlying atomic object in SRC. SYNC is
684 true if the load requires synchronization. */
686 tree
687 build_atomic_load (tree src, bool sync)
689 tree ptr_type
690 = build_pointer_type
691 (build_qualified_type (void_type_node,
692 TYPE_QUAL_ATOMIC | TYPE_QUAL_VOLATILE));
693 tree mem_model
694 = build_int_cst (integer_type_node,
695 sync ? MEMMODEL_SEQ_CST : MEMMODEL_RELAXED);
696 tree orig_src = src;
697 tree t, addr, val;
698 unsigned int size;
699 int fncode;
701 /* Remove conversions to get the address of the underlying object. */
702 src = remove_conversions (src, false);
703 size = resolve_atomic_size (TREE_TYPE (src));
704 if (size == 0)
705 return orig_src;
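/* Select the sized variant of the atomic load built-in: the _1, _2, _4, _8
   and _16 forms follow BUILT_IN_ATOMIC_LOAD_N in the enumeration, so adding
   exact_log2 (size) + 1 picks the one matching SIZE.  */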
707 fncode = (int) BUILT_IN_ATOMIC_LOAD_N + exact_log2 (size) + 1;
708 t = builtin_decl_implicit ((enum built_in_function) fncode);
710 addr = build_unary_op (ADDR_EXPR, ptr_type, src);
711 val = build_call_expr (t, 2, addr, mem_model);
713 /* First reinterpret the loaded bits in the original type of the load,
714 then convert to the expected result type. */
715 t = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (src), val);
716 return convert (TREE_TYPE (orig_src), t);
719 /* Build an atomic store from SRC to the underlying atomic object in DEST.
720 SYNC is true if the store requires synchronization. */
722 tree
723 build_atomic_store (tree dest, tree src, bool sync)
725 tree ptr_type
726 = build_pointer_type
727 (build_qualified_type (void_type_node,
728 TYPE_QUAL_ATOMIC | TYPE_QUAL_VOLATILE));
729 tree mem_model
730 = build_int_cst (integer_type_node,
731 sync ? MEMMODEL_SEQ_CST : MEMMODEL_RELAXED);
732 tree orig_dest = dest;
733 tree t, int_type, addr;
734 unsigned int size;
735 int fncode;
737 /* Remove conversions to get the address of the underlying object. */
738 dest = remove_conversions (dest, false);
739 size = resolve_atomic_size (TREE_TYPE (dest));
740 if (size == 0)
741 return build_binary_op (MODIFY_EXPR, NULL_TREE, orig_dest, src);
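/* Likewise select the sized variant of the atomic store built-in from SIZE,
   relying on the same layout of the enumeration as for the loads above.  */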
743 fncode = (int) BUILT_IN_ATOMIC_STORE_N + exact_log2 (size) + 1;
744 t = builtin_decl_implicit ((enum built_in_function) fncode);
745 int_type = gnat_type_for_size (BITS_PER_UNIT * size, 1);
747 /* First convert the bits to be stored to the original type of the store,
748 then reinterpret them in the effective type. But if the original type
749 is a padded type with the same size, convert to the inner type instead,
750 as we don't want to artificially introduce a CONSTRUCTOR here. */
751 if (TYPE_IS_PADDING_P (TREE_TYPE (dest))
752 && TYPE_SIZE (TREE_TYPE (dest))
753 == TYPE_SIZE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (dest)))))
754 src = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (dest))), src);
755 else
756 src = convert (TREE_TYPE (dest), src);
757 src = fold_build1 (VIEW_CONVERT_EXPR, int_type, src);
758 addr = build_unary_op (ADDR_EXPR, ptr_type, dest);
760 return build_call_expr (t, 3, addr, src, mem_model);
763 /* Build a load-modify-store sequence from SRC to DEST. GNAT_NODE is used for
764 the location of the sequence. Note that, even though the load and the store
765 are both atomic, the sequence itself is not atomic. */
767 tree
768 build_load_modify_store (tree dest, tree src, Node_Id gnat_node)
770 /* We will be modifying DEST below so we build a copy. */
771 dest = copy_node (dest);
772 tree ref = dest;
774 while (handled_component_p (ref))
776 /* The load should already have been generated during the translation
777 of the GNAT destination tree; find it out in the GNU tree. */
778 if (TREE_CODE (TREE_OPERAND (ref, 0)) == VIEW_CONVERT_EXPR)
780 tree op = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
781 if (TREE_CODE (op) == CALL_EXPR && call_is_atomic_load (op))
783 tree type = TREE_TYPE (TREE_OPERAND (ref, 0));
784 tree t = CALL_EXPR_ARG (op, 0);
785 tree obj, temp, stmt;
787 /* Find out the loaded object. */
788 if (TREE_CODE (t) == NOP_EXPR)
789 t = TREE_OPERAND (t, 0);
790 if (TREE_CODE (t) == ADDR_EXPR)
791 obj = TREE_OPERAND (t, 0);
792 else
793 obj = build1 (INDIRECT_REF, type, t);
795 /* Drop atomic and volatile qualifiers for the temporary. */
796 type = TYPE_MAIN_VARIANT (type);
798 /* And drop BLKmode, if need be, to put it into a register. */
799 if (TYPE_MODE (type) == BLKmode)
801 unsigned int size = tree_to_uhwi (TYPE_SIZE (type));
802 type = copy_type (type);
803 machine_mode mode = int_mode_for_size (size, 0).else_blk ();
804 SET_TYPE_MODE (type, mode);
807 /* Create the temporary by inserting a SAVE_EXPR. */
808 temp = build1 (SAVE_EXPR, type,
809 build1 (VIEW_CONVERT_EXPR, type, op));
810 TREE_OPERAND (ref, 0) = temp;
812 start_stmt_group ();
814 /* Build the modify of the temporary. */
815 stmt = build_binary_op (MODIFY_EXPR, NULL_TREE, dest, src);
816 add_stmt_with_node (stmt, gnat_node);
818 /* Build the store to the object. */
819 stmt = build_atomic_store (obj, temp, false);
820 add_stmt_with_node (stmt, gnat_node);
822 return end_stmt_group ();
826 TREE_OPERAND (ref, 0) = copy_node (TREE_OPERAND (ref, 0));
827 ref = TREE_OPERAND (ref, 0);
830 /* Something went wrong earlier if we have not found the atomic load. */
831 gcc_unreachable ();
834 /* Make a binary operation of kind OP_CODE. RESULT_TYPE is the type
835 desired for the result. Usually the operation is to be performed
836 in that type. For INIT_EXPR and MODIFY_EXPR, RESULT_TYPE must be
837 NULL_TREE. For ARRAY_REF, RESULT_TYPE may be NULL_TREE, in which
838 case the type to be used will be derived from the operands.
839 Don't fold the result if NO_FOLD is true.
841 This function is very much unlike the ones for C and C++ since we
842 have already done any type conversion and matching required. All we
843 have to do here is validate the work done by SEM and handle subtypes. */
845 tree
846 build_binary_op (enum tree_code op_code, tree result_type,
847 tree left_operand, tree right_operand,
848 bool no_fold)
850 tree left_type = TREE_TYPE (left_operand);
851 tree right_type = TREE_TYPE (right_operand);
852 tree left_base_type = get_base_type (left_type);
853 tree right_base_type = get_base_type (right_type);
854 tree operation_type = result_type;
855 tree best_type = NULL_TREE;
856 tree modulus, result;
857 bool has_side_effects = false;
859 if (operation_type
860 && TREE_CODE (operation_type) == RECORD_TYPE
861 && TYPE_JUSTIFIED_MODULAR_P (operation_type))
862 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
864 if (operation_type
865 && TREE_CODE (operation_type) == INTEGER_TYPE
866 && TYPE_EXTRA_SUBTYPE_P (operation_type))
867 operation_type = get_base_type (operation_type);
869 modulus = (operation_type
870 && TREE_CODE (operation_type) == INTEGER_TYPE
871 && TYPE_MODULAR_P (operation_type)
872 ? TYPE_MODULUS (operation_type) : NULL_TREE);
874 switch (op_code)
876 case INIT_EXPR:
877 case MODIFY_EXPR:
878 gcc_checking_assert (!result_type);
880 /* If there were integral or pointer conversions on the LHS, remove
881 them; we'll be putting them back below if needed. Likewise for
882 conversions between array and record types, except for justified
883 modular types. But don't do this if the right operand is not
884 BLKmode (for packed arrays) unless we are not changing the mode. */
885 while ((CONVERT_EXPR_P (left_operand)
886 || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
887 && (((INTEGRAL_TYPE_P (left_type)
888 || POINTER_TYPE_P (left_type))
889 && (INTEGRAL_TYPE_P (TREE_TYPE
890 (TREE_OPERAND (left_operand, 0)))
891 || POINTER_TYPE_P (TREE_TYPE
892 (TREE_OPERAND (left_operand, 0)))))
893 || (((TREE_CODE (left_type) == RECORD_TYPE
894 && !TYPE_JUSTIFIED_MODULAR_P (left_type))
895 || TREE_CODE (left_type) == ARRAY_TYPE)
896 && ((TREE_CODE (TREE_TYPE
897 (TREE_OPERAND (left_operand, 0)))
898 == RECORD_TYPE)
899 || (TREE_CODE (TREE_TYPE
900 (TREE_OPERAND (left_operand, 0)))
901 == ARRAY_TYPE))
902 && (TYPE_MODE (right_type) == BLKmode
903 || (TYPE_MODE (left_type)
904 == TYPE_MODE (TREE_TYPE
905 (TREE_OPERAND
906 (left_operand, 0))))))))
908 left_operand = TREE_OPERAND (left_operand, 0);
909 left_type = TREE_TYPE (left_operand);
912 /* If a class-wide type may be involved, force use of the RHS type. */
913 if ((TREE_CODE (right_type) == RECORD_TYPE
914 || TREE_CODE (right_type) == UNION_TYPE)
915 && TYPE_ALIGN_OK (right_type))
916 operation_type = right_type;
918 /* If we are copying between padded objects with compatible types, use
 919 	 the padded view of the objects; this is very likely more efficient.
920 Likewise for a padded object that is assigned a constructor, if we
921 can convert the constructor to the inner type, to avoid putting a
922 VIEW_CONVERT_EXPR on the LHS. But don't do so if we wouldn't have
923 actually copied anything. */
924 else if (TYPE_IS_PADDING_P (left_type)
925 && TREE_CONSTANT (TYPE_SIZE (left_type))
926 && ((TREE_CODE (right_operand) == COMPONENT_REF
927 && TYPE_MAIN_VARIANT (left_type)
928 == TYPE_MAIN_VARIANT
929 (TREE_TYPE (TREE_OPERAND (right_operand, 0))))
930 || (TREE_CODE (right_operand) == CONSTRUCTOR
931 && !CONTAINS_PLACEHOLDER_P
932 (DECL_SIZE (TYPE_FIELDS (left_type)))))
933 && !integer_zerop (TYPE_SIZE (right_type)))
935 /* We make an exception for a BLKmode type padding a non-BLKmode
936 inner type and do the conversion of the LHS right away, since
937 unchecked_convert wouldn't do it properly. */
938 if (TYPE_MODE (left_type) == BLKmode
939 && TYPE_MODE (right_type) != BLKmode
940 && TREE_CODE (right_operand) != CONSTRUCTOR)
942 operation_type = right_type;
943 left_operand = convert (operation_type, left_operand);
944 left_type = operation_type;
946 else
947 operation_type = left_type;
950 /* If we have a call to a function that returns with variable size, use
951 the RHS type in case we want to use the return slot optimization. */
952 else if (TREE_CODE (right_operand) == CALL_EXPR
953 && return_type_with_variable_size_p (right_type))
954 operation_type = right_type;
956 /* Find the best type to use for copying between aggregate types. */
957 else if (((TREE_CODE (left_type) == ARRAY_TYPE
958 && TREE_CODE (right_type) == ARRAY_TYPE)
959 || (TREE_CODE (left_type) == RECORD_TYPE
960 && TREE_CODE (right_type) == RECORD_TYPE))
961 && (best_type = find_common_type (left_type, right_type)))
962 operation_type = best_type;
964 /* Otherwise use the LHS type. */
965 else
966 operation_type = left_type;
968 /* Ensure everything on the LHS is valid. If we have a field reference,
969 strip anything that get_inner_reference can handle. Then remove any
970 conversions between types having the same code and mode. And mark
971 VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE. When done, we must have
972 either an INDIRECT_REF, a NULL_EXPR, a SAVE_EXPR or a DECL node. */
973 result = left_operand;
974 while (true)
976 tree restype = TREE_TYPE (result);
978 if (TREE_CODE (result) == COMPONENT_REF
979 || TREE_CODE (result) == ARRAY_REF
980 || TREE_CODE (result) == ARRAY_RANGE_REF)
981 while (handled_component_p (result))
982 result = TREE_OPERAND (result, 0);
983 else if (TREE_CODE (result) == REALPART_EXPR
984 || TREE_CODE (result) == IMAGPART_EXPR
985 || (CONVERT_EXPR_P (result)
986 && (((TREE_CODE (restype)
987 == TREE_CODE (TREE_TYPE
988 (TREE_OPERAND (result, 0))))
989 && (TYPE_MODE (TREE_TYPE
990 (TREE_OPERAND (result, 0)))
991 == TYPE_MODE (restype)))
992 || TYPE_ALIGN_OK (restype))))
993 result = TREE_OPERAND (result, 0);
994 else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
996 TREE_ADDRESSABLE (result) = 1;
997 result = TREE_OPERAND (result, 0);
999 else
1000 break;
1003 gcc_assert (TREE_CODE (result) == INDIRECT_REF
1004 || TREE_CODE (result) == NULL_EXPR
1005 || TREE_CODE (result) == SAVE_EXPR
1006 || DECL_P (result));
1008 /* Convert the right operand to the operation type unless it is
1009 either already of the correct type or if the type involves a
1010 placeholder, since the RHS may not have the same record type. */
1011 if (operation_type != right_type
1012 && !CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type)))
1014 right_operand = convert (operation_type, right_operand);
1015 right_type = operation_type;
1018 /* If the left operand is not of the same type as the operation
1019 type, wrap it up in a VIEW_CONVERT_EXPR. */
1020 if (left_type != operation_type)
1021 left_operand = unchecked_convert (operation_type, left_operand, false);
1023 has_side_effects = true;
1024 modulus = NULL_TREE;
1025 break;
1027 case ARRAY_REF:
1028 if (!operation_type)
1029 operation_type = TREE_TYPE (left_type);
1031 /* ... fall through ... */
1033 case ARRAY_RANGE_REF:
1034 /* First look through conversion between type variants. Note that
1035 this changes neither the operation type nor the type domain. */
1036 if (TREE_CODE (left_operand) == VIEW_CONVERT_EXPR
1037 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (left_operand, 0)))
1038 == TYPE_MAIN_VARIANT (left_type))
1040 left_operand = TREE_OPERAND (left_operand, 0);
1041 left_type = TREE_TYPE (left_operand);
1044 /* For a range, make sure the element type is consistent. */
1045 if (op_code == ARRAY_RANGE_REF
1046 && TREE_TYPE (operation_type) != TREE_TYPE (left_type))
1047 operation_type = build_array_type (TREE_TYPE (left_type),
1048 TYPE_DOMAIN (operation_type));
1050 /* Then convert the right operand to its base type. This will prevent
1051 unneeded sign conversions when sizetype is wider than integer. */
1052 right_operand = convert (right_base_type, right_operand);
1053 right_operand = convert_to_index_type (right_operand);
1054 modulus = NULL_TREE;
1055 break;
1057 case TRUTH_ANDIF_EXPR:
1058 case TRUTH_ORIF_EXPR:
1059 case TRUTH_AND_EXPR:
1060 case TRUTH_OR_EXPR:
1061 case TRUTH_XOR_EXPR:
1062 gcc_checking_assert
1063 (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
1064 operation_type = left_base_type;
1065 left_operand = convert (operation_type, left_operand);
1066 right_operand = convert (operation_type, right_operand);
1067 break;
1069 case GE_EXPR:
1070 case LE_EXPR:
1071 case GT_EXPR:
1072 case LT_EXPR:
1073 case EQ_EXPR:
1074 case NE_EXPR:
1075 gcc_checking_assert
1076 (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
1077 /* If either operand is a NULL_EXPR, just return a new one. */
1078 if (TREE_CODE (left_operand) == NULL_EXPR)
1079 return build2 (op_code, result_type,
1080 build1 (NULL_EXPR, integer_type_node,
1081 TREE_OPERAND (left_operand, 0)),
1082 integer_zero_node);
1084 else if (TREE_CODE (right_operand) == NULL_EXPR)
1085 return build2 (op_code, result_type,
1086 build1 (NULL_EXPR, integer_type_node,
1087 TREE_OPERAND (right_operand, 0)),
1088 integer_zero_node);
 1090       /* If either object is a justified modular type, get the
1091 fields from within. */
1092 if (TREE_CODE (left_type) == RECORD_TYPE
1093 && TYPE_JUSTIFIED_MODULAR_P (left_type))
1095 left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
1096 left_operand);
1097 left_type = TREE_TYPE (left_operand);
1098 left_base_type = get_base_type (left_type);
1101 if (TREE_CODE (right_type) == RECORD_TYPE
1102 && TYPE_JUSTIFIED_MODULAR_P (right_type))
1104 right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
1105 right_operand);
1106 right_type = TREE_TYPE (right_operand);
1107 right_base_type = get_base_type (right_type);
1110 /* If both objects are arrays, compare them specially. */
1111 if ((TREE_CODE (left_type) == ARRAY_TYPE
1112 || (TREE_CODE (left_type) == INTEGER_TYPE
1113 && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
1114 && (TREE_CODE (right_type) == ARRAY_TYPE
1115 || (TREE_CODE (right_type) == INTEGER_TYPE
1116 && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
1118 result = compare_arrays (input_location,
1119 result_type, left_operand, right_operand);
1120 if (op_code == NE_EXPR)
1121 result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
1122 else
1123 gcc_assert (op_code == EQ_EXPR);
1125 return result;
1128 /* Otherwise, the base types must be the same, unless they are both fat
1129 pointer types or record types. In the latter case, use the best type
1130 and convert both operands to that type. */
1131 if (left_base_type != right_base_type)
1133 if (TYPE_IS_FAT_POINTER_P (left_base_type)
1134 && TYPE_IS_FAT_POINTER_P (right_base_type))
1136 gcc_assert (TYPE_MAIN_VARIANT (left_base_type)
1137 == TYPE_MAIN_VARIANT (right_base_type));
1138 best_type = left_base_type;
1141 else if (TREE_CODE (left_base_type) == RECORD_TYPE
1142 && TREE_CODE (right_base_type) == RECORD_TYPE)
1144 /* The only way this is permitted is if both types have the same
1145 name. In that case, one of them must not be self-referential.
1146 Use it as the best type. Even better with a fixed size. */
1147 gcc_assert (TYPE_NAME (left_base_type)
1148 && TYPE_NAME (left_base_type)
1149 == TYPE_NAME (right_base_type));
1151 if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
1152 best_type = left_base_type;
1153 else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
1154 best_type = right_base_type;
1155 else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
1156 best_type = left_base_type;
1157 else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
1158 best_type = right_base_type;
1159 else
1160 gcc_unreachable ();
1163 else if (POINTER_TYPE_P (left_base_type)
1164 && POINTER_TYPE_P (right_base_type))
1166 gcc_assert (TREE_TYPE (left_base_type)
1167 == TREE_TYPE (right_base_type));
1168 best_type = left_base_type;
1170 else
1171 gcc_unreachable ();
1173 left_operand = convert (best_type, left_operand);
1174 right_operand = convert (best_type, right_operand);
1176 else
1178 left_operand = convert (left_base_type, left_operand);
1179 right_operand = convert (right_base_type, right_operand);
1182 /* If both objects are fat pointers, compare them specially. */
1183 if (TYPE_IS_FAT_POINTER_P (left_base_type))
1185 result
1186 = compare_fat_pointers (input_location,
1187 result_type, left_operand, right_operand);
1188 if (op_code == NE_EXPR)
1189 result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
1190 else
1191 gcc_assert (op_code == EQ_EXPR);
1193 return result;
1196 modulus = NULL_TREE;
1197 break;
1199 case LSHIFT_EXPR:
1200 case RSHIFT_EXPR:
1201 case LROTATE_EXPR:
1202 case RROTATE_EXPR:
1203 /* The RHS of a shift can be any type. Also, ignore any modulus
1204 (we used to abort, but this is needed for unchecked conversion
1205 to modular types). Otherwise, processing is the same as normal. */
1206 gcc_assert (operation_type == left_base_type);
1207 modulus = NULL_TREE;
1208 left_operand = convert (operation_type, left_operand);
1209 break;
1211 case BIT_AND_EXPR:
1212 case BIT_IOR_EXPR:
1213 case BIT_XOR_EXPR:
1214 /* For binary modulus, if the inputs are in range, so are the
1215 outputs. */
1216 if (modulus && integer_pow2p (modulus))
1217 modulus = NULL_TREE;
1218 goto common;
1220 case COMPLEX_EXPR:
1221 gcc_assert (TREE_TYPE (result_type) == left_base_type
1222 && TREE_TYPE (result_type) == right_base_type);
1223 left_operand = convert (left_base_type, left_operand);
1224 right_operand = convert (right_base_type, right_operand);
1225 break;
1227 case TRUNC_DIV_EXPR: case TRUNC_MOD_EXPR:
1228 case CEIL_DIV_EXPR: case CEIL_MOD_EXPR:
1229 case FLOOR_DIV_EXPR: case FLOOR_MOD_EXPR:
1230 case ROUND_DIV_EXPR: case ROUND_MOD_EXPR:
1231 /* These always produce results lower than either operand. */
1232 modulus = NULL_TREE;
1233 goto common;
1235 case POINTER_PLUS_EXPR:
1236 gcc_assert (operation_type == left_base_type
1237 && sizetype == right_base_type);
1238 left_operand = convert (operation_type, left_operand);
1239 right_operand = convert (sizetype, right_operand);
1240 break;
1242 case PLUS_NOMOD_EXPR:
1243 case MINUS_NOMOD_EXPR:
1244 if (op_code == PLUS_NOMOD_EXPR)
1245 op_code = PLUS_EXPR;
1246 else
1247 op_code = MINUS_EXPR;
1248 modulus = NULL_TREE;
1250 /* ... fall through ... */
1252 case PLUS_EXPR:
1253 case MINUS_EXPR:
1254 /* Avoid doing arithmetics in ENUMERAL_TYPE or BOOLEAN_TYPE like the
1255 other compilers. Contrary to C, Ada doesn't allow arithmetics in
1256 these types but can generate addition/subtraction for Succ/Pred. */
1257 if (operation_type
1258 && (TREE_CODE (operation_type) == ENUMERAL_TYPE
1259 || TREE_CODE (operation_type) == BOOLEAN_TYPE))
1260 operation_type = left_base_type = right_base_type
1261 = gnat_type_for_mode (TYPE_MODE (operation_type),
1262 TYPE_UNSIGNED (operation_type));
1264 /* ... fall through ... */
1266 default:
1267 common:
 1268       /* The result type should be the same as the base types of
 1269 	 both operands (and they should be the same).  Convert
1270 everything to the result type. */
1272 gcc_assert (operation_type == left_base_type
1273 && left_base_type == right_base_type);
1274 left_operand = convert (operation_type, left_operand);
1275 right_operand = convert (operation_type, right_operand);
1278 if (modulus && !integer_pow2p (modulus))
1280 result = nonbinary_modular_operation (op_code, operation_type,
1281 left_operand, right_operand);
1282 modulus = NULL_TREE;
1284 /* If either operand is a NULL_EXPR, just return a new one. */
1285 else if (TREE_CODE (left_operand) == NULL_EXPR)
1286 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
1287 else if (TREE_CODE (right_operand) == NULL_EXPR)
1288 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
1289 else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
1291 result = build4 (op_code, operation_type, left_operand, right_operand,
1292 NULL_TREE, NULL_TREE);
1293 if (!no_fold)
1294 result = fold (result);
1296 else if (op_code == INIT_EXPR || op_code == MODIFY_EXPR)
1297 result = build2 (op_code, void_type_node, left_operand, right_operand);
1298 else if (no_fold)
1299 result = build2 (op_code, operation_type, left_operand, right_operand);
1300 else
1301 result
1302 = fold_build2 (op_code, operation_type, left_operand, right_operand);
 1304   if (TREE_CONSTANT (result))
 1305     ;
1306 else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
1308 if (TYPE_VOLATILE (operation_type))
1309 TREE_THIS_VOLATILE (result) = 1;
1311 else
1312 TREE_CONSTANT (result)
1313 |= (TREE_CONSTANT (left_operand) && TREE_CONSTANT (right_operand));
1315 TREE_SIDE_EFFECTS (result) |= has_side_effects;
1317 /* If we are working with modular types, perform the MOD operation
1318 if something above hasn't eliminated the need for it. */
1319 if (modulus)
1321 modulus = convert (operation_type, modulus);
1322 if (no_fold)
1323 result = build2 (FLOOR_MOD_EXPR, operation_type, result, modulus);
1324 else
1325 result = fold_build2 (FLOOR_MOD_EXPR, operation_type, result, modulus);
1328 if (result_type && result_type != operation_type)
1329 result = convert (result_type, result);
1331 return result;
1334 /* Similar, but for unary operations. */
1336 tree
1337 build_unary_op (enum tree_code op_code, tree result_type, tree operand)
1339 tree type = TREE_TYPE (operand);
1340 tree base_type = get_base_type (type);
1341 tree operation_type = result_type;
1342 tree result;
1344 if (operation_type
1345 && TREE_CODE (operation_type) == RECORD_TYPE
1346 && TYPE_JUSTIFIED_MODULAR_P (operation_type))
1347 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
1349 if (operation_type
1350 && TREE_CODE (operation_type) == INTEGER_TYPE
1351 && TYPE_EXTRA_SUBTYPE_P (operation_type))
1352 operation_type = get_base_type (operation_type);
1354 switch (op_code)
1356 case REALPART_EXPR:
1357 case IMAGPART_EXPR:
1358 if (!operation_type)
1359 result_type = operation_type = TREE_TYPE (type);
1360 else
1361 gcc_assert (result_type == TREE_TYPE (type));
1363 result = fold_build1 (op_code, operation_type, operand);
1364 break;
1366 case TRUTH_NOT_EXPR:
1367 gcc_checking_assert
1368 (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
1369 result = invert_truthvalue_loc (EXPR_LOCATION (operand), operand);
1370 /* When not optimizing, fold the result as invert_truthvalue_loc
1371 doesn't fold the result of comparisons. This is intended to undo
1372 the trick used for boolean rvalues in gnat_to_gnu. */
1373 if (!optimize)
1374 result = fold (result);
1375 break;
1377 case ATTR_ADDR_EXPR:
1378 case ADDR_EXPR:
1379 switch (TREE_CODE (operand))
1381 case INDIRECT_REF:
1382 case UNCONSTRAINED_ARRAY_REF:
1383 result = TREE_OPERAND (operand, 0);
1385 /* Make sure the type here is a pointer, not a reference.
1386 GCC wants pointer types for function addresses. */
1387 if (!result_type)
1388 result_type = build_pointer_type (type);
1390 /* If the underlying object can alias everything, propagate the
1391 property since we are effectively retrieving the object. */
1392 if (POINTER_TYPE_P (TREE_TYPE (result))
1393 && TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (result)))
1395 if (TREE_CODE (result_type) == POINTER_TYPE
1396 && !TYPE_REF_CAN_ALIAS_ALL (result_type))
1397 result_type
1398 = build_pointer_type_for_mode (TREE_TYPE (result_type),
1399 TYPE_MODE (result_type),
1400 true);
1401 else if (TREE_CODE (result_type) == REFERENCE_TYPE
1402 && !TYPE_REF_CAN_ALIAS_ALL (result_type))
1403 result_type
1404 = build_reference_type_for_mode (TREE_TYPE (result_type),
1405 TYPE_MODE (result_type),
1406 true);
1408 break;
1410 case NULL_EXPR:
1411 result = operand;
1412 TREE_TYPE (result) = type = build_pointer_type (type);
1413 break;
1415 case COMPOUND_EXPR:
 1416 	  /* Fold a compound expression if it has unconstrained array type
 1417 	     since the middle-end cannot handle it.  But we don't do it in the
1418 general case because it may introduce aliasing issues if the
1419 first operand is an indirect assignment and the second operand
1420 the corresponding address, e.g. for an allocator. However do
1421 it for a return value to expose it for later recognition. */
1422 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE
1423 || (TREE_CODE (TREE_OPERAND (operand, 1)) == VAR_DECL
1424 && DECL_RETURN_VALUE_P (TREE_OPERAND (operand, 1))))
1426 result = build_unary_op (ADDR_EXPR, result_type,
1427 TREE_OPERAND (operand, 1));
1428 result = build2 (COMPOUND_EXPR, TREE_TYPE (result),
1429 TREE_OPERAND (operand, 0), result);
1430 break;
1432 goto common;
1434 case ARRAY_REF:
1435 case ARRAY_RANGE_REF:
1436 case COMPONENT_REF:
1437 case BIT_FIELD_REF:
1438 /* If this is for 'Address, find the address of the prefix and add
1439 the offset to the field. Otherwise, do this the normal way. */
1440 if (op_code == ATTR_ADDR_EXPR)
1442 HOST_WIDE_INT bitsize;
1443 HOST_WIDE_INT bitpos;
1444 tree offset, inner;
1445 machine_mode mode;
1446 int unsignedp, reversep, volatilep;
1448 inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
1449 &mode, &unsignedp, &reversep,
1450 &volatilep);
1452 /* If INNER is a padding type whose field has a self-referential
1453 size, convert to that inner type. We know the offset is zero
1454 and we need to have that type visible. */
1455 if (type_is_padding_self_referential (TREE_TYPE (inner)))
1456 inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
1457 inner);
1459 /* Compute the offset as a byte offset from INNER. */
1460 if (!offset)
1461 offset = size_zero_node;
1463 offset = size_binop (PLUS_EXPR, offset,
1464 size_int (bitpos / BITS_PER_UNIT));
1466 /* Take the address of INNER, convert it to a pointer to our type
1467 and add the offset. */
1468 inner = build_unary_op (ADDR_EXPR,
1469 build_pointer_type (TREE_TYPE (operand)),
1470 inner);
1471 result = build_binary_op (POINTER_PLUS_EXPR, TREE_TYPE (inner),
1472 inner, offset);
1473 break;
1475 goto common;
1477 case CONSTRUCTOR:
1478 /* If this is just a constructor for a padded record, we can
1479 just take the address of the single field and convert it to
1480 a pointer to our type. */
1481 if (TYPE_IS_PADDING_P (type))
1483 result
1484 = build_unary_op (ADDR_EXPR,
1485 build_pointer_type (TREE_TYPE (operand)),
1486 CONSTRUCTOR_ELT (operand, 0)->value);
1487 break;
1489 goto common;
1491 case NOP_EXPR:
1492 if (AGGREGATE_TYPE_P (type)
1493 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
1494 return build_unary_op (ADDR_EXPR, result_type,
1495 TREE_OPERAND (operand, 0));
1497 /* ... fallthru ... */
1499 case VIEW_CONVERT_EXPR:
 1500 	  /* If this is just a variant conversion or if the conversion doesn't
1501 change the mode, get the result type from this type and go down.
1502 This is needed for conversions of CONST_DECLs, to eventually get
1503 to the address of their CORRESPONDING_VARs. */
1504 if ((TYPE_MAIN_VARIANT (type)
1505 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (operand, 0))))
1506 || (TYPE_MODE (type) != BLKmode
1507 && (TYPE_MODE (type)
1508 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0))))))
1509 return build_unary_op (ADDR_EXPR,
1510 (result_type ? result_type
1511 : build_pointer_type (type)),
1512 TREE_OPERAND (operand, 0));
1513 goto common;
1515 case CONST_DECL:
1516 operand = DECL_CONST_CORRESPONDING_VAR (operand);
1518 /* ... fall through ... */
1520 default:
1521 common:
1523 /* If we are taking the address of a padded record whose field
1524 contains a template, take the address of the field. */
1525 if (TYPE_IS_PADDING_P (type)
1526 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
1527 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
1529 type = TREE_TYPE (TYPE_FIELDS (type));
1530 operand = convert (type, operand);
1533 gnat_mark_addressable (operand);
1534 result = build_fold_addr_expr (operand);
1537 TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
1538 break;
1540 case INDIRECT_REF:
1542 tree t = remove_conversions (operand, false);
1543 bool can_never_be_null = DECL_P (t) && DECL_CAN_NEVER_BE_NULL_P (t);
1545 /* If TYPE is a thin pointer, either first retrieve the base if this
1546 is an expression with an offset built for the initialization of an
1547 object with an unconstrained nominal subtype, or else convert to
1548 the fat pointer. */
1549 if (TYPE_IS_THIN_POINTER_P (type))
1551 tree rec_type = TREE_TYPE (type);
1553 if (TREE_CODE (operand) == POINTER_PLUS_EXPR
1554 && TREE_OPERAND (operand, 1)
1555 == byte_position (DECL_CHAIN (TYPE_FIELDS (rec_type)))
1556 && TREE_CODE (TREE_OPERAND (operand, 0)) == NOP_EXPR)
1558 operand = TREE_OPERAND (TREE_OPERAND (operand, 0), 0);
1559 type = TREE_TYPE (operand);
1561 else if (TYPE_UNCONSTRAINED_ARRAY (rec_type))
1563 operand
1564 = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (rec_type)),
1565 operand);
1566 type = TREE_TYPE (operand);
1570 /* If we want to refer to an unconstrained array, use the appropriate
1571 expression. But this will never survive down to the back-end. */
1572 if (TYPE_IS_FAT_POINTER_P (type))
1574 result = build1 (UNCONSTRAINED_ARRAY_REF,
1575 TYPE_UNCONSTRAINED_ARRAY (type), operand);
1576 TREE_READONLY (result)
1577 = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
1580 /* If we are dereferencing an ADDR_EXPR, return its operand. */
1581 else if (TREE_CODE (operand) == ADDR_EXPR)
1582 result = TREE_OPERAND (operand, 0);
1584 /* Otherwise, build and fold the indirect reference. */
1585 else
1587 result = build_fold_indirect_ref (operand);
1588 TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
1591 if (!TYPE_IS_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)))
1593 TREE_SIDE_EFFECTS (result) = 1;
1594 if (TREE_CODE (result) == INDIRECT_REF)
1595 TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
1598 if ((TREE_CODE (result) == INDIRECT_REF
1599 || TREE_CODE (result) == UNCONSTRAINED_ARRAY_REF)
1600 && can_never_be_null)
1601 TREE_THIS_NOTRAP (result) = 1;
1603 break;
1606 case NEGATE_EXPR:
1607 case BIT_NOT_EXPR:
1609 tree modulus = ((operation_type
1610 && TREE_CODE (operation_type) == INTEGER_TYPE
1611 && TYPE_MODULAR_P (operation_type))
1612 ? TYPE_MODULUS (operation_type) : NULL_TREE);
1613 int mod_pow2 = modulus && integer_pow2p (modulus);
1615 /* If this is a modular type, there are various possibilities
1616 depending on the operation and whether the modulus is a
1617 power of two or not. */
1619 if (modulus)
1621 gcc_assert (operation_type == base_type);
1622 operand = convert (operation_type, operand);
1624 /* The fastest in the negate case for binary modulus is
1625 the straightforward code; the TRUNC_MOD_EXPR below
1626 is an AND operation. */
1627 if (op_code == NEGATE_EXPR && mod_pow2)
1628 result = fold_build2 (TRUNC_MOD_EXPR, operation_type,
1629 fold_build1 (NEGATE_EXPR, operation_type,
1630 operand),
1631 modulus);
1633 /* For nonbinary negate case, return zero for zero operand,
1634 else return the modulus minus the operand. If the modulus
1635 is a power of two minus one, we can do the subtraction
1636 as an XOR since it is equivalent and faster on most machines. */
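/* For instance, with a modulus of 255, 255 - OPERAND and 255 ^ OPERAND
   coincide for any OPERAND in the range 0 .. 255.  */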
1637 else if (op_code == NEGATE_EXPR && !mod_pow2)
1639 if (integer_pow2p (fold_build2 (PLUS_EXPR, operation_type,
1640 modulus,
1641 build_int_cst (operation_type,
1642 1))))
1643 result = fold_build2 (BIT_XOR_EXPR, operation_type,
1644 operand, modulus);
1645 else
1646 result = fold_build2 (MINUS_EXPR, operation_type,
1647 modulus, operand);
1649 result = fold_build3 (COND_EXPR, operation_type,
1650 fold_build2 (NE_EXPR,
1651 boolean_type_node,
1652 operand,
1653 build_int_cst
1654 (operation_type, 0)),
1655 result, operand);
1657 else
1659 /* For the NOT cases, we need a constant equal to
1660 the modulus minus one. For a binary modulus, we XOR
1661 the operand against this constant; for a nonbinary
1662 modulus, we subtract the operand from it. */
1664 tree cnst = fold_build2 (MINUS_EXPR, operation_type, modulus,
1665 build_int_cst (operation_type, 1));
1667 if (mod_pow2)
1668 result = fold_build2 (BIT_XOR_EXPR, operation_type,
1669 operand, cnst);
1670 else
1671 result = fold_build2 (MINUS_EXPR, operation_type,
1672 cnst, operand);
1675 break;
1679 /* ... fall through ... */
1681 default:
1682 gcc_assert (operation_type == base_type);
1683 result = fold_build1 (op_code, operation_type,
1684 convert (operation_type, operand));
1687 if (result_type && TREE_TYPE (result) != result_type)
1688 result = convert (result_type, result);
1690 return result;
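/* A minimal host-side sketch of the modular identities used above, assuming
   an unsigned integer wide enough for the modulus; the helper names and the
   example values in the trailing comment are purely illustrative.  */

static unsigned
example_mod_negate (unsigned x, unsigned m)
{
  /* Binary modulus: the reduction folds to a mask, i.e. an AND.  */
  if ((m & (m - 1)) == 0)
    return (0u - x) & (m - 1);

  /* Nonbinary modulus: zero stays zero, otherwise m - x; when m is a power
     of two minus one, the subtraction is equivalent to an XOR.  */
  if (x == 0)
    return 0;
  return ((m + 1) & m) == 0 ? (x ^ m) : (m - x);
}

static unsigned
example_mod_not (unsigned x, unsigned m)
{
  /* "not x" is m - 1 - x; for a binary modulus this is an XOR with m - 1.  */
  unsigned cnst = m - 1;
  return (m & (m - 1)) == 0 ? (x ^ cnst) : (cnst - x);
}

/* For instance example_mod_negate (5, 16) == 11, example_mod_negate (5, 7)
   == 2 == (5 ^ 7), example_mod_not (3, 16) == 12 and example_mod_not (3, 10)
   == 6.  */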
1693 /* Similar, but for COND_EXPR. */
1695 tree
1696 build_cond_expr (tree result_type, tree condition_operand,
1697 tree true_operand, tree false_operand)
1699 bool addr_p = false;
1700 tree result;
1702 /* The front-end verified that the result, true and false operands have
1703 the same base type. Convert everything to the result type. */
1704 true_operand = convert (result_type, true_operand);
1705 false_operand = convert (result_type, false_operand);
1707 /* If the result type is unconstrained, take the address of the operands and
1708 then dereference the result. Likewise if the result type is passed by
1709 reference, because creating a temporary of this type is not allowed. */
1710 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1711 || TYPE_IS_BY_REFERENCE_P (result_type)
1712 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1714 result_type = build_pointer_type (result_type);
1715 true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
1716 false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
1717 addr_p = true;
1720 result = fold_build3 (COND_EXPR, result_type, condition_operand,
1721 true_operand, false_operand);
1723 /* If we have a common SAVE_EXPR (possibly surrounded by arithmetic)
1724 in both arms, make sure it gets evaluated by moving it ahead of the
1725 conditional expression. This is necessary because it is evaluated
1726 in only one place at run time and would otherwise be uninitialized
1727 in one of the arms. */
1728 true_operand = skip_simple_arithmetic (true_operand);
1729 false_operand = skip_simple_arithmetic (false_operand);
1731 if (true_operand == false_operand && TREE_CODE (true_operand) == SAVE_EXPR)
1732 result = build2 (COMPOUND_EXPR, result_type, true_operand, result);
1734 if (addr_p)
1735 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1737 return result;
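/* A minimal usage sketch of build_cond_expr, assuming GNU_HIGH and GNU_LOW
   are previously built expressions of some integer type; the wrapper name
   is hypothetical.  */

static tree
example_build_max (tree gnu_high, tree gnu_low)
{
  return build_cond_expr (integer_type_node,
                          build_binary_op (GE_EXPR, boolean_type_node,
                                           gnu_high, gnu_low),
                          gnu_high, gnu_low);
}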
1740 /* Similar, but for COMPOUND_EXPR. */
1742 tree
1743 build_compound_expr (tree result_type, tree stmt_operand, tree expr_operand)
1745 bool addr_p = false;
1746 tree result;
1748 /* If the result type is unconstrained, take the address of the operand and
1749 then dereference the result. Likewise if the result type is passed by
1750 reference, but this is natively handled in the gimplifier. */
1751 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1752 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1754 result_type = build_pointer_type (result_type);
1755 expr_operand = build_unary_op (ADDR_EXPR, result_type, expr_operand);
1756 addr_p = true;
1759 result = fold_build2 (COMPOUND_EXPR, result_type, stmt_operand,
1760 expr_operand);
1762 if (addr_p)
1763 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1765 return result;
1768 /* Conveniently construct a function call expression. FNDECL names the
1769 function to be called, N is the number of arguments, and the "..."
1770 parameters are the argument expressions. Unlike build_call_expr
1771 this doesn't fold the call, hence it will always return a CALL_EXPR. */
1773 tree
1774 build_call_n_expr (tree fndecl, int n, ...)
1776 va_list ap;
1777 tree fntype = TREE_TYPE (fndecl);
1778 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
1780 va_start (ap, n);
1781 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
1782 va_end (ap);
1783 return fn;
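/* A minimal usage sketch: build an unfolded call to the default allocator
   declaration (malloc_decl, used by maybe_wrap_malloc below) for a fixed
   64-byte request.  The wrapper name is hypothetical.  */

static tree
example_malloc_64 (void)
{
  return build_call_n_expr (malloc_decl, 1, size_int (64));
}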
1786 /* Build a goto to LABEL for a raise, with an optional call to Local_Raise.
1787 MSG gives the exception's identity for the call to Local_Raise, if any. */
1789 static tree
1790 build_goto_raise (tree label, int msg)
1792 tree gnu_result = build1 (GOTO_EXPR, void_type_node, label);
1793 Entity_Id local_raise = Get_Local_Raise_Call_Entity ();
1795 /* If Local_Raise is present, build Local_Raise (Exception'Identity). */
1796 if (Present (local_raise))
1798 tree gnu_local_raise
1799 = gnat_to_gnu_entity (local_raise, NULL_TREE, false);
1800 tree gnu_exception_entity
1801 = gnat_to_gnu_entity (Get_RT_Exception_Entity (msg), NULL_TREE, false);
1802 tree gnu_call
1803 = build_call_n_expr (gnu_local_raise, 1,
1804 build_unary_op (ADDR_EXPR, NULL_TREE,
1805 gnu_exception_entity));
1806 gnu_result
1807 = build2 (COMPOUND_EXPR, void_type_node, gnu_call, gnu_result);
1810 return gnu_result;
1813 /* Expand the SLOC of GNAT_NODE, if present, into tree location information
1814 pointed to by FILENAME, LINE and COL. Fall back to the current location
1815 if GNAT_NODE is absent or has no SLOC. */
1817 static void
1818 expand_sloc (Node_Id gnat_node, tree *filename, tree *line, tree *col)
1820 const char *str;
1821 int line_number, column_number;
1823 if (Debug_Flag_NN || Exception_Locations_Suppressed)
1825 str = "";
1826 line_number = 0;
1827 column_number = 0;
1829 else if (Present (gnat_node) && Sloc (gnat_node) != No_Location)
1831 str = Get_Name_String
1832 (Debug_Source_Name (Get_Source_File_Index (Sloc (gnat_node))));
1833 line_number = Get_Logical_Line_Number (Sloc (gnat_node));
1834 column_number = Get_Column_Number (Sloc (gnat_node));
1836 else
1838 str = lbasename (LOCATION_FILE (input_location));
1839 line_number = LOCATION_LINE (input_location);
1840 column_number = LOCATION_COLUMN (input_location);
1843 const int len = strlen (str);
1844 *filename = build_string (len, str);
1845 TREE_TYPE (*filename) = build_array_type (char_type_node,
1846 build_index_type (size_int (len)));
1847 *line = build_int_cst (NULL_TREE, line_number);
1848 if (col)
1849 *col = build_int_cst (NULL_TREE, column_number);
1852 /* Build a call to a function that raises an exception and passes file name
1853 and line number, if requested. MSG says which exception function to call.
1854 GNAT_NODE is the node conveying the source location for which the error
1855 should be signaled, or Empty in which case the error is signaled for the
1856 current location. KIND says which kind of exception node this is for,
1857 among N_Raise_{Constraint,Storage,Program}_Error. */
1859 tree
1860 build_call_raise (int msg, Node_Id gnat_node, char kind)
1862 tree fndecl = gnat_raise_decls[msg];
1863 tree label = get_exception_label (kind);
1864 tree filename, line;
1866 /* If this is to be done as a goto, handle that case. */
1867 if (label)
1868 return build_goto_raise (label, msg);
1870 expand_sloc (gnat_node, &filename, &line, NULL);
1872 return
1873 build_call_n_expr (fndecl, 2,
1874 build1 (ADDR_EXPR,
1875 build_pointer_type (char_type_node),
1876 filename),
1877 line);
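/* A minimal usage sketch: signal Constraint_Error for the location of
   GNAT_NODE, as build_allocator below does in type_annotate_only mode.
   The wrapper name is hypothetical.  */

static tree
example_raise_ce (Node_Id gnat_node)
{
  return build_call_raise (CE_Range_Check_Failed, gnat_node,
                           N_Raise_Constraint_Error);
}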
1880 /* Similar to build_call_raise, with extra information about the column
1881 where the check failed. */
1883 tree
1884 build_call_raise_column (int msg, Node_Id gnat_node, char kind)
1886 tree fndecl = gnat_raise_decls_ext[msg];
1887 tree label = get_exception_label (kind);
1888 tree filename, line, col;
1890 /* If this is to be done as a goto, handle that case. */
1891 if (label)
1892 return build_goto_raise (label, msg);
1894 expand_sloc (gnat_node, &filename, &line, &col);
1896 return
1897 build_call_n_expr (fndecl, 3,
1898 build1 (ADDR_EXPR,
1899 build_pointer_type (char_type_node),
1900 filename),
1901 line, col);
1904 /* Similar to build_call_raise_column, for an index or range check exception,
1905 with extra information of the form "INDEX out of range FIRST..LAST". */
1907 tree
1908 build_call_raise_range (int msg, Node_Id gnat_node, char kind,
1909 tree index, tree first, tree last)
1911 tree fndecl = gnat_raise_decls_ext[msg];
1912 tree label = get_exception_label (kind);
1913 tree filename, line, col;
1915 /* If this is to be done as a goto, handle that case. */
1916 if (label)
1917 return build_goto_raise (label, msg);
1919 expand_sloc (gnat_node, &filename, &line, &col);
1921 return
1922 build_call_n_expr (fndecl, 6,
1923 build1 (ADDR_EXPR,
1924 build_pointer_type (char_type_node),
1925 filename),
1926 line, col,
1927 convert (integer_type_node, index),
1928 convert (integer_type_node, first),
1929 convert (integer_type_node, last));
1932 /* qsort comparator for the bit positions of two constructor elements
1933 for record components. */
1935 static int
1936 compare_elmt_bitpos (const PTR rt1, const PTR rt2)
1938 const constructor_elt * const elmt1 = (const constructor_elt *) rt1;
1939 const constructor_elt * const elmt2 = (const constructor_elt *) rt2;
1940 const_tree const field1 = elmt1->index;
1941 const_tree const field2 = elmt2->index;
1942 const int ret
1943 = tree_int_cst_compare (bit_position (field1), bit_position (field2));
1945 return ret ? ret : (int) (DECL_UID (field1) - DECL_UID (field2));
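/* A host-side sketch of the tie-breaking scheme above: qsort is not stable,
   so when the primary keys compare equal we fall back to a unique id to
   obtain a deterministic order.  The struct and names are purely
   illustrative.  */

struct example_elt { int bitpos; int uid; };

static int
example_compare_elt (const void *p1, const void *p2)
{
  const struct example_elt *e1 = (const struct example_elt *) p1;
  const struct example_elt *e2 = (const struct example_elt *) p2;
  const int ret = (e1->bitpos > e2->bitpos) - (e1->bitpos < e2->bitpos);
  return ret ? ret : e1->uid - e2->uid;
}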
1948 /* Return a CONSTRUCTOR of TYPE whose elements are V. */
1950 tree
1951 gnat_build_constructor (tree type, vec<constructor_elt, va_gc> *v)
1953 bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
1954 bool read_only = true;
1955 bool side_effects = false;
1956 tree result, obj, val;
1957 unsigned int n_elmts;
1959 /* Scan the elements to see if they are all constant or if any has side
1960 effects, to let us set global flags on the resulting constructor. Count
1961 the elements along the way for possible sorting purposes below. */
1962 FOR_EACH_CONSTRUCTOR_ELT (v, n_elmts, obj, val)
1964 /* The predicate must be in keeping with output_constructor. */
1965 if ((!TREE_CONSTANT (val) && !TREE_STATIC (val))
1966 || (TREE_CODE (type) == RECORD_TYPE
1967 && CONSTRUCTOR_BITFIELD_P (obj)
1968 && !initializer_constant_valid_for_bitfield_p (val))
1969 || !initializer_constant_valid_p (val,
1970 TREE_TYPE (val),
1971 TYPE_REVERSE_STORAGE_ORDER (type)))
1972 allconstant = false;
1974 if (!TREE_READONLY (val))
1975 read_only = false;
1977 if (TREE_SIDE_EFFECTS (val))
1978 side_effects = true;
1981 /* For record types with constant components only, sort field list
1982 by increasing bit position. This is necessary to ensure the
1983 constructor can be output as static data. */
1984 if (allconstant && TREE_CODE (type) == RECORD_TYPE && n_elmts > 1)
1985 v->qsort (compare_elmt_bitpos);
1987 result = build_constructor (type, v);
1988 CONSTRUCTOR_NO_CLEARING (result) = 1;
1989 TREE_CONSTANT (result) = TREE_STATIC (result) = allconstant;
1990 TREE_SIDE_EFFECTS (result) = side_effects;
1991 TREE_READONLY (result) = TYPE_READONLY (type) || read_only || allconstant;
1992 return result;
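/* A minimal usage sketch, assuming GNU_TYPE is a complete record type with
   at least two fields and GNU_VAL1/GNU_VAL2 are values of the matching field
   types; the wrapper name is hypothetical.  This mirrors the way the
   template-and-data constructor is built in build_allocator below.  */

static tree
example_two_field_constructor (tree gnu_type, tree gnu_val1, tree gnu_val2)
{
  vec<constructor_elt, va_gc> *v;
  vec_alloc (v, 2);
  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (gnu_type), gnu_val1);
  CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (gnu_type)), gnu_val2);
  return gnat_build_constructor (gnu_type, v);
}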
1995 /* Return a COMPONENT_REF to access FIELD in RECORD, or NULL_TREE if the field
1996 is not found in the record. Don't fold the result if NO_FOLD is true. */
1998 static tree
1999 build_simple_component_ref (tree record, tree field, bool no_fold)
2001 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (record));
2002 tree ref;
2004 gcc_assert (RECORD_OR_UNION_TYPE_P (type) && COMPLETE_TYPE_P (type));
2006 /* Try to fold a conversion from another record or union type unless the type
2007 contains a placeholder as it might be needed for a later substitution. */
2008 if (TREE_CODE (record) == VIEW_CONVERT_EXPR
2009 && RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (record, 0)))
2010 && !type_contains_placeholder_p (type))
2012 tree op = TREE_OPERAND (record, 0);
2014 /* If this is an unpadding operation, convert the underlying object to
2015 the unpadded type directly. */
2016 if (TYPE_IS_PADDING_P (type) && field == TYPE_FIELDS (type))
2017 return convert (TREE_TYPE (field), op);
2019 /* Otherwise try to access FIELD directly in the underlying type, but
2020 make sure that the form of the reference doesn't change too much;
2021 this can happen for an unconstrained bit-packed array type whose
2022 constrained form can be an integer type. */
2023 ref = build_simple_component_ref (op, field, no_fold);
2024 if (ref && TREE_CODE (TREE_TYPE (ref)) == TREE_CODE (TREE_TYPE (field)))
2025 return ref;
2028 /* If this field is not in the specified record, see if we can find a field
2029 in the specified record whose original field is the same as this one. */
2030 if (DECL_CONTEXT (field) != type)
2032 tree new_field;
2034 /* First loop through normal components. */
2035 for (new_field = TYPE_FIELDS (type);
2036 new_field;
2037 new_field = DECL_CHAIN (new_field))
2038 if (SAME_FIELD_P (field, new_field))
2039 break;
2041 /* Next, loop through DECL_INTERNAL_P components if we haven't found the
2042 component in the first search. Doing this search in two steps is
2043 required to avoid hidden homonymous fields in the _Parent field. */
2044 if (!new_field)
2045 for (new_field = TYPE_FIELDS (type);
2046 new_field;
2047 new_field = DECL_CHAIN (new_field))
2048 if (DECL_INTERNAL_P (new_field)
2049 && RECORD_OR_UNION_TYPE_P (TREE_TYPE (new_field)))
2051 tree field_ref
2052 = build_simple_component_ref (record, new_field, no_fold);
2053 ref = build_simple_component_ref (field_ref, field, no_fold);
2054 if (ref)
2055 return ref;
2058 field = new_field;
2061 if (!field)
2062 return NULL_TREE;
2064 /* If the field's offset has overflowed, do not try to access it, as doing
2065 so may trigger sanity checks deeper in the back-end. Note that we don't
2066 need to warn since this will be done on trying to declare the object. */
2067 if (TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST
2068 && TREE_OVERFLOW (DECL_FIELD_OFFSET (field)))
2069 return NULL_TREE;
2071 ref = build3 (COMPONENT_REF, TREE_TYPE (field), record, field, NULL_TREE);
2073 if (TREE_READONLY (record)
2074 || TREE_READONLY (field)
2075 || TYPE_READONLY (type))
2076 TREE_READONLY (ref) = 1;
2078 if (TREE_THIS_VOLATILE (record)
2079 || TREE_THIS_VOLATILE (field)
2080 || TYPE_VOLATILE (type))
2081 TREE_THIS_VOLATILE (ref) = 1;
2083 if (no_fold)
2084 return ref;
2086 /* The generic folder may punt in this case because the inner array type
2087 can be self-referential, but folding is in fact not problematic. */
2088 if (TREE_CODE (record) == CONSTRUCTOR
2089 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (record)))
2091 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (record);
2092 unsigned HOST_WIDE_INT idx;
2093 tree index, value;
2094 FOR_EACH_CONSTRUCTOR_ELT (elts, idx, index, value)
2095 if (index == field)
2096 return value;
2097 return ref;
2100 return fold (ref);
2103 /* Likewise, but return NULL_EXPR and generate a Constraint_Error if the
2104 field is not found in the record. */
2106 tree
2107 build_component_ref (tree record, tree field, bool no_fold)
2109 tree ref = build_simple_component_ref (record, field, no_fold);
2110 if (ref)
2111 return ref;
2113 /* Assume this is an invalid user field so raise Constraint_Error. */
2114 return build1 (NULL_EXPR, TREE_TYPE (field),
2115 build_call_raise (CE_Discriminant_Check_Failed, Empty,
2116 N_Raise_Constraint_Error));
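/* A minimal usage sketch: reference the first field of the record object
   GNU_RECORD and fold the result, much as maybe_wrap_malloc below accesses
   the single data field of its aligning record.  The wrapper name is
   hypothetical.  */

static tree
example_first_field_ref (tree gnu_record)
{
  return build_component_ref (gnu_record,
                              TYPE_FIELDS (TREE_TYPE (gnu_record)), false);
}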
2119 /* Helper for build_call_alloc_dealloc, with arguments to be interpreted
2120 identically. Process the case where a GNAT_PROC to call is provided. */
2122 static inline tree
2123 build_call_alloc_dealloc_proc (tree gnu_obj, tree gnu_size, tree gnu_type,
2124 Entity_Id gnat_proc, Entity_Id gnat_pool)
2126 tree gnu_proc = gnat_to_gnu (gnat_proc);
2127 tree gnu_call;
2129 /* A storage pool's underlying type is a record type (for both predefined
2130 storage pools and GNAT simple storage pools). The secondary stack uses
2131 the same mechanism, but its pool object (SS_Pool) is an integer. */
2132 if (Is_Record_Type (Underlying_Type (Etype (gnat_pool))))
2134 /* The size is the third parameter; the alignment parameter
2135 has the same type. */
2136 Entity_Id gnat_size_type
2137 = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
2138 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2140 tree gnu_pool = gnat_to_gnu (gnat_pool);
2141 tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
2142 tree gnu_align = size_int (TYPE_ALIGN (gnu_type) / BITS_PER_UNIT);
2144 gnu_size = convert (gnu_size_type, gnu_size);
2145 gnu_align = convert (gnu_size_type, gnu_align);
2147 /* The first arg is always the address of the storage pool; next
2148 comes the address of the object, for a deallocator, then the
2149 size and alignment. */
2150 if (gnu_obj)
2151 gnu_call = build_call_n_expr (gnu_proc, 4, gnu_pool_addr, gnu_obj,
2152 gnu_size, gnu_align);
2153 else
2154 gnu_call = build_call_n_expr (gnu_proc, 3, gnu_pool_addr,
2155 gnu_size, gnu_align);
2158 /* Secondary stack case. */
2159 else
2161 /* The size is the second parameter. */
2162 Entity_Id gnat_size_type
2163 = Etype (Next_Formal (First_Formal (gnat_proc)));
2164 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2166 gnu_size = convert (gnu_size_type, gnu_size);
2168 /* The first arg is the address of the object, for a deallocator,
2169 then the size. */
2170 if (gnu_obj)
2171 gnu_call = build_call_n_expr (gnu_proc, 2, gnu_obj, gnu_size);
2172 else
2173 gnu_call = build_call_n_expr (gnu_proc, 1, gnu_size);
2176 return gnu_call;
2179 /* Helper for build_call_alloc_dealloc, to build and return an allocator for
2180 DATA_SIZE bytes aimed at containing a DATA_TYPE object, using the default
2181 __gnat_malloc allocator. Honor DATA_TYPE alignments greater than what the
2182 latter offers. */
2184 static inline tree
2185 maybe_wrap_malloc (tree data_size, tree data_type, Node_Id gnat_node)
2187 /* When the DATA_TYPE alignment is stricter than what malloc offers
2188 (super-aligned case), we allocate an "aligning" wrapper type and return
2189 the address of its single data field with the malloc's return value
2190 stored just in front. */
2192 unsigned int data_align = TYPE_ALIGN (data_type);
2193 unsigned int system_allocator_alignment
2194 = get_target_system_allocator_alignment () * BITS_PER_UNIT;
2196 tree aligning_type
2197 = ((data_align > system_allocator_alignment)
2198 ? make_aligning_type (data_type, data_align, data_size,
2199 system_allocator_alignment,
2200 POINTER_SIZE / BITS_PER_UNIT,
2201 gnat_node)
2202 : NULL_TREE);
2204 tree size_to_malloc
2205 = aligning_type ? TYPE_SIZE_UNIT (aligning_type) : data_size;
2207 tree malloc_ptr = build_call_n_expr (malloc_decl, 1, size_to_malloc);
2209 if (aligning_type)
2211 /* Latch malloc's return value and get a pointer to the aligning field
2212 first. */
2213 tree storage_ptr = gnat_protect_expr (malloc_ptr);
2215 tree aligning_record_addr
2216 = convert (build_pointer_type (aligning_type), storage_ptr);
2218 tree aligning_record
2219 = build_unary_op (INDIRECT_REF, NULL_TREE, aligning_record_addr);
2221 tree aligning_field
2222 = build_component_ref (aligning_record, TYPE_FIELDS (aligning_type),
2223 false);
2225 tree aligning_field_addr
2226 = build_unary_op (ADDR_EXPR, NULL_TREE, aligning_field);
2228 /* Then arrange to store the allocator's return value ahead
2229 and return. */
2230 tree storage_ptr_slot_addr
2231 = build_binary_op (POINTER_PLUS_EXPR, ptr_type_node,
2232 convert (ptr_type_node, aligning_field_addr),
2233 size_int (-(HOST_WIDE_INT) POINTER_SIZE
2234 / BITS_PER_UNIT));
2236 tree storage_ptr_slot
2237 = build_unary_op (INDIRECT_REF, NULL_TREE,
2238 convert (build_pointer_type (ptr_type_node),
2239 storage_ptr_slot_addr));
2241 return
2242 build2 (COMPOUND_EXPR, TREE_TYPE (aligning_field_addr),
2243 build_binary_op (INIT_EXPR, NULL_TREE,
2244 storage_ptr_slot, storage_ptr),
2245 aligning_field_addr);
2247 else
2248 return malloc_ptr;
2251 /* Helper for build_call_alloc_dealloc, to release a DATA_TYPE object
2252 designated by DATA_PTR using the __gnat_free entry point. */
2254 static inline tree
2255 maybe_wrap_free (tree data_ptr, tree data_type)
2257 /* In the regular alignment case, we pass the data pointer straight to free.
2258 In the super-aligned case, we need to retrieve the initial allocator
2259 return value, stored in front of the data block at allocation time. */
2261 unsigned int data_align = TYPE_ALIGN (data_type);
2262 unsigned int system_allocator_alignment
2263 = get_target_system_allocator_alignment () * BITS_PER_UNIT;
2265 tree free_ptr;
2267 if (data_align > system_allocator_alignment)
2269 /* DATA_FRONT_PTR (void *)
2270 = (void *)DATA_PTR - (void *)sizeof (void *)) */
2271 tree data_front_ptr
2272 = build_binary_op
2273 (POINTER_PLUS_EXPR, ptr_type_node,
2274 convert (ptr_type_node, data_ptr),
2275 size_int (-(HOST_WIDE_INT) POINTER_SIZE / BITS_PER_UNIT));
2277 /* FREE_PTR (void *) = *(void **)DATA_FRONT_PTR */
2278 free_ptr
2279 = build_unary_op
2280 (INDIRECT_REF, NULL_TREE,
2281 convert (build_pointer_type (ptr_type_node), data_front_ptr));
2283 else
2284 free_ptr = data_ptr;
2286 return build_call_n_expr (free_decl, 1, free_ptr);
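/* A standalone host-side sketch of the layout implemented by the two helpers
   above: over-allocate, hand back an address suitably aligned within the
   block, and stash the system allocator's return value in the pointer slot
   just in front so that it can be recovered at deallocation time.  The
   arithmetic below only illustrates the layout (ALIGN is assumed to be a
   power of two); it is not the gigi implementation.  */

#include <stdint.h>
#include <stdlib.h>

static void *
example_aligned_malloc (size_t size, size_t align)
{
  char *raw = (char *) malloc (size + align + sizeof (void *));
  if (!raw)
    return NULL;
  uintptr_t data
    = ((uintptr_t) raw + sizeof (void *) + align - 1) & ~(uintptr_t) (align - 1);
  ((void **) data)[-1] = raw;       /* The slot just in front of the data.  */
  return (void *) data;
}

static void
example_aligned_free (void *data)
{
  if (data)
    free (((void **) data)[-1]);    /* Retrieve the initial return value.  */
}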
2289 /* Build a GCC tree to call an allocation or deallocation function.
2290 If GNU_OBJ is nonzero, it is an object to deallocate. Otherwise,
2291 generate an allocator.
2293 GNU_SIZE is the number of bytes to allocate and GNU_TYPE is the contained
2294 object type, used to determine the to-be-honored address alignment.
2295 GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the storage
2296 pool to use. If not present, malloc and free are used. GNAT_NODE is used
2297 to provide an error location for restriction violation messages. */
2299 tree
2300 build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, tree gnu_type,
2301 Entity_Id gnat_proc, Entity_Id gnat_pool,
2302 Node_Id gnat_node)
2304 /* Explicit proc to call? This one is assumed to deal with the type
2305 alignment constraints. */
2306 if (Present (gnat_proc))
2307 return build_call_alloc_dealloc_proc (gnu_obj, gnu_size, gnu_type,
2308 gnat_proc, gnat_pool);
2310 /* Otherwise, object to "free" or "malloc" with possible special processing
2311 for alignments stricter than what the default allocator honors. */
2312 else if (gnu_obj)
2313 return maybe_wrap_free (gnu_obj, gnu_type);
2314 else
2316 /* Assert that we can no longer be called with this special pool. */
2317 gcc_assert (gnat_pool != -1);
2319 /* Check that we aren't violating the associated restriction. */
2320 if (!(Nkind (gnat_node) == N_Allocator && Comes_From_Source (gnat_node)))
2322 Check_No_Implicit_Heap_Alloc (gnat_node);
2323 if (Has_Task (Etype (gnat_node)))
2324 Check_No_Implicit_Task_Alloc (gnat_node);
2325 if (Has_Protected (Etype (gnat_node)))
2326 Check_No_Implicit_Protected_Alloc (gnat_node);
2328 return maybe_wrap_malloc (gnu_size, gnu_type, gnat_node);
2332 /* Build a GCC tree that corresponds to allocating an object of TYPE whose
2333 initial value is INIT, if INIT is nonzero. Convert the expression to
2334 RESULT_TYPE, which must be some pointer type, and return the result.
2336 GNAT_PROC and GNAT_POOL optionally give the procedure to call and
2337 the storage pool to use. GNAT_NODE is used to provide an error
2338 location for restriction violation messages. If IGNORE_INIT_TYPE is
2339 true, ignore the type of INIT for the purpose of determining the size;
2340 this will cause the maximum size to be allocated if TYPE is of
2341 self-referential size. */
2343 tree
2344 build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
2345 Entity_Id gnat_pool, Node_Id gnat_node, bool ignore_init_type)
2347 tree size, storage, storage_deref, storage_init;
2349 /* If the initializer, if present, is a NULL_EXPR, just return a new one. */
2350 if (init && TREE_CODE (init) == NULL_EXPR)
2351 return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));
2353 /* If we are just annotating types, also return a NULL_EXPR. */
2354 else if (type_annotate_only)
2355 return build1 (NULL_EXPR, result_type,
2356 build_call_raise (CE_Range_Check_Failed, gnat_node,
2357 N_Raise_Constraint_Error));
2359 /* If the initializer, if present, is a COND_EXPR, deal with each branch. */
2360 else if (init && TREE_CODE (init) == COND_EXPR)
2361 return build3 (COND_EXPR, result_type, TREE_OPERAND (init, 0),
2362 build_allocator (type, TREE_OPERAND (init, 1), result_type,
2363 gnat_proc, gnat_pool, gnat_node,
2364 ignore_init_type),
2365 build_allocator (type, TREE_OPERAND (init, 2), result_type,
2366 gnat_proc, gnat_pool, gnat_node,
2367 ignore_init_type));
2369 /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
2370 sizes of the object and its template. Allocate the whole thing and
2371 fill in the parts that are known. */
2372 else if (TYPE_IS_FAT_OR_THIN_POINTER_P (result_type))
2374 tree storage_type
2375 = build_unc_object_type_from_ptr (result_type, type,
2376 get_identifier ("ALLOC"), false);
2377 tree template_type = TREE_TYPE (TYPE_FIELDS (storage_type));
2378 tree storage_ptr_type = build_pointer_type (storage_type);
2380 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
2381 init);
2383 /* If the size overflows, pass -1 so Storage_Error will be raised. */
2384 if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
2385 size = size_int (-1);
2387 storage = build_call_alloc_dealloc (NULL_TREE, size, storage_type,
2388 gnat_proc, gnat_pool, gnat_node);
2389 storage = convert (storage_ptr_type, gnat_protect_expr (storage));
2390 storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2391 TREE_THIS_NOTRAP (storage_deref) = 1;
2393 /* If there is an initializing expression, then make a constructor for
2394 the entire object including the bounds and copy it into the object.
2395 If there is no initializing expression, just set the bounds. */
2396 if (init)
2398 vec<constructor_elt, va_gc> *v;
2399 vec_alloc (v, 2);
2401 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (storage_type),
2402 build_template (template_type, type, init));
2403 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (storage_type)),
2404 init);
2405 storage_init
2406 = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref,
2407 gnat_build_constructor (storage_type, v));
2409 else
2410 storage_init
2411 = build_binary_op (INIT_EXPR, NULL_TREE,
2412 build_component_ref (storage_deref,
2413 TYPE_FIELDS (storage_type),
2414 false),
2415 build_template (template_type, type, NULL_TREE));
2417 return build2 (COMPOUND_EXPR, result_type,
2418 storage_init, convert (result_type, storage));
2421 size = TYPE_SIZE_UNIT (type);
2423 /* If we have an initializing expression, see if its size is simpler
2424 than the size from the type. */
2425 if (!ignore_init_type && init && TYPE_SIZE_UNIT (TREE_TYPE (init))
2426 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
2427 || CONTAINS_PLACEHOLDER_P (size)))
2428 size = TYPE_SIZE_UNIT (TREE_TYPE (init));
2430 /* If the size is still self-referential, reference the initializing
2431 expression, if it is present. If not, this must have been a
2432 call to allocate a library-level object, in which case we use
2433 the maximum size. */
2434 if (CONTAINS_PLACEHOLDER_P (size))
2436 if (!ignore_init_type && init)
2437 size = substitute_placeholder_in_expr (size, init);
2438 else
2439 size = max_size (size, true);
2442 /* If the size overflows, pass -1 so Storage_Error will be raised. */
2443 if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
2444 size = size_int (-1);
2446 storage = convert (result_type,
2447 build_call_alloc_dealloc (NULL_TREE, size, type,
2448 gnat_proc, gnat_pool,
2449 gnat_node));
2451 /* If we have an initial value, protect the new address, assign the value
2452 and return the address with a COMPOUND_EXPR. */
2453 if (init)
2455 storage = gnat_protect_expr (storage);
2456 storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2457 TREE_THIS_NOTRAP (storage_deref) = 1;
2458 storage_init
2459 = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref, init);
2460 return build2 (COMPOUND_EXPR, result_type, storage_init, storage);
2463 return storage;
2466 /* Indicate that we need to take the address of T and that it therefore
2467 should not be allocated in a register. Return true if successful. */
2469 bool
2470 gnat_mark_addressable (tree t)
2472 while (true)
2473 switch (TREE_CODE (t))
2475 case ADDR_EXPR:
2476 case COMPONENT_REF:
2477 case ARRAY_REF:
2478 case ARRAY_RANGE_REF:
2479 case REALPART_EXPR:
2480 case IMAGPART_EXPR:
2481 case VIEW_CONVERT_EXPR:
2482 case NON_LVALUE_EXPR:
2483 CASE_CONVERT:
2484 t = TREE_OPERAND (t, 0);
2485 break;
2487 case COMPOUND_EXPR:
2488 t = TREE_OPERAND (t, 1);
2489 break;
2491 case CONSTRUCTOR:
2492 TREE_ADDRESSABLE (t) = 1;
2493 return true;
2495 case VAR_DECL:
2496 case PARM_DECL:
2497 case RESULT_DECL:
2498 TREE_ADDRESSABLE (t) = 1;
2499 return true;
2501 case FUNCTION_DECL:
2502 TREE_ADDRESSABLE (t) = 1;
2503 return true;
2505 case CONST_DECL:
2506 return DECL_CONST_CORRESPONDING_VAR (t)
2507 && gnat_mark_addressable (DECL_CONST_CORRESPONDING_VAR (t));
2509 default:
2510 return true;
2514 /* Return true if EXP is a stable expression for the purpose of the functions
2515 below and, therefore, can be returned unmodified by them. We accept things
2516 that are actual constants or that have already been handled. */
2518 static bool
2519 gnat_stable_expr_p (tree exp)
2521 enum tree_code code = TREE_CODE (exp);
2522 return TREE_CONSTANT (exp) || code == NULL_EXPR || code == SAVE_EXPR;
2525 /* Save EXP for later use or reuse. This is equivalent to save_expr in tree.c
2526 but we know how to handle our own nodes. */
2528 tree
2529 gnat_save_expr (tree exp)
2531 tree type = TREE_TYPE (exp);
2532 enum tree_code code = TREE_CODE (exp);
2534 if (gnat_stable_expr_p (exp))
2535 return exp;
2537 if (code == UNCONSTRAINED_ARRAY_REF)
2539 tree t = build1 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)));
2540 TREE_READONLY (t) = TYPE_READONLY (type);
2541 return t;
2544 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2545 This may be more efficient, but will also allow us to more easily find
2546 the match for the PLACEHOLDER_EXPR. */
2547 if (code == COMPONENT_REF
2548 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2549 return build3 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)),
2550 TREE_OPERAND (exp, 1), NULL_TREE);
2552 return save_expr (exp);
2555 /* Protect EXP for immediate reuse. This is a variant of gnat_save_expr that
2556 is optimized under the assumption that EXP's value doesn't change before
2557 its subsequent reuse(s) except through its potential reevaluation. */
2559 tree
2560 gnat_protect_expr (tree exp)
2562 tree type = TREE_TYPE (exp);
2563 enum tree_code code = TREE_CODE (exp);
2565 if (gnat_stable_expr_p (exp))
2566 return exp;
2568 /* If EXP has no side effects, we theoretically don't need to do anything.
2569 However, we may be recursively passed more and more complex expressions
2570 involving checks which will be reused multiple times and eventually be
2571 unshared for gimplification; in order to avoid a complexity explosion
2572 at that point, we protect any expressions more complex than a simple
2573 arithmetic expression. */
2574 if (!TREE_SIDE_EFFECTS (exp))
2576 tree inner = skip_simple_arithmetic (exp);
2577 if (!EXPR_P (inner) || REFERENCE_CLASS_P (inner))
2578 return exp;
2581 /* If this is a conversion, protect what's inside the conversion. */
2582 if (code == NON_LVALUE_EXPR
2583 || CONVERT_EXPR_CODE_P (code)
2584 || code == VIEW_CONVERT_EXPR)
2585 return build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2587 /* If we're indirectly referencing something, we only need to protect the
2588 address since the data itself can't change in these situations. */
2589 if (code == INDIRECT_REF || code == UNCONSTRAINED_ARRAY_REF)
2591 tree t = build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2592 TREE_READONLY (t) = TYPE_READONLY (type);
2593 return t;
2596 /* Likewise if we're indirectly referencing part of something. */
2597 if (code == COMPONENT_REF
2598 && TREE_CODE (TREE_OPERAND (exp, 0)) == INDIRECT_REF)
2599 return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
2600 TREE_OPERAND (exp, 1), NULL_TREE);
2602 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2603 This may be more efficient, but will also allow us to more easily find
2604 the match for the PLACEHOLDER_EXPR. */
2605 if (code == COMPONENT_REF
2606 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2607 return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
2608 TREE_OPERAND (exp, 1), NULL_TREE);
2610 /* If this is a fat pointer or a scalar, just make a SAVE_EXPR. Likewise
2611 for a CALL_EXPR as large objects are returned via invisible reference
2612 in most ABIs so the temporary will directly be filled by the callee. */
2613 if (TYPE_IS_FAT_POINTER_P (type)
2614 || !AGGREGATE_TYPE_P (type)
2615 || code == CALL_EXPR)
2616 return save_expr (exp);
2618 /* Otherwise, take a reference, protect the address and dereference it. */
2619 return
2620 build_unary_op (INDIRECT_REF, type,
2621 save_expr (build_unary_op (ADDR_EXPR, NULL_TREE, exp)));
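/* A minimal usage sketch: protect GNU_EXPR so that it can be referenced
   twice while being evaluated only once; the wrapper name and the
   multiplication are purely illustrative.  */

static tree
example_square (tree gnu_expr)
{
  gnu_expr = gnat_protect_expr (gnu_expr);
  return build_binary_op (MULT_EXPR, TREE_TYPE (gnu_expr),
                          gnu_expr, gnu_expr);
}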
2624 /* This is equivalent to stabilize_reference_1 in tree.c but we take an extra
2625 argument to force evaluation of everything. */
2627 static tree
2628 gnat_stabilize_reference_1 (tree e, void *data)
2630 const bool force = *(bool *)data;
2631 enum tree_code code = TREE_CODE (e);
2632 tree type = TREE_TYPE (e);
2633 tree result;
2635 if (gnat_stable_expr_p (e))
2636 return e;
2638 switch (TREE_CODE_CLASS (code))
2640 case tcc_exceptional:
2641 case tcc_declaration:
2642 case tcc_comparison:
2643 case tcc_expression:
2644 case tcc_reference:
2645 case tcc_vl_exp:
2646 /* If this is a COMPONENT_REF of a fat pointer, save the entire
2647 fat pointer. This may be more efficient, but will also allow
2648 us to more easily find the match for the PLACEHOLDER_EXPR. */
2649 if (code == COMPONENT_REF
2650 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (e, 0))))
2651 result
2652 = build3 (code, type,
2653 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data),
2654 TREE_OPERAND (e, 1), NULL_TREE);
2655 /* If the expression has side-effects, then encase it in a SAVE_EXPR
2656 so that it will only be evaluated once. */
2657 /* The tcc_reference and tcc_comparison classes could be handled as
2658 below, but it is generally faster to only evaluate them once. */
2659 else if (TREE_SIDE_EFFECTS (e) || force)
2660 return save_expr (e);
2661 else
2662 return e;
2663 break;
2665 case tcc_binary:
2666 /* Recursively stabilize each operand. */
2667 result
2668 = build2 (code, type,
2669 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data),
2670 gnat_stabilize_reference_1 (TREE_OPERAND (e, 1), data));
2671 break;
2673 case tcc_unary:
2674 /* Recursively stabilize each operand. */
2675 result
2676 = build1 (code, type,
2677 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data));
2678 break;
2680 default:
2681 gcc_unreachable ();
2684 TREE_READONLY (result) = TREE_READONLY (e);
2685 TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (e);
2686 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
2688 return result;
2691 /* This is equivalent to stabilize_reference in tree.c but we know how to
2692 handle our own nodes and we take extra arguments. FORCE says whether to
2693 force evaluation of everything in REF. INIT is set to the first arm of
2694 a COMPOUND_EXPR present in REF, if any. */
2696 tree
2697 gnat_stabilize_reference (tree ref, bool force, tree *init)
2699 return
2700 gnat_rewrite_reference (ref, gnat_stabilize_reference_1, &force, init);
2703 /* Rewrite reference REF and call FUNC on each expression within REF in the
2704 process. DATA is passed unmodified to FUNC. INIT is set to the first
2705 arm of a COMPOUND_EXPR present in REF, if any. */
2707 tree
2708 gnat_rewrite_reference (tree ref, rewrite_fn func, void *data, tree *init)
2710 tree type = TREE_TYPE (ref);
2711 enum tree_code code = TREE_CODE (ref);
2712 tree result;
2714 switch (code)
2716 case CONST_DECL:
2717 case VAR_DECL:
2718 case PARM_DECL:
2719 case RESULT_DECL:
2720 /* No action is needed in this case. */
2721 return ref;
2723 CASE_CONVERT:
2724 case FLOAT_EXPR:
2725 case FIX_TRUNC_EXPR:
2726 case REALPART_EXPR:
2727 case IMAGPART_EXPR:
2728 case VIEW_CONVERT_EXPR:
2729 result
2730 = build1 (code, type,
2731 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func, data,
2732 init));
2733 break;
2735 case INDIRECT_REF:
2736 case UNCONSTRAINED_ARRAY_REF:
2737 result = build1 (code, type, func (TREE_OPERAND (ref, 0), data));
2738 break;
2740 case COMPONENT_REF:
2741 result = build3 (COMPONENT_REF, type,
2742 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func,
2743 data, init),
2744 TREE_OPERAND (ref, 1), NULL_TREE);
2745 break;
2747 case BIT_FIELD_REF:
2748 result = build3 (BIT_FIELD_REF, type,
2749 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func,
2750 data, init),
2751 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
2752 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
2753 break;
2755 case ARRAY_REF:
2756 case ARRAY_RANGE_REF:
2757 result
2758 = build4 (code, type,
2759 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func, data,
2760 init),
2761 func (TREE_OPERAND (ref, 1), data),
2762 TREE_OPERAND (ref, 2), NULL_TREE);
2763 break;
2765 case COMPOUND_EXPR:
2766 gcc_assert (!*init);
2767 *init = TREE_OPERAND (ref, 0);
2768 /* We expect only the pattern built in Call_to_gnu. */
2769 gcc_assert (DECL_P (TREE_OPERAND (ref, 1))
2770 || (TREE_CODE (TREE_OPERAND (ref, 1)) == COMPONENT_REF
2771 && DECL_P (TREE_OPERAND (TREE_OPERAND (ref, 1), 0))));
2772 return TREE_OPERAND (ref, 1);
2774 case CALL_EXPR:
2776 /* This can only be an atomic load. */
2777 gcc_assert (call_is_atomic_load (ref));
2779 /* An atomic load is an INDIRECT_REF of its first argument. */
2780 tree t = CALL_EXPR_ARG (ref, 0);
2781 if (TREE_CODE (t) == NOP_EXPR)
2782 t = TREE_OPERAND (t, 0);
2783 if (TREE_CODE (t) == ADDR_EXPR)
2784 t = build1 (ADDR_EXPR, TREE_TYPE (t),
2785 gnat_rewrite_reference (TREE_OPERAND (t, 0), func, data,
2786 init));
2787 else
2788 t = func (t, data);
2789 t = fold_convert (TREE_TYPE (CALL_EXPR_ARG (ref, 0)), t);
2791 result = build_call_expr (TREE_OPERAND (CALL_EXPR_FN (ref), 0), 2,
2792 t, CALL_EXPR_ARG (ref, 1));
2794 break;
2796 case ERROR_MARK:
2797 case NULL_EXPR:
2798 return ref;
2800 default:
2801 gcc_unreachable ();
2804 /* TREE_THIS_VOLATILE and TREE_SIDE_EFFECTS set on the initial expression
2805 may not be sustained across some paths, such as the one through build1 for
2806 INDIRECT_REF. We reset those flags here in the general case, which is
2807 consistent with the GCC version of this routine.
2809 Special care should be taken regarding TREE_SIDE_EFFECTS, because some
2810 paths introduce side-effects where there were none initially (e.g. if a
2811 SAVE_EXPR is built) and we also want to keep track of that. */
2812 TREE_READONLY (result) = TREE_READONLY (ref);
2813 TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (ref);
2814 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
2816 if (code == INDIRECT_REF
2817 || code == UNCONSTRAINED_ARRAY_REF
2818 || code == ARRAY_REF
2819 || code == ARRAY_RANGE_REF)
2820 TREE_THIS_NOTRAP (result) = TREE_THIS_NOTRAP (ref);
2822 return result;
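/* A minimal usage sketch: a rewrite_fn callback that protects every
   expression handed to it, together with a wrapper invoking
   gnat_rewrite_reference, in the same spirit as gnat_stabilize_reference
   above.  The names are hypothetical.  */

static tree
example_protect_subexpr (tree t, void *data ATTRIBUTE_UNUSED)
{
  return gnat_protect_expr (t);
}

static tree
example_protect_reference (tree ref)
{
  tree init = NULL_TREE;
  return gnat_rewrite_reference (ref, example_protect_subexpr, NULL, &init);
}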
2825 /* This is equivalent to get_inner_reference in expr.c but it returns the
2826 ultimate containing object only if the reference (lvalue) is constant,
2827 i.e. if it doesn't depend on the context in which it is evaluated. */
2829 tree
2830 get_inner_constant_reference (tree exp)
2832 while (true)
2834 switch (TREE_CODE (exp))
2836 case BIT_FIELD_REF:
2837 break;
2839 case COMPONENT_REF:
2840 if (!TREE_CONSTANT (DECL_FIELD_OFFSET (TREE_OPERAND (exp, 1))))
2841 return NULL_TREE;
2842 break;
2844 case ARRAY_REF:
2845 case ARRAY_RANGE_REF:
2847 if (TREE_OPERAND (exp, 2))
2848 return NULL_TREE;
2850 tree array_type = TREE_TYPE (TREE_OPERAND (exp, 0));
2851 if (!TREE_CONSTANT (TREE_OPERAND (exp, 1))
2852 || !TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (array_type)))
2853 || !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (array_type))))
2854 return NULL_TREE;
2856 break;
2858 case REALPART_EXPR:
2859 case IMAGPART_EXPR:
2860 case VIEW_CONVERT_EXPR:
2861 break;
2863 default:
2864 goto done;
2867 exp = TREE_OPERAND (exp, 0);
2870 done:
2871 return exp;
2874 /* Return true if EXPR is the addition or the subtraction of a constant and,
2875 if so, set *ADD to the addend, *CST to the constant and *MINUS_P to true
2876 if this is a subtraction. */
2878 bool
2879 is_simple_additive_expression (tree expr, tree *add, tree *cst, bool *minus_p)
2881 /* Skip overflow checks. */
2882 if (TREE_CODE (expr) == COND_EXPR
2883 && TREE_CODE (COND_EXPR_THEN (expr)) == COMPOUND_EXPR
2884 && TREE_CODE (TREE_OPERAND (COND_EXPR_THEN (expr), 0)) == CALL_EXPR
2885 && get_callee_fndecl (TREE_OPERAND (COND_EXPR_THEN (expr), 0))
2886 == gnat_raise_decls[CE_Overflow_Check_Failed])
2887 expr = COND_EXPR_ELSE (expr);
2889 if (TREE_CODE (expr) == PLUS_EXPR)
2891 if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
2893 *add = TREE_OPERAND (expr, 1);
2894 *cst = TREE_OPERAND (expr, 0);
2895 *minus_p = false;
2896 return true;
2898 else if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
2900 *add = TREE_OPERAND (expr, 0);
2901 *cst = TREE_OPERAND (expr, 1);
2902 *minus_p = false;
2903 return true;
2906 else if (TREE_CODE (expr) == MINUS_EXPR)
2908 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
2910 *add = TREE_OPERAND (expr, 0);
2911 *cst = TREE_OPERAND (expr, 1);
2912 *minus_p = true;
2913 return true;
2917 return false;
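/* A minimal usage sketch: decompose EXPR if it is a simple additive
   expression and rebuild an equivalent tree, much as gnat_invariant_expr
   below does after processing the addend.  The wrapper name is
   hypothetical.  */

static tree
example_rebuild_additive (tree expr)
{
  tree add, cst;
  bool minus_p;

  if (is_simple_additive_expression (expr, &add, &cst, &minus_p))
    return fold_build2 (minus_p ? MINUS_EXPR : PLUS_EXPR, TREE_TYPE (expr),
                        add, cst);

  return expr;
}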
2920 /* If EXPR is an expression that is invariant in the current function, in the
2921 sense that it can be evaluated anywhere in the function and any number of
2922 times, return EXPR or an equivalent expression. Otherwise return NULL. */
2924 tree
2925 gnat_invariant_expr (tree expr)
2927 const tree type = TREE_TYPE (expr);
2928 tree add, cst;
2929 bool minus_p;
2931 expr = remove_conversions (expr, false);
2933 /* Look through temporaries created to capture values. */
2934 while ((TREE_CODE (expr) == CONST_DECL
2935 || (TREE_CODE (expr) == VAR_DECL && TREE_READONLY (expr)))
2936 && decl_function_context (expr) == current_function_decl
2937 && DECL_INITIAL (expr))
2939 expr = DECL_INITIAL (expr);
2940 /* Look into CONSTRUCTORs built to initialize padded types. */
2941 if (TYPE_IS_PADDING_P (TREE_TYPE (expr)))
2942 expr = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (expr))), expr);
2943 expr = remove_conversions (expr, false);
2946 /* We are only interested in scalar types at the moment and, even though we may
2947 have gone through padding types in the above loop, we must be back to a
2948 scalar value at this point. */
2949 if (AGGREGATE_TYPE_P (TREE_TYPE (expr)))
2950 return NULL_TREE;
2952 if (TREE_CONSTANT (expr))
2953 return fold_convert (type, expr);
2955 /* Deal with addition or subtraction of constants. */
2956 if (is_simple_additive_expression (expr, &add, &cst, &minus_p))
2958 add = gnat_invariant_expr (add);
2959 if (add)
2960 return
2961 fold_build2 (minus_p ? MINUS_EXPR : PLUS_EXPR, type,
2962 fold_convert (type, add), fold_convert (type, cst));
2963 else
2964 return NULL_TREE;
2967 bool invariant_p = false;
2968 tree t = expr;
2970 while (true)
2972 switch (TREE_CODE (t))
2974 case COMPONENT_REF:
2975 invariant_p |= DECL_INVARIANT_P (TREE_OPERAND (t, 1));
2976 break;
2978 case ARRAY_REF:
2979 case ARRAY_RANGE_REF:
2980 if (!TREE_CONSTANT (TREE_OPERAND (t, 1)) || TREE_OPERAND (t, 2))
2981 return NULL_TREE;
2982 break;
2984 case BIT_FIELD_REF:
2985 case REALPART_EXPR:
2986 case IMAGPART_EXPR:
2987 case VIEW_CONVERT_EXPR:
2988 CASE_CONVERT:
2989 break;
2991 case INDIRECT_REF:
2992 if ((!invariant_p && !TREE_READONLY (t)) || TREE_SIDE_EFFECTS (t))
2993 return NULL_TREE;
2994 invariant_p = false;
2995 break;
2997 default:
2998 goto object;
3001 t = TREE_OPERAND (t, 0);
3004 object:
3005 if (TREE_SIDE_EFFECTS (t))
3006 return NULL_TREE;
3008 if (TREE_CODE (t) == CONST_DECL
3009 && (DECL_EXTERNAL (t)
3010 || decl_function_context (t) != current_function_decl))
3011 return fold_convert (type, expr);
3013 if (!invariant_p && !TREE_READONLY (t))
3014 return NULL_TREE;
3016 if (TREE_CODE (t) == PARM_DECL)
3017 return fold_convert (type, expr);
3019 if (TREE_CODE (t) == VAR_DECL
3020 && (DECL_EXTERNAL (t)
3021 || decl_function_context (t) != current_function_decl))
3022 return fold_convert (type, expr);
3024 return NULL_TREE;