/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                               U T I L S 2                                *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2014, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software;  you can  redistribute it  and/or modify it under *
 * terms of the  GNU General Public License as published  by the Free Soft- *
 * ware  Foundation;  either version 3,  or (at your option) any later ver- *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY;  without even the  implied warranty of MERCHANTABILITY *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for  more details.  You should have received a copy of the GNU General   *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed  by the GNAT team at  New York University. *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "stor-layout.h"
#include "stringpool.h"
#include "varasm.h"
#include "flags.h"
#include "toplev.h"
#include "ggc.h"
#include "tree-inline.h"

#include "ada.h"
#include "types.h"
#include "atree.h"
#include "elists.h"
#include "namet.h"
#include "nlists.h"
#include "snames.h"
#include "stringt.h"
#include "uintp.h"
#include "fe.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"
/* Return the base type of TYPE.  */

tree
get_base_type (tree type)
{
  if (TREE_CODE (type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (type))
    type = TREE_TYPE (TYPE_FIELDS (type));

  while (TREE_TYPE (type)
	 && (TREE_CODE (type) == INTEGER_TYPE
	     || TREE_CODE (type) == REAL_TYPE))
    type = TREE_TYPE (type);

  return type;
}
/* EXP is a GCC tree representing an address.  See if we can find how
   strictly the object at that address is aligned.  Return that alignment
   in bits.  If we don't know anything about the alignment, return 0.  */

unsigned int
known_alignment (tree exp)
{
  unsigned int this_alignment;
  unsigned int lhs, rhs;

  switch (TREE_CODE (exp))
    {
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Conversions between pointers and integers don't change the alignment
	 of the underlying object.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 0));
      break;

    case COMPOUND_EXPR:
      /* The value of a COMPOUND_EXPR is that of its second operand.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* If two addresses are added, the alignment of the result is the
	 minimum of the two alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));
      this_alignment = MIN (lhs, rhs);
      break;

    case POINTER_PLUS_EXPR:
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));
      /* If we don't know the alignment of the offset, we assume that
	 of the base.  */
      if (rhs == 0)
	this_alignment = lhs;
      else
	this_alignment = MIN (lhs, rhs);
      break;

    case COND_EXPR:
      /* If there is a choice between two values, use the smaller one.  */
      lhs = known_alignment (TREE_OPERAND (exp, 1));
      rhs = known_alignment (TREE_OPERAND (exp, 2));
      this_alignment = MIN (lhs, rhs);
      break;

    case INTEGER_CST:
      {
	unsigned HOST_WIDE_INT c = TREE_INT_CST_LOW (exp);
	/* The first part of this represents the lowest bit in the constant,
	   but it is originally in bytes, not bits.  */
	this_alignment = MIN (BITS_PER_UNIT * (c & -c), BIGGEST_ALIGNMENT);
      }
      break;
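
      /* Illustration: for a constant byte value c = 12, c & -c = 4 (the
	 lowest set bit), so the address is known to be 4-byte aligned,
	 i.e. 32 bits here, subject to the BIGGEST_ALIGNMENT cap.  */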

    case MULT_EXPR:
      /* If we know the alignment of just one side, use it.  Otherwise,
	 use the product of the alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));

      if (lhs == 0)
	this_alignment = rhs;
      else if (rhs == 0)
	this_alignment = lhs;
      else
	this_alignment = MIN (lhs * rhs, BIGGEST_ALIGNMENT);
      break;

    case BIT_AND_EXPR:
      /* A bit-and expression is as aligned as the maximum alignment of the
	 operands.  We typically get here for a complex lhs and a constant
	 negative power of two on the rhs to force an explicit alignment, so
	 don't bother looking at the lhs.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;
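
      /* Illustration: a mask of -8 on the rhs, i.e. ...11111000, has
	 lowest set bit 8 in the INTEGER_CST case above and hence yields
	 a known 8-byte (64-bit) alignment for the whole expression.  */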

    case ADDR_EXPR:
      this_alignment = expr_align (TREE_OPERAND (exp, 0));
      break;

    case CALL_EXPR:
      {
	tree t = maybe_inline_call_in_expr (exp);
	if (t)
	  return known_alignment (t);
      }

      /* Fall through...  */

    default:
      /* For other pointer expressions, we assume that the pointed-to object
	 is at least as aligned as the pointed-to type.  Beware that we can
	 have a dummy type here (e.g. a Taft Amendment type), for which the
	 alignment is meaningless and should be ignored.  */
      if (POINTER_TYPE_P (TREE_TYPE (exp))
	  && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp))))
	this_alignment = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
      else
	this_alignment = 0;
      break;
    }

  return this_alignment;
}
/* We have a comparison or assignment operation on two types, T1 and T2, which
   are either both array types or both record types.  T1 is assumed to be for
   the left hand side operand, and T2 for the right hand side.  Return the
   type that both operands should be converted to for the operation, if any.
   Otherwise return zero.  */

static tree
find_common_type (tree t1, tree t2)
{
  /* ??? As of today, various constructs lead to here with types of different
     sizes even when both constants (e.g. tagged types, packable vs regular
     component types, padded vs unpadded types, ...).  While some of these
     would better be handled upstream (types should be made consistent before
     calling into build_binary_op), some others are really expected and we
     have to be careful.  */

  /* We must avoid writing more than what the target can hold if this is for
     an assignment, and the case of tagged types is handled in build_binary_op,
     so we use the lhs type if it is known to be smaller or of constant size
     and the rhs type is not, whatever the modes.  We also force t1 in case of
     constant size equality to minimize occurrences of view conversions on the
     lhs of an assignment, except for the case of record types with a variant
     part on the lhs but not on the rhs, to make the conversion simpler.  */
  if (TREE_CONSTANT (TYPE_SIZE (t1))
      && (!TREE_CONSTANT (TYPE_SIZE (t2))
	  || tree_int_cst_lt (TYPE_SIZE (t1), TYPE_SIZE (t2))
	  || (TYPE_SIZE (t1) == TYPE_SIZE (t2)
	      && !(TREE_CODE (t1) == RECORD_TYPE
		   && TREE_CODE (t2) == RECORD_TYPE
		   && get_variant_part (t1) != NULL_TREE
		   && get_variant_part (t2) == NULL_TREE))))
    return t1;

  /* Otherwise, if the lhs type is non-BLKmode, use it.  Note that we know
     that we will not have any alignment problems since, if we did, the
     non-BLKmode type could not have been used.  */
  if (TYPE_MODE (t1) != BLKmode)
    return t1;

  /* If the rhs type is of constant size, use it whatever the modes.  At
     this point it is known to be smaller, or of constant size and the
     lhs type is not.  */
  if (TREE_CONSTANT (TYPE_SIZE (t2)))
    return t2;

  /* Otherwise, if the rhs type is non-BLKmode, use it.  */
  if (TYPE_MODE (t2) != BLKmode)
    return t2;

  /* In this case, both types have variable size and BLKmode.  It's
     probably best to leave the "type mismatch" because changing it
     could cause a bad self-referential reference.  */
  return NULL_TREE;
}
/* Return an expression tree representing an equality comparison of A1 and A2,
   two objects of type ARRAY_TYPE.  The result should be of type RESULT_TYPE.

   Two arrays are equal in one of two ways: (1) if both have zero length in
   some dimension (not necessarily the same dimension) or (2) if the lengths
   in each dimension are equal and the data is equal.  We perform the length
   tests in as efficient a manner as possible.  */
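
/* Illustration: for one-dimensional arrays A1 and A2, the expression built
   below is essentially

     (length (A1) == length (A2) && A1 == A2) || (A1 empty && A2 empty)

   where each emptiness test checks for a zero length in the dimension.  */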

static tree
compare_arrays (location_t loc, tree result_type, tree a1, tree a2)
{
  tree result = convert (result_type, boolean_true_node);
  tree a1_is_null = convert (result_type, boolean_false_node);
  tree a2_is_null = convert (result_type, boolean_false_node);
  tree t1 = TREE_TYPE (a1);
  tree t2 = TREE_TYPE (a2);
  bool a1_side_effects_p = TREE_SIDE_EFFECTS (a1);
  bool a2_side_effects_p = TREE_SIDE_EFFECTS (a2);
  bool length_zero_p = false;

  /* If either operand has side-effects, they have to be evaluated only once
     in spite of the multiple references to the operand in the comparison.  */
  if (a1_side_effects_p)
    a1 = gnat_protect_expr (a1);

  if (a2_side_effects_p)
    a2 = gnat_protect_expr (a2);

  /* Process each dimension separately and compare the lengths.  If any
     dimension has a length known to be zero, set LENGTH_ZERO_P to true
     in order to suppress the comparison of the data at the end.  */
  while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
    {
      tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
      tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
      tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
      tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
      tree length1 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub1, lb1),
				 size_one_node);
      tree length2 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub2, lb2),
				 size_one_node);
      tree comparison, this_a1_is_null, this_a2_is_null;

      /* If the length of the first array is a constant, swap our operands
	 unless the length of the second array is the constant zero.  */
      if (TREE_CODE (length1) == INTEGER_CST && !integer_zerop (length2))
	{
	  tree tem;
	  bool btem;

	  tem = a1, a1 = a2, a2 = tem;
	  tem = t1, t1 = t2, t2 = tem;
	  tem = lb1, lb1 = lb2, lb2 = tem;
	  tem = ub1, ub1 = ub2, ub2 = tem;
	  tem = length1, length1 = length2, length2 = tem;
	  tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
	  btem = a1_side_effects_p, a1_side_effects_p = a2_side_effects_p,
	  a2_side_effects_p = btem;
	}

      /* If the length of the second array is the constant zero, we can just
	 use the original stored bounds for the first array and see whether
	 last < first holds.  */
      if (integer_zerop (length2))
	{
	  tree b = get_base_type (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));

	  length_zero_p = true;

	  ub1
	    = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
	  lb1
	    = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));

	  comparison = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
	  comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
	  if (EXPR_P (comparison))
	    SET_EXPR_LOCATION (comparison, loc);

	  this_a1_is_null = comparison;
	  this_a2_is_null = convert (result_type, boolean_true_node);
	}

      /* Otherwise, if the length is some other constant value, we know that
	 this dimension in the second array cannot be superflat, so we can
	 just use its length computed from the actual stored bounds.  */
      else if (TREE_CODE (length2) == INTEGER_CST)
	{
	  tree b = get_base_type (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));

	  ub1
	    = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
	  lb1
	    = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
	  /* Note that we know that UB2 and LB2 are constant and hence
	     cannot contain a PLACEHOLDER_EXPR.  */
	  ub2
	    = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2))));
	  lb2
	    = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2))));

	  comparison
	    = fold_build2_loc (loc, EQ_EXPR, result_type,
			       build_binary_op (MINUS_EXPR, b, ub1, lb1),
			       build_binary_op (MINUS_EXPR, b, ub2, lb2));
	  comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
	  if (EXPR_P (comparison))
	    SET_EXPR_LOCATION (comparison, loc);

	  this_a1_is_null
	    = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);

	  this_a2_is_null = convert (result_type, boolean_false_node);
	}

      /* Otherwise, compare the computed lengths.  */
      else
	{
	  length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
	  length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);

	  comparison
	    = fold_build2_loc (loc, EQ_EXPR, result_type, length1, length2);

	  /* If the length expression is of the form (cond ? val : 0), assume
	     that cond is equivalent to (length != 0).  That's guaranteed by
	     construction of the array types in gnat_to_gnu_entity.  */
	  if (TREE_CODE (length1) == COND_EXPR
	      && integer_zerop (TREE_OPERAND (length1, 2)))
	    this_a1_is_null
	      = invert_truthvalue_loc (loc, TREE_OPERAND (length1, 0));
	  else
	    this_a1_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
					       length1, size_zero_node);

	  /* Likewise for the second array.  */
	  if (TREE_CODE (length2) == COND_EXPR
	      && integer_zerop (TREE_OPERAND (length2, 2)))
	    this_a2_is_null
	      = invert_truthvalue_loc (loc, TREE_OPERAND (length2, 0));
	  else
	    this_a2_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
					       length2, size_zero_node);
	}
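
      /* Illustration: such a length typically has the form
	 (LB <= UB ? UB - LB + 1 : 0), so operand 0 of the COND_EXPR is
	 exactly the "dimension is nonempty" predicate.  */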

      /* Append expressions for this dimension to the final expressions.  */
      result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
				result, comparison);

      a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
				    this_a1_is_null, a1_is_null);

      a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
				    this_a2_is_null, a2_is_null);

      t1 = TREE_TYPE (t1);
      t2 = TREE_TYPE (t2);
    }

  /* Unless the length of some dimension is known to be zero, compare the
     data in the array.  */
  if (!length_zero_p)
    {
      tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));
      tree comparison;

      if (type)
	{
	  a1 = convert (type, a1),
	  a2 = convert (type, a2);
	}

      comparison = fold_build2_loc (loc, EQ_EXPR, result_type, a1, a2);

      result
	= build_binary_op (TRUTH_ANDIF_EXPR, result_type, result, comparison);
    }

  /* The result is also true if both sizes are zero.  */
  result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
			    build_binary_op (TRUTH_ANDIF_EXPR, result_type,
					     a1_is_null, a2_is_null),
			    result);

  /* If either operand has side-effects, they have to be evaluated before
     starting the comparison above since the place they would be otherwise
     evaluated could be wrong.  */
  if (a1_side_effects_p)
    result = build2 (COMPOUND_EXPR, result_type, a1, result);

  if (a2_side_effects_p)
    result = build2 (COMPOUND_EXPR, result_type, a2, result);

  return result;
}
/* Return an expression tree representing an equality comparison of P1 and P2,
   two objects of fat pointer type.  The result should be of type RESULT_TYPE.

   Two fat pointers are equal in one of two ways: (1) if both have a null
   pointer to the array or (2) if they contain the same pair of pointers.
   We perform the comparison in as efficient a manner as possible.  */

static tree
compare_fat_pointers (location_t loc, tree result_type, tree p1, tree p2)
{
  tree p1_array, p2_array, p1_bounds, p2_bounds, same_array, same_bounds;
  tree p1_array_is_null, p2_array_is_null;

  /* If either operand has side-effects, they have to be evaluated only once
     in spite of the multiple references to the operand in the comparison.  */
  p1 = gnat_protect_expr (p1);
  p2 = gnat_protect_expr (p2);

  /* The constant folder doesn't fold fat pointer types so we do it here.  */
  if (TREE_CODE (p1) == CONSTRUCTOR)
    p1_array = (*CONSTRUCTOR_ELTS (p1))[0].value;
  else
    p1_array = build_component_ref (p1, NULL_TREE,
				    TYPE_FIELDS (TREE_TYPE (p1)), true);

  p1_array_is_null
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array,
		       fold_convert_loc (loc, TREE_TYPE (p1_array),
					 null_pointer_node));

  if (TREE_CODE (p2) == CONSTRUCTOR)
    p2_array = (*CONSTRUCTOR_ELTS (p2))[0].value;
  else
    p2_array = build_component_ref (p2, NULL_TREE,
				    TYPE_FIELDS (TREE_TYPE (p2)), true);

  p2_array_is_null
    = fold_build2_loc (loc, EQ_EXPR, result_type, p2_array,
		       fold_convert_loc (loc, TREE_TYPE (p2_array),
					 null_pointer_node));

  /* If one of the pointers to the array is null, just compare the other.  */
  if (integer_zerop (p1_array))
    return p2_array_is_null;
  else if (integer_zerop (p2_array))
    return p1_array_is_null;

  /* Otherwise, do the fully-fledged comparison.  */
  same_array
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array, p2_array);

  if (TREE_CODE (p1) == CONSTRUCTOR)
    p1_bounds = (*CONSTRUCTOR_ELTS (p1))[1].value;
  else
    p1_bounds
      = build_component_ref (p1, NULL_TREE,
			     DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p1))), true);

  if (TREE_CODE (p2) == CONSTRUCTOR)
    p2_bounds = (*CONSTRUCTOR_ELTS (p2))[1].value;
  else
    p2_bounds
      = build_component_ref (p2, NULL_TREE,
			     DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p2))), true);

  same_bounds
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_bounds, p2_bounds);

  /* P1_ARRAY == P2_ARRAY && (P1_ARRAY == NULL || P1_BOUNDS == P2_BOUNDS).  */
  return build_binary_op (TRUTH_ANDIF_EXPR, result_type, same_array,
			  build_binary_op (TRUTH_ORIF_EXPR, result_type,
					   p1_array_is_null, same_bounds));
}
/* Compute the result of applying OP_CODE to LHS and RHS, where both are of
   type TYPE.  We know that TYPE is a modular type with a nonbinary
   modulus.  */

static tree
nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
			     tree rhs)
{
  tree modulus = TYPE_MODULUS (type);
  unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
  unsigned int precision;
  bool unsignedp = true;
  tree op_type = type;
  tree result;

  /* If this is an addition of a constant, convert it to a subtraction
     of a constant since we can do that faster.  */
  if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
    {
      rhs = fold_build2 (MINUS_EXPR, type, modulus, rhs);
      op_code = MINUS_EXPR;
    }
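
  /* For instance, with modulus 7, (lhs + 5) mod 7 becomes (lhs - 2) mod 7,
     since 7 - 5 = 2 and both forms have the same residue.  */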

  /* For the logical operations, we only need PRECISION bits.  For
     addition and subtraction, we need one more and for multiplication we
     need twice as many.  But we never want to make a size smaller than
     our size.  */
  if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
    needed_precision += 1;
  else if (op_code == MULT_EXPR)
    needed_precision *= 2;
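
  /* For instance, with modulus 7, NEEDED_PRECISION starts at 3 bits;
     addition or subtraction then needs 4 bits (worst case 6 + 6 = 12)
     and multiplication needs 6 bits (worst case 6 * 6 = 36).  */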

  precision = MAX (needed_precision, TYPE_PRECISION (op_type));

  /* Unsigned will do for everything but subtraction.  */
  if (op_code == MINUS_EXPR)
    unsignedp = false;

  /* If our type is the wrong signedness or isn't wide enough, make a new
     type and convert both our operands to it.  */
  if (TYPE_PRECISION (op_type) < precision
      || TYPE_UNSIGNED (op_type) != unsignedp)
    {
      /* Copy the node so we ensure it can be modified to make it modular.  */
      op_type = copy_node (gnat_type_for_size (precision, unsignedp));
      modulus = convert (op_type, modulus);
      SET_TYPE_MODULUS (op_type, modulus);
      TYPE_MODULAR_P (op_type) = 1;
      lhs = convert (op_type, lhs);
      rhs = convert (op_type, rhs);
    }

  /* Do the operation, then we'll fix it up.  */
  result = fold_build2 (op_code, op_type, lhs, rhs);

  /* For multiplication, we have no choice but to do a full modulus
     operation.  However, we want to do this in the narrowest
     possible size.  */
  if (op_code == MULT_EXPR)
    {
      tree div_type = copy_node (gnat_type_for_size (needed_precision, 1));
      modulus = convert (div_type, modulus);
      SET_TYPE_MODULUS (div_type, modulus);
      TYPE_MODULAR_P (div_type) = 1;
      result = convert (op_type,
			fold_build2 (TRUNC_MOD_EXPR, div_type,
				     convert (div_type, result), modulus));
    }

  /* For subtraction, add the modulus back if we are negative.  */
  else if (op_code == MINUS_EXPR)
    {
      result = gnat_protect_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
			    fold_build2 (LT_EXPR, boolean_type_node, result,
					 convert (op_type, integer_zero_node)),
			    fold_build2 (PLUS_EXPR, op_type, result, modulus),
			    result);
    }

  /* For the other operations, subtract the modulus if we are >= it.  */
  else
    {
      result = gnat_protect_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
			    fold_build2 (GE_EXPR, boolean_type_node,
					 result, modulus),
			    fold_build2 (MINUS_EXPR, op_type,
					 result, modulus),
			    result);
    }

  return convert (type, result);
}
/* This page contains routines that implement the Ada semantics with regard
   to atomic objects.  They are fully piggybacked on the middle-end support
   for atomic loads and stores.

   *** Memory barriers and volatile objects ***

   We implement the weakened form of the C.6(16) clause that was introduced
   in Ada 2012 (AI05-117).  Earlier forms of this clause wouldn't have been
   implementable without significant performance hits on modern platforms.

   We also take advantage of the requirements imposed on shared variables by
   9.10 (conditions for sequential actions) to have non-erroneous execution
   and consider that C.6(16) and C.6(17) only prescribe a uniform order of
   volatile updates with regard to sequential actions, i.e. with regard to
   reads or updates of atomic objects.

   As such, an update of an atomic object by a task requires that all earlier
   accesses to volatile objects have completed.  Similarly, later accesses to
   volatile objects cannot be reordered before the update of the atomic
   object.  So, memory barriers both before and after the atomic update are
   needed.

   For a read of an atomic object, to avoid seeing writes of volatile objects
   by a task earlier than by the other tasks, a memory barrier is needed
   before the atomic read.  Finally, to avoid reordering later reads or
   updates of volatile objects to before the atomic read, a barrier is needed
   after the atomic read.

   So, memory barriers are needed before and after atomic reads and updates.
   And, in order to simplify the implementation, we use full memory barriers
   in all cases, i.e. we enforce sequential consistency for atomic
   accesses.  */
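
/* Illustration: for a properly sized 4-byte atomic object X, the routines
   below expand a read as __atomic_load_4 (&X, __ATOMIC_SEQ_CST) and an
   update as __atomic_store_4 (&X, V, __ATOMIC_SEQ_CST), both of which
   imply the full barriers described above.  */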

/* Return the size of TYPE, which must be a positive power of 2.  */

static unsigned int
resolve_atomic_size (tree type)
{
  unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE_UNIT (type));

  if (size == 1 || size == 2 || size == 4 || size == 8 || size == 16)
    return size;

  /* We shouldn't reach here without having already detected that the size
     isn't compatible with an atomic access.  */
  gcc_assert (Serious_Errors_Detected);

  return 0;
}

/* Build an atomic load for the underlying atomic object in SRC.  */

tree
build_atomic_load (tree src)
{
  tree ptr_type
    = build_pointer_type
      (build_qualified_type (void_type_node, TYPE_QUAL_VOLATILE));
  tree mem_model = build_int_cst (integer_type_node, MEMMODEL_SEQ_CST);
  tree orig_src = src;
  tree t, addr, val;
  unsigned int size;
  int fncode;

  /* Remove conversions to get the address of the underlying object.  */
  src = remove_conversions (src, false);
  size = resolve_atomic_size (TREE_TYPE (src));
  if (size == 0)
    return orig_src;
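
  /* The _N variants of the atomic built-ins are laid out consecutively by
     log2 of the access size, so e.g. SIZE == 4 selects
     BUILT_IN_ATOMIC_LOAD_4.  */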
  fncode = (int) BUILT_IN_ATOMIC_LOAD_N + exact_log2 (size) + 1;
  t = builtin_decl_implicit ((enum built_in_function) fncode);

  addr = build_unary_op (ADDR_EXPR, ptr_type, src);
  val = build_call_expr (t, 2, addr, mem_model);

  /* First reinterpret the loaded bits in the original type of the load,
     then convert to the expected result type.  */
  t = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (src), val);
  return convert (TREE_TYPE (orig_src), t);
}

/* Build an atomic store from SRC to the underlying atomic object in DEST.  */

tree
build_atomic_store (tree dest, tree src)
{
  tree ptr_type
    = build_pointer_type
      (build_qualified_type (void_type_node, TYPE_QUAL_VOLATILE));
  tree mem_model = build_int_cst (integer_type_node, MEMMODEL_SEQ_CST);
  tree orig_dest = dest;
  tree t, int_type, addr;
  unsigned int size;
  int fncode;

  /* Remove conversions to get the address of the underlying object.  */
  dest = remove_conversions (dest, false);
  size = resolve_atomic_size (TREE_TYPE (dest));
  if (size == 0)
    return build_binary_op (MODIFY_EXPR, NULL_TREE, orig_dest, src);

  fncode = (int) BUILT_IN_ATOMIC_STORE_N + exact_log2 (size) + 1;
  t = builtin_decl_implicit ((enum built_in_function) fncode);
  int_type = gnat_type_for_size (BITS_PER_UNIT * size, 1);

  /* First convert the bits to be stored to the original type of the store,
     then reinterpret them in the effective type.  But if the original type
     is a padded type with the same size, convert to the inner type instead,
     as we don't want to artificially introduce a CONSTRUCTOR here.  */
  if (TYPE_IS_PADDING_P (TREE_TYPE (dest))
      && TYPE_SIZE (TREE_TYPE (dest))
	 == TYPE_SIZE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (dest)))))
    src = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (dest))), src);
  else
    src = convert (TREE_TYPE (dest), src);
  src = fold_build1 (VIEW_CONVERT_EXPR, int_type, src);
  addr = build_unary_op (ADDR_EXPR, ptr_type, dest);

  return build_call_expr (t, 3, addr, src, mem_model);
}

/* Make a binary operation of kind OP_CODE.  RESULT_TYPE is the type
   desired for the result.  Usually the operation is to be performed
   in that type.  For INIT_EXPR and MODIFY_EXPR, RESULT_TYPE must be
   NULL_TREE.  For ARRAY_REF, RESULT_TYPE may be NULL_TREE, in which
   case the type to be used will be derived from the operands.

   This function is very much unlike the ones for C and C++ since we
   have already done any type conversion and matching required.  All we
   have to do here is validate the work done by SEM and handle subtypes.  */
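
/* For instance, gigi typically emits an Ada assignment as
   build_binary_op (MODIFY_EXPR, NULL_TREE, gnu_lhs, gnu_rhs) and an
   integer addition as build_binary_op (PLUS_EXPR, gnu_type, gnu_lhs,
   gnu_rhs), with operands already converted as described above.  */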

tree
build_binary_op (enum tree_code op_code, tree result_type,
		 tree left_operand, tree right_operand)
{
  tree left_type = TREE_TYPE (left_operand);
  tree right_type = TREE_TYPE (right_operand);
  tree left_base_type = get_base_type (left_type);
  tree right_base_type = get_base_type (right_type);
  tree operation_type = result_type;
  tree best_type = NULL_TREE;
  tree modulus, result;
  bool has_side_effects = false;

  if (operation_type
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  if (operation_type
      && TREE_CODE (operation_type) == INTEGER_TYPE
      && TYPE_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  modulus = (operation_type
	     && TREE_CODE (operation_type) == INTEGER_TYPE
	     && TYPE_MODULAR_P (operation_type)
	     ? TYPE_MODULUS (operation_type) : NULL_TREE);

  switch (op_code)
    {
    case INIT_EXPR:
    case MODIFY_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (result_type == NULL_TREE);
#endif
      /* If there were integral or pointer conversions on the LHS, remove
	 them; we'll be putting them back below if needed.  Likewise for
	 conversions between array and record types, except for justified
	 modular types.  But don't do this if the right operand is not
	 BLKmode (for packed arrays) unless we are not changing the mode.  */
      while ((CONVERT_EXPR_P (left_operand)
	      || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
	     && (((INTEGRAL_TYPE_P (left_type)
		   || POINTER_TYPE_P (left_type))
		  && (INTEGRAL_TYPE_P (TREE_TYPE
				       (TREE_OPERAND (left_operand, 0)))
		      || POINTER_TYPE_P (TREE_TYPE
					 (TREE_OPERAND (left_operand, 0)))))
		 || (((TREE_CODE (left_type) == RECORD_TYPE
		       && !TYPE_JUSTIFIED_MODULAR_P (left_type))
		      || TREE_CODE (left_type) == ARRAY_TYPE)
		     && ((TREE_CODE (TREE_TYPE
				     (TREE_OPERAND (left_operand, 0)))
			  == RECORD_TYPE)
			 || (TREE_CODE (TREE_TYPE
					(TREE_OPERAND (left_operand, 0)))
			     == ARRAY_TYPE))
		     && (TYPE_MODE (right_type) == BLKmode
			 || (TYPE_MODE (left_type)
			     == TYPE_MODE (TREE_TYPE
					   (TREE_OPERAND
					    (left_operand, 0))))))))
	{
	  left_operand = TREE_OPERAND (left_operand, 0);
	  left_type = TREE_TYPE (left_operand);
	}

      /* If a class-wide type may be involved, force use of the RHS type.  */
      if ((TREE_CODE (right_type) == RECORD_TYPE
	   || TREE_CODE (right_type) == UNION_TYPE)
	  && TYPE_ALIGN_OK (right_type))
	operation_type = right_type;

      /* If we are copying between padded objects with compatible types, use
	 the padded view of the objects, this is very likely more efficient.
	 Likewise for a padded object that is assigned a constructor, if we
	 can convert the constructor to the inner type, to avoid putting a
	 VIEW_CONVERT_EXPR on the LHS.  But don't do so if we wouldn't have
	 actually copied anything.  */
      else if (TYPE_IS_PADDING_P (left_type)
	       && TREE_CONSTANT (TYPE_SIZE (left_type))
	       && ((TREE_CODE (right_operand) == COMPONENT_REF
		    && TYPE_MAIN_VARIANT (left_type)
		       == TYPE_MAIN_VARIANT
			  (TREE_TYPE (TREE_OPERAND (right_operand, 0))))
		   || (TREE_CODE (right_operand) == CONSTRUCTOR
		       && !CONTAINS_PLACEHOLDER_P
			   (DECL_SIZE (TYPE_FIELDS (left_type)))))
	       && !integer_zerop (TYPE_SIZE (right_type)))
	{
	  /* We make an exception for a BLKmode type padding a non-BLKmode
	     inner type and do the conversion of the LHS right away, since
	     unchecked_convert wouldn't do it properly.  */
	  if (TYPE_MODE (left_type) == BLKmode
	      && TYPE_MODE (right_type) != BLKmode
	      && TREE_CODE (right_operand) != CONSTRUCTOR)
	    {
	      operation_type = right_type;
	      left_operand = convert (operation_type, left_operand);
	      left_type = operation_type;
	    }
	  else
	    operation_type = left_type;
	}

      /* If we have a call to a function that returns an unconstrained type
	 with default discriminant on the RHS, use the RHS type (which is
	 padded) as we cannot compute the size of the actual assignment.  */
      else if (TREE_CODE (right_operand) == CALL_EXPR
	       && TYPE_IS_PADDING_P (right_type)
	       && CONTAINS_PLACEHOLDER_P
		  (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS (right_type)))))
	operation_type = right_type;

      /* Find the best type to use for copying between aggregate types.  */
      else if (((TREE_CODE (left_type) == ARRAY_TYPE
		 && TREE_CODE (right_type) == ARRAY_TYPE)
		|| (TREE_CODE (left_type) == RECORD_TYPE
		    && TREE_CODE (right_type) == RECORD_TYPE))
	       && (best_type = find_common_type (left_type, right_type)))
	operation_type = best_type;

      /* Otherwise use the LHS type.  */
      else
	operation_type = left_type;

      /* Ensure everything on the LHS is valid.  If we have a field reference,
	 strip anything that get_inner_reference can handle.  Then remove any
	 conversions between types having the same code and mode.  And mark
	 VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE.  When done, we must have
	 either an INDIRECT_REF, a NULL_EXPR or a DECL node.  */
      result = left_operand;
      while (true)
	{
	  tree restype = TREE_TYPE (result);

	  if (TREE_CODE (result) == COMPONENT_REF
	      || TREE_CODE (result) == ARRAY_REF
	      || TREE_CODE (result) == ARRAY_RANGE_REF)
	    while (handled_component_p (result))
	      result = TREE_OPERAND (result, 0);
	  else if (TREE_CODE (result) == REALPART_EXPR
		   || TREE_CODE (result) == IMAGPART_EXPR
		   || (CONVERT_EXPR_P (result)
		       && (((TREE_CODE (restype)
			     == TREE_CODE (TREE_TYPE
					   (TREE_OPERAND (result, 0))))
			    && (TYPE_MODE (TREE_TYPE
					   (TREE_OPERAND (result, 0)))
				== TYPE_MODE (restype)))
			   || TYPE_ALIGN_OK (restype))))
	    result = TREE_OPERAND (result, 0);
	  else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
	    {
	      TREE_ADDRESSABLE (result) = 1;
	      result = TREE_OPERAND (result, 0);
	    }
	  else
	    break;
	}

      gcc_assert (TREE_CODE (result) == INDIRECT_REF
		  || TREE_CODE (result) == NULL_EXPR
		  || DECL_P (result));

      /* Convert the right operand to the operation type unless it is
	 either already of the correct type or if the type involves a
	 placeholder, since the RHS may not have the same record type.  */
      if (operation_type != right_type
	  && !CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type)))
	{
	  right_operand = convert (operation_type, right_operand);
	  right_type = operation_type;
	}

      /* If the left operand is not of the same type as the operation
	 type, wrap it up in a VIEW_CONVERT_EXPR.  */
      if (left_type != operation_type)
	left_operand = unchecked_convert (operation_type, left_operand, false);

      has_side_effects = true;
      modulus = NULL_TREE;
      break;

    case ARRAY_REF:
      if (!operation_type)
	operation_type = TREE_TYPE (left_type);

      /* ... fall through ... */

    case ARRAY_RANGE_REF:
      /* First look through conversion between type variants.  Note that
	 this changes neither the operation type nor the type domain.  */
      if (TREE_CODE (left_operand) == VIEW_CONVERT_EXPR
	  && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (left_operand, 0)))
	     == TYPE_MAIN_VARIANT (left_type))
	{
	  left_operand = TREE_OPERAND (left_operand, 0);
	  left_type = TREE_TYPE (left_operand);
	}

      /* For a range, make sure the element type is consistent.  */
      if (op_code == ARRAY_RANGE_REF
	  && TREE_TYPE (operation_type) != TREE_TYPE (left_type))
	operation_type = build_array_type (TREE_TYPE (left_type),
					   TYPE_DOMAIN (operation_type));

      /* Then convert the right operand to its base type.  This will prevent
	 unneeded sign conversions when sizetype is wider than integer.  */
      right_operand = convert (right_base_type, right_operand);
      right_operand = convert_to_index_type (right_operand);
      modulus = NULL_TREE;
      break;

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
#endif
      operation_type = left_base_type;
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);
      break;

    case GE_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
#endif
      /* If either operand is a NULL_EXPR, just return a new one.  */
      if (TREE_CODE (left_operand) == NULL_EXPR)
	return build2 (op_code, result_type,
		       build1 (NULL_EXPR, integer_type_node,
			       TREE_OPERAND (left_operand, 0)),
		       integer_zero_node);

      else if (TREE_CODE (right_operand) == NULL_EXPR)
	return build2 (op_code, result_type,
		       build1 (NULL_EXPR, integer_type_node,
			       TREE_OPERAND (right_operand, 0)),
		       integer_zero_node);

      /* If either object is a justified modular type, get the
	 fields from within.  */
      if (TREE_CODE (left_type) == RECORD_TYPE
	  && TYPE_JUSTIFIED_MODULAR_P (left_type))
	{
	  left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
				  left_operand);
	  left_type = TREE_TYPE (left_operand);
	  left_base_type = get_base_type (left_type);
	}

      if (TREE_CODE (right_type) == RECORD_TYPE
	  && TYPE_JUSTIFIED_MODULAR_P (right_type))
	{
	  right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
				   right_operand);
	  right_type = TREE_TYPE (right_operand);
	  right_base_type = get_base_type (right_type);
	}

      /* If both objects are arrays, compare them specially.  */
      if ((TREE_CODE (left_type) == ARRAY_TYPE
	   || (TREE_CODE (left_type) == INTEGER_TYPE
	       && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
	  && (TREE_CODE (right_type) == ARRAY_TYPE
	      || (TREE_CODE (right_type) == INTEGER_TYPE
		  && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
	{
	  result = compare_arrays (input_location,
				   result_type, left_operand, right_operand);
	  if (op_code == NE_EXPR)
	    result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
	  else
	    gcc_assert (op_code == EQ_EXPR);

	  return result;
	}

      /* Otherwise, the base types must be the same, unless they are both fat
	 pointer types or record types.  In the latter case, use the best type
	 and convert both operands to that type.  */
      if (left_base_type != right_base_type)
	{
	  if (TYPE_IS_FAT_POINTER_P (left_base_type)
	      && TYPE_IS_FAT_POINTER_P (right_base_type))
	    {
	      gcc_assert (TYPE_MAIN_VARIANT (left_base_type)
			  == TYPE_MAIN_VARIANT (right_base_type));
	      best_type = left_base_type;
	    }

	  else if (TREE_CODE (left_base_type) == RECORD_TYPE
		   && TREE_CODE (right_base_type) == RECORD_TYPE)
	    {
	      /* The only way this is permitted is if both types have the same
		 name.  In that case, one of them must not be self-referential.
		 Use it as the best type.  Even better with a fixed size.  */
	      gcc_assert (TYPE_NAME (left_base_type)
			  && TYPE_NAME (left_base_type)
			     == TYPE_NAME (right_base_type));

	      if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
		best_type = left_base_type;
	      else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
		best_type = right_base_type;
	      else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
		best_type = left_base_type;
	      else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
		best_type = right_base_type;
	      else
		gcc_unreachable ();
	    }

	  else
	    gcc_unreachable ();

	  left_operand = convert (best_type, left_operand);
	  right_operand = convert (best_type, right_operand);
	}
      else
	{
	  left_operand = convert (left_base_type, left_operand);
	  right_operand = convert (right_base_type, right_operand);
	}

      /* If both objects are fat pointers, compare them specially.  */
      if (TYPE_IS_FAT_POINTER_P (left_base_type))
	{
	  result
	    = compare_fat_pointers (input_location,
				    result_type, left_operand, right_operand);
	  if (op_code == NE_EXPR)
	    result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
	  else
	    gcc_assert (op_code == EQ_EXPR);

	  return result;
	}

      modulus = NULL_TREE;
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* The RHS of a shift can be any type.  Also, ignore any modulus
	 (we used to abort, but this is needed for unchecked conversion
	 to modular types).  Otherwise, processing is the same as normal.  */
      gcc_assert (operation_type == left_base_type);
      modulus = NULL_TREE;
      left_operand = convert (operation_type, left_operand);
      break;

    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      /* For binary modulus, if the inputs are in range, so are the
	 outputs.  */
      if (modulus && integer_pow2p (modulus))
	modulus = NULL_TREE;
      goto common;

    case COMPLEX_EXPR:
      gcc_assert (TREE_TYPE (result_type) == left_base_type
		  && TREE_TYPE (result_type) == right_base_type);
      left_operand = convert (left_base_type, left_operand);
      right_operand = convert (right_base_type, right_operand);
      break;

    case TRUNC_DIV_EXPR:   case TRUNC_MOD_EXPR:
    case CEIL_DIV_EXPR:    case CEIL_MOD_EXPR:
    case FLOOR_DIV_EXPR:   case FLOOR_MOD_EXPR:
    case ROUND_DIV_EXPR:   case ROUND_MOD_EXPR:
      /* These always produce results lower than either operand.  */
      modulus = NULL_TREE;
      goto common;

    case POINTER_PLUS_EXPR:
      gcc_assert (operation_type == left_base_type
		  && sizetype == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (sizetype, right_operand);
      break;

    case PLUS_NOMOD_EXPR:
    case MINUS_NOMOD_EXPR:
      if (op_code == PLUS_NOMOD_EXPR)
	op_code = PLUS_EXPR;
      else
	op_code = MINUS_EXPR;
      modulus = NULL_TREE;

      /* ... fall through ... */

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* Avoid doing arithmetic in ENUMERAL_TYPE or BOOLEAN_TYPE like the
	 other compilers.  Contrary to C, Ada doesn't allow arithmetic in
	 these types but can generate addition/subtraction for Succ/Pred.  */
      if (operation_type
	  && (TREE_CODE (operation_type) == ENUMERAL_TYPE
	      || TREE_CODE (operation_type) == BOOLEAN_TYPE))
	operation_type = left_base_type = right_base_type
	  = gnat_type_for_mode (TYPE_MODE (operation_type),
				TYPE_UNSIGNED (operation_type));

      /* ... fall through ... */

    default:
    common:
      /* The result type should be the same as the base types of both
	 operands (and they should be the same).  Convert everything to
	 the result type.  */
      gcc_assert (operation_type == left_base_type
		  && left_base_type == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);
    }

  if (modulus && !integer_pow2p (modulus))
    {
      result = nonbinary_modular_operation (op_code, operation_type,
					    left_operand, right_operand);
      modulus = NULL_TREE;
    }
  /* If either operand is a NULL_EXPR, just return a new one.  */
  else if (TREE_CODE (left_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
  else if (TREE_CODE (right_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
  else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
    result = fold (build4 (op_code, operation_type, left_operand,
			   right_operand, NULL_TREE, NULL_TREE));
  else if (op_code == INIT_EXPR || op_code == MODIFY_EXPR)
    result = build2 (op_code, void_type_node, left_operand, right_operand);
  else
    result
      = fold_build2 (op_code, operation_type, left_operand, right_operand);

  if (TREE_CONSTANT (result))
    ;
  else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
    {
      if (TYPE_VOLATILE (operation_type))
	TREE_THIS_VOLATILE (result) = 1;
    }
  else
    TREE_CONSTANT (result)
      |= (TREE_CONSTANT (left_operand) && TREE_CONSTANT (right_operand));

  TREE_SIDE_EFFECTS (result) |= has_side_effects;

  /* If we are working with modular types, perform the MOD operation
     if something above hasn't eliminated the need for it.  */
  if (modulus)
    result = fold_build2 (FLOOR_MOD_EXPR, operation_type, result,
			  convert (operation_type, modulus));

  if (result_type && result_type != operation_type)
    result = convert (result_type, result);

  return result;
}

/* Similar, but for unary operations.  */

tree
build_unary_op (enum tree_code op_code, tree result_type, tree operand)
{
  tree type = TREE_TYPE (operand);
  tree base_type = get_base_type (type);
  tree operation_type = result_type;
  tree result;

  if (operation_type
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  if (operation_type
      && TREE_CODE (operation_type) == INTEGER_TYPE
      && TYPE_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  switch (op_code)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      if (!operation_type)
	result_type = operation_type = TREE_TYPE (type);
      else
	gcc_assert (result_type == TREE_TYPE (type));

      result = fold_build1 (op_code, operation_type, operand);
      break;

    case TRUTH_NOT_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
#endif
      result = invert_truthvalue_loc (EXPR_LOCATION (operand), operand);
      /* When not optimizing, fold the result as invert_truthvalue_loc
	 doesn't fold the result of comparisons.  This is intended to undo
	 the trick used for boolean rvalues in gnat_to_gnu.  */
      if (!optimize)
	result = fold (result);
      break;

    case ATTR_ADDR_EXPR:
    case ADDR_EXPR:
      switch (TREE_CODE (operand))
	{
	case INDIRECT_REF:
	case UNCONSTRAINED_ARRAY_REF:
	  result = TREE_OPERAND (operand, 0);

	  /* Make sure the type here is a pointer, not a reference.
	     GCC wants pointer types for function addresses.  */
	  if (!result_type)
	    result_type = build_pointer_type (type);

	  /* If the underlying object can alias everything, propagate the
	     property since we are effectively retrieving the object.  */
	  if (POINTER_TYPE_P (TREE_TYPE (result))
	      && TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (result)))
	    {
	      if (TREE_CODE (result_type) == POINTER_TYPE
		  && !TYPE_REF_CAN_ALIAS_ALL (result_type))
		result_type
		  = build_pointer_type_for_mode (TREE_TYPE (result_type),
						 TYPE_MODE (result_type),
						 true);
	      else if (TREE_CODE (result_type) == REFERENCE_TYPE
		       && !TYPE_REF_CAN_ALIAS_ALL (result_type))
		result_type
		  = build_reference_type_for_mode (TREE_TYPE (result_type),
						   TYPE_MODE (result_type),
						   true);
	    }
	  break;

	case NULL_EXPR:
	  result = operand;
	  TREE_TYPE (result) = type = build_pointer_type (type);
	  break;

	case COMPOUND_EXPR:
	  /* Fold a compound expression if it has unconstrained array type
	     since the middle-end cannot handle it.  But we don't do it in
	     the general case because it may introduce aliasing issues if the
	     first operand is an indirect assignment and the second operand
	     the corresponding address, e.g. for an allocator.  */
	  if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
	    {
	      result = build_unary_op (ADDR_EXPR, result_type,
				       TREE_OPERAND (operand, 1));
	      result = build2 (COMPOUND_EXPR, TREE_TYPE (result),
			       TREE_OPERAND (operand, 0), result);
	      break;
	    }
	  goto common;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	case COMPONENT_REF:
	case BIT_FIELD_REF:
	  /* If this is for 'Address, find the address of the prefix and add
	     the offset to the field.  Otherwise, do this the normal way.  */
	  if (op_code == ATTR_ADDR_EXPR)
	    {
	      HOST_WIDE_INT bitsize;
	      HOST_WIDE_INT bitpos;
	      tree offset, inner;
	      machine_mode mode;
	      int unsignedp, volatilep;

	      inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
					   &mode, &unsignedp, &volatilep,
					   false);

	      /* If INNER is a padding type whose field has a self-referential
		 size, convert to that inner type.  We know the offset is zero
		 and we need to have that type visible.  */
	      if (TYPE_IS_PADDING_P (TREE_TYPE (inner))
		  && CONTAINS_PLACEHOLDER_P
		     (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS
					    (TREE_TYPE (inner))))))
		inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
				 inner);

	      /* Compute the offset as a byte offset from INNER.  */
	      if (!offset)
		offset = size_zero_node;

	      offset = size_binop (PLUS_EXPR, offset,
				   size_int (bitpos / BITS_PER_UNIT));
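
	      /* E.g. a component at bit position 40 with no variable
		 offset yields a byte offset of 40 / 8 = 5.  */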

	      /* Take the address of INNER, convert the offset to void *, and
		 add them.  It will later be converted to the desired result
		 type, if any.  */
	      inner = build_unary_op (ADDR_EXPR, NULL_TREE, inner);
	      inner = convert (ptr_void_type_node, inner);
	      result = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
					inner, offset);
	      result = convert (build_pointer_type (TREE_TYPE (operand)),
				result);
	      break;
	    }
	  goto common;

	case CONSTRUCTOR:
	  /* If this is just a constructor for a padded record, we can
	     just take the address of the single field and convert it to
	     a pointer to our type.  */
	  if (TYPE_IS_PADDING_P (type))
	    {
	      result = (*CONSTRUCTOR_ELTS (operand))[0].value;
	      result = convert (build_pointer_type (TREE_TYPE (operand)),
				build_unary_op (ADDR_EXPR, NULL_TREE, result));
	      break;
	    }
	  goto common;

	case NOP_EXPR:
	  if (AGGREGATE_TYPE_P (type)
	      && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
	    return build_unary_op (ADDR_EXPR, result_type,
				   TREE_OPERAND (operand, 0));

	  /* ... fallthru ... */

	case VIEW_CONVERT_EXPR:
	  /* If this is just a variant conversion or if the conversion doesn't
	     change the mode, get the result type from this type and go down.
	     This is needed for conversions of CONST_DECLs, to eventually get
	     to the address of their CORRESPONDING_VARs.  */
	  if ((TYPE_MAIN_VARIANT (type)
	       == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (operand, 0))))
	      || (TYPE_MODE (type) != BLKmode
		  && (TYPE_MODE (type)
		      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0))))))
	    return build_unary_op (ADDR_EXPR,
				   (result_type ? result_type
				    : build_pointer_type (type)),
				   TREE_OPERAND (operand, 0));
	  goto common;

	case CONST_DECL:
	  operand = DECL_CONST_CORRESPONDING_VAR (operand);

	  /* ... fall through ... */

	default:
	common:

	  /* If we are taking the address of a padded record whose field
	     contains a template, take the address of the field.  */
	  if (TYPE_IS_PADDING_P (type)
	      && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
	      && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
	    {
	      type = TREE_TYPE (TYPE_FIELDS (type));
	      operand = convert (type, operand);
	    }

	  gnat_mark_addressable (operand);
	  result = build_fold_addr_expr (operand);
	}

      TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
      break;

    case INDIRECT_REF:
      {
	tree t = remove_conversions (operand, false);
	bool can_never_be_null = DECL_P (t) && DECL_CAN_NEVER_BE_NULL_P (t);

	/* If TYPE is a thin pointer, either first retrieve the base if this
	   is an expression with an offset built for the initialization of an
	   object with an unconstrained nominal subtype, or else convert to
	   the fat pointer.  */
	if (TYPE_IS_THIN_POINTER_P (type))
	  {
	    tree rec_type = TREE_TYPE (type);

	    if (TREE_CODE (operand) == POINTER_PLUS_EXPR
		&& TREE_OPERAND (operand, 1)
		   == byte_position (DECL_CHAIN (TYPE_FIELDS (rec_type)))
		&& TREE_CODE (TREE_OPERAND (operand, 0)) == NOP_EXPR)
	      {
		operand = TREE_OPERAND (TREE_OPERAND (operand, 0), 0);
		type = TREE_TYPE (operand);
	      }
	    else if (TYPE_UNCONSTRAINED_ARRAY (rec_type))
	      {
		operand
		  = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (rec_type)),
			     operand);
		type = TREE_TYPE (operand);
	      }
	  }

	/* If we want to refer to an unconstrained array, use the appropriate
	   expression.  But this will never survive down to the back-end.  */
	if (TYPE_IS_FAT_POINTER_P (type))
	  {
	    result = build1 (UNCONSTRAINED_ARRAY_REF,
			     TYPE_UNCONSTRAINED_ARRAY (type), operand);
	    TREE_READONLY (result)
	      = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
	  }

	/* If we are dereferencing an ADDR_EXPR, return its operand.  */
	else if (TREE_CODE (operand) == ADDR_EXPR)
	  result = TREE_OPERAND (operand, 0);

	/* Otherwise, build and fold the indirect reference.  */
	else
	  {
	    result = build_fold_indirect_ref (operand);
	    TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
	  }

	if (!TYPE_IS_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)))
	  {
	    TREE_SIDE_EFFECTS (result) = 1;
	    if (TREE_CODE (result) == INDIRECT_REF)
	      TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
	  }

	if ((TREE_CODE (result) == INDIRECT_REF
	     || TREE_CODE (result) == UNCONSTRAINED_ARRAY_REF)
	    && can_never_be_null)
	  TREE_THIS_NOTRAP (result) = 1;

	break;
      }

    case NEGATE_EXPR:
    case BIT_NOT_EXPR:
      {
	tree modulus = ((operation_type
			 && TREE_CODE (operation_type) == INTEGER_TYPE
			 && TYPE_MODULAR_P (operation_type))
			? TYPE_MODULUS (operation_type) : NULL_TREE);
	int mod_pow2 = modulus && integer_pow2p (modulus);

	/* If this is a modular type, there are various possibilities
	   depending on the operation and whether the modulus is a
	   power of two or not.  */

	if (modulus)
	  {
	    gcc_assert (operation_type == base_type);
	    operand = convert (operation_type, operand);

	    /* The fastest in the negate case for binary modulus is
	       the straightforward code; the TRUNC_MOD_EXPR below
	       is an AND operation.  */
	    if (op_code == NEGATE_EXPR && mod_pow2)
	      result = fold_build2 (TRUNC_MOD_EXPR, operation_type,
				    fold_build1 (NEGATE_EXPR, operation_type,
						 operand),
				    modulus);

	    /* For the nonbinary negate case, return zero for a zero operand,
	       else return the modulus minus the operand.  If the modulus
	       is a power of two minus one, we can do the subtraction as an
	       XOR since it is equivalent and faster on most machines.  */
	    else if (op_code == NEGATE_EXPR && !mod_pow2)
	      {
		if (integer_pow2p (fold_build2 (PLUS_EXPR, operation_type,
						modulus,
						convert (operation_type,
							 integer_one_node))))
		  result = fold_build2 (BIT_XOR_EXPR, operation_type,
					operand, modulus);
		else
		  result = fold_build2 (MINUS_EXPR, operation_type,
					modulus, operand);

		result = fold_build3 (COND_EXPR, operation_type,
				      fold_build2 (NE_EXPR,
						   boolean_type_node,
						   operand,
						   convert
						   (operation_type,
						    integer_zero_node)),
				      result, operand);
	      }
	    else
	      {
		/* For the NOT cases, we need a constant equal to
		   the modulus minus one.  For a binary modulus, we
		   XOR against the constant and subtract the operand from
		   that constant for nonbinary modulus.  */

		tree cnst = fold_build2 (MINUS_EXPR, operation_type, modulus,
					 convert (operation_type,
						  integer_one_node));

		if (mod_pow2)
		  result = fold_build2 (BIT_XOR_EXPR, operation_type,
					operand, cnst);
		else
		  result = fold_build2 (MINUS_EXPR, operation_type,
					cnst, operand);
	      }
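
	    /* E.g. with binary modulus 8, CNST is 7 and "not X" is X xor 7;
	       with nonbinary modulus 7, CNST is 6 and "not X" is 6 - X.  */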
1554 break;
1558 /* ... fall through ... */
1560 default:
1561 gcc_assert (operation_type == base_type);
1562 result = fold_build1 (op_code, operation_type,
1563 convert (operation_type, operand));
1566 if (result_type && TREE_TYPE (result) != result_type)
1567 result = convert (result_type, result);
1569 return result;

/* Similar, but for COND_EXPR.  */

tree
build_cond_expr (tree result_type, tree condition_operand,
		 tree true_operand, tree false_operand)
{
  bool addr_p = false;
  tree result;

  /* The front-end verified that the result, true and false operands have
     the same base type.  Convert everything to the result type.  */
  true_operand = convert (result_type, true_operand);
  false_operand = convert (result_type, false_operand);

  /* If the result type is unconstrained, take the address of the operands and
     then dereference the result.  Likewise if the result type is passed by
     reference, because creating a temporary of this type is not allowed.  */
  if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
      || TYPE_IS_BY_REFERENCE_P (result_type)
      || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
    {
      result_type = build_pointer_type (result_type);
      true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
      false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
      addr_p = true;
    }

  result = fold_build3 (COND_EXPR, result_type, condition_operand,
			true_operand, false_operand);

  /* If we have a common SAVE_EXPR (possibly surrounded by arithmetics)
     in both arms, make sure it gets evaluated by moving it ahead of the
     conditional expression.  This is necessary because it is evaluated
     in only one place at run time and would otherwise be uninitialized
     in one of the arms.  */
  true_operand = skip_simple_arithmetic (true_operand);
  false_operand = skip_simple_arithmetic (false_operand);

  if (true_operand == false_operand && TREE_CODE (true_operand) == SAVE_EXPR)
    result = build2 (COMPOUND_EXPR, result_type, true_operand, result);
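
  /* E.g. if both arms contain SAVE_EXPR <X>, only one arm would compute X
     at run time, so the COMPOUND_EXPR built above forces X to be evaluated
     before the condition is tested.  */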

  if (addr_p)
    result = build_unary_op (INDIRECT_REF, NULL_TREE, result);

  return result;
}

/* Similar, but for COMPOUND_EXPR.  */

tree
build_compound_expr (tree result_type, tree stmt_operand, tree expr_operand)
{
  bool addr_p = false;
  tree result;

  /* If the result type is unconstrained, take the address of the operand and
     then dereference the result.  Likewise if the result type is passed by
     reference, but this is natively handled in the gimplifier.  */
  if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
      || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
    {
      result_type = build_pointer_type (result_type);
      expr_operand = build_unary_op (ADDR_EXPR, result_type, expr_operand);
      addr_p = true;
    }

  result = fold_build2 (COMPOUND_EXPR, result_type, stmt_operand,
			expr_operand);

  if (addr_p)
    result = build_unary_op (INDIRECT_REF, NULL_TREE, result);

  return result;
}
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr, this
   doesn't fold the call, so it will always return a CALL_EXPR.  */

tree
build_call_n_expr (tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  return fn;
}
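/* Hypothetical usage sketch: a two-argument runtime call could be built as

     gnu_call = build_call_n_expr (gnu_fndecl, 2, gnu_arg1, gnu_arg2);

   which always yields a genuine CALL_EXPR, in contrast with
   build_call_expr, which may fold the call away.  */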
/* Call a function that raises an exception and pass the line number and file
   name, if requested.  MSG says which exception function to call.

   GNAT_NODE is the gnat node conveying the source location for which the
   error should be signaled, or Empty in which case the error is signaled
   on the current ref_file_name/input_line.

   KIND says which kind of exception this is for
   (N_Raise_{Constraint,Storage,Program}_Error).  */

tree
build_call_raise (int msg, Node_Id gnat_node, char kind)
{
  tree fndecl = gnat_raise_decls[msg];
  tree label = get_exception_label (kind);
  tree filename;
  int line_number;
  const char *str;
  int len;

  /* If this is to be done as a goto, handle that case.  */
  if (label)
    {
      Entity_Id local_raise = Get_Local_Raise_Call_Entity ();
      tree gnu_result = build1 (GOTO_EXPR, void_type_node, label);

      /* If Local_Raise is present, generate
         Local_Raise (exception'Identity);  */
      if (Present (local_raise))
        {
          tree gnu_local_raise
            = gnat_to_gnu_entity (local_raise, NULL_TREE, 0);
          tree gnu_exception_entity
            = gnat_to_gnu_entity (Get_RT_Exception_Entity (msg), NULL_TREE, 0);
          tree gnu_call
            = build_call_n_expr (gnu_local_raise, 1,
                                 build_unary_op (ADDR_EXPR, NULL_TREE,
                                                 gnu_exception_entity));
          gnu_result = build2 (COMPOUND_EXPR, void_type_node,
                               gnu_call, gnu_result);
        }

      return gnu_result;
    }

  str
    = (Debug_Flag_NN || Exception_Locations_Suppressed)
      ? ""
      : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
        ? IDENTIFIER_POINTER
          (get_identifier (Get_Name_String
                           (Debug_Source_Name
                            (Get_Source_File_Index (Sloc (gnat_node))))))
        : ref_filename;

  len = strlen (str);
  filename = build_string (len, str);
  line_number
    = (gnat_node != Empty && Sloc (gnat_node) != No_Location)
      ? Get_Logical_Line_Number (Sloc (gnat_node))
      : LOCATION_LINE (input_location);

  TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
                                           build_index_type (size_int (len)));

  return
    build_call_n_expr (fndecl, 2,
                       build1 (ADDR_EXPR,
                               build_pointer_type (unsigned_char_type_node),
                               filename),
                       build_int_cst (NULL_TREE, line_number));
}
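/* For illustration (runtime names abridged): in the non-goto case, the
   tree built for a failed check at line 42 of foo.adb is roughly
   equivalent to the C call

     __gnat_rcheck_NN ("foo.adb", 42);

   where __gnat_rcheck_NN stands for whichever routine gnat_raise_decls
   maps MSG to.  */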
/* Similar to build_call_raise, for an index or range check exception as
   determined by MSG, with extra information generated of the form
   "INDEX out of range FIRST..LAST".  */

tree
build_call_raise_range (int msg, Node_Id gnat_node,
                        tree index, tree first, tree last)
{
  tree fndecl = gnat_raise_decls_ext[msg];
  tree filename;
  int line_number, column_number;
  const char *str;
  int len;

  str
    = (Debug_Flag_NN || Exception_Locations_Suppressed)
      ? ""
      : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
        ? IDENTIFIER_POINTER
          (get_identifier (Get_Name_String
                           (Debug_Source_Name
                            (Get_Source_File_Index (Sloc (gnat_node))))))
        : ref_filename;

  len = strlen (str);
  filename = build_string (len, str);
  if (gnat_node != Empty && Sloc (gnat_node) != No_Location)
    {
      line_number = Get_Logical_Line_Number (Sloc (gnat_node));
      column_number = Get_Column_Number (Sloc (gnat_node));
    }
  else
    {
      line_number = LOCATION_LINE (input_location);
      column_number = 0;
    }

  TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
                                           build_index_type (size_int (len)));

  return
    build_call_n_expr (fndecl, 6,
                       build1 (ADDR_EXPR,
                               build_pointer_type (unsigned_char_type_node),
                               filename),
                       build_int_cst (NULL_TREE, line_number),
                       build_int_cst (NULL_TREE, column_number),
                       convert (integer_type_node, index),
                       convert (integer_type_node, first),
                       convert (integer_type_node, last));
}
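/* For illustration (runtime names abridged): the extended variant also
   passes the column and the offending values, roughly

     __gnat_rcheck_NN_ext ("foo.adb", 42, 7, index, first, last);

   so the runtime can report "index out of range first..last".  */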
/* Similar to build_call_raise, with extra information about the column
   where the check failed.  */

tree
build_call_raise_column (int msg, Node_Id gnat_node)
{
  tree fndecl = gnat_raise_decls_ext[msg];
  tree filename;
  int line_number, column_number;
  const char *str;
  int len;

  str
    = (Debug_Flag_NN || Exception_Locations_Suppressed)
      ? ""
      : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
        ? IDENTIFIER_POINTER
          (get_identifier (Get_Name_String
                           (Debug_Source_Name
                            (Get_Source_File_Index (Sloc (gnat_node))))))
        : ref_filename;

  len = strlen (str);
  filename = build_string (len, str);
  if (gnat_node != Empty && Sloc (gnat_node) != No_Location)
    {
      line_number = Get_Logical_Line_Number (Sloc (gnat_node));
      column_number = Get_Column_Number (Sloc (gnat_node));
    }
  else
    {
      line_number = LOCATION_LINE (input_location);
      column_number = 0;
    }

  TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
                                           build_index_type (size_int (len)));

  return
    build_call_n_expr (fndecl, 3,
                       build1 (ADDR_EXPR,
                               build_pointer_type (unsigned_char_type_node),
                               filename),
                       build_int_cst (NULL_TREE, line_number),
                       build_int_cst (NULL_TREE, column_number));
}
/* qsort comparer for the bit positions of two constructor elements
   for record components.  */

static int
compare_elmt_bitpos (const PTR rt1, const PTR rt2)
{
  const constructor_elt * const elmt1 = (const constructor_elt * const) rt1;
  const constructor_elt * const elmt2 = (const constructor_elt * const) rt2;
  const_tree const field1 = elmt1->index;
  const_tree const field2 = elmt2->index;
  const int ret
    = tree_int_cst_compare (bit_position (field1), bit_position (field2));

  return ret ? ret : (int) (DECL_UID (field1) - DECL_UID (field2));
}
/* Return a CONSTRUCTOR of TYPE whose elements are V.  */

tree
gnat_build_constructor (tree type, vec<constructor_elt, va_gc> *v)
{
  bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
  bool read_only = true;
  bool side_effects = false;
  tree result, obj, val;
  unsigned int n_elmts;

  /* Scan the elements to see if they are all constant or if any has side
     effects, to let us set global flags on the resulting constructor.  Count
     the elements along the way for possible sorting purposes below.  */
  FOR_EACH_CONSTRUCTOR_ELT (v, n_elmts, obj, val)
    {
      /* The predicate must be in keeping with output_constructor.  */
      if ((!TREE_CONSTANT (val) && !TREE_STATIC (val))
          || (TREE_CODE (type) == RECORD_TYPE
              && CONSTRUCTOR_BITFIELD_P (obj)
              && !initializer_constant_valid_for_bitfield_p (val))
          || !initializer_constant_valid_p (val, TREE_TYPE (val)))
        allconstant = false;

      if (!TREE_READONLY (val))
        read_only = false;

      if (TREE_SIDE_EFFECTS (val))
        side_effects = true;
    }

  /* For record types with constant components only, sort field list
     by increasing bit position.  This is necessary to ensure the
     constructor can be output as static data.  */
  if (allconstant && TREE_CODE (type) == RECORD_TYPE && n_elmts > 1)
    v->qsort (compare_elmt_bitpos);

  result = build_constructor (type, v);
  CONSTRUCTOR_NO_CLEARING (result) = 1;
  TREE_CONSTANT (result) = TREE_STATIC (result) = allconstant;
  TREE_SIDE_EFFECTS (result) = side_effects;
  TREE_READONLY (result) = TYPE_READONLY (type) || read_only || allconstant;
  return result;
}
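/* Hypothetical usage sketch (names invented): building an aggregate for a
   two-field record type GNU_TYPE whose FIELD_DECLs are F1 and F2:

     vec<constructor_elt, va_gc> *v;
     vec_alloc (v, 2);
     CONSTRUCTOR_APPEND_ELT (v, f1, gnu_val1);
     CONSTRUCTOR_APPEND_ELT (v, f2, gnu_val2);
     gnu_aggr = gnat_build_constructor (gnu_type, v);

   If every value is constant, the elements are sorted by bit position and
   the result is flagged TREE_CONSTANT and TREE_STATIC.  */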
/* Return a COMPONENT_REF to access a field that is given by COMPONENT,
   an IDENTIFIER_NODE giving the name of the field, or FIELD, a FIELD_DECL,
   for the field.  Don't fold the result if NO_FOLD_P is true.

   We also handle the fact that we might have been passed a pointer to the
   actual record and know how to look for fields in variant parts.  */

static tree
build_simple_component_ref (tree record_variable, tree component, tree field,
                            bool no_fold_p)
{
  tree record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_variable));
  tree base, ref;

  gcc_assert (RECORD_OR_UNION_TYPE_P (record_type)
              && COMPLETE_TYPE_P (record_type)
              && (component == NULL_TREE) != (field == NULL_TREE));

  /* If no field was specified, look for a field with the specified name in
     the current record only.  */
  if (!field)
    for (field = TYPE_FIELDS (record_type);
         field;
         field = DECL_CHAIN (field))
      if (DECL_NAME (field) == component)
        break;

  if (!field)
    return NULL_TREE;

  /* If this field is not in the specified record, see if we can find a field
     in the specified record whose original field is the same as this one.  */
  if (DECL_CONTEXT (field) != record_type)
    {
      tree new_field;

      /* First loop through normal components.  */
      for (new_field = TYPE_FIELDS (record_type);
           new_field;
           new_field = DECL_CHAIN (new_field))
        if (SAME_FIELD_P (field, new_field))
          break;

      /* Next, see if we're looking for an inherited component in an extension.
         If so, look through the extension directly, unless the type contains
         a placeholder, as it might be needed for a later substitution.  */
      if (!new_field
          && TREE_CODE (record_variable) == VIEW_CONVERT_EXPR
          && TYPE_ALIGN_OK (record_type)
          && !type_contains_placeholder_p (record_type)
          && TREE_CODE (TREE_TYPE (TREE_OPERAND (record_variable, 0)))
             == RECORD_TYPE
          && TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (record_variable, 0))))
        {
          ref = build_simple_component_ref (TREE_OPERAND (record_variable, 0),
                                            NULL_TREE, field, no_fold_p);
          if (ref)
            return ref;
        }

      /* Next, loop through DECL_INTERNAL_P components if we haven't found the
         component in the first search.  Doing this search in two steps is
         required to avoid hidden homonymous fields in the _Parent field.  */
      if (!new_field)
        for (new_field = TYPE_FIELDS (record_type);
             new_field;
             new_field = DECL_CHAIN (new_field))
          if (DECL_INTERNAL_P (new_field))
            {
              tree field_ref
                = build_simple_component_ref (record_variable,
                                              NULL_TREE, new_field, no_fold_p);
              ref = build_simple_component_ref (field_ref, NULL_TREE, field,
                                                no_fold_p);
              if (ref)
                return ref;
            }

      field = new_field;
    }

  if (!field)
    return NULL_TREE;

  /* If the field's offset has overflowed, do not try to access it, as doing
     so may trigger sanity checks deeper in the back-end.  Note that we don't
     need to warn since this will be done on trying to declare the object.  */
  if (TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST
      && TREE_OVERFLOW (DECL_FIELD_OFFSET (field)))
    return NULL_TREE;

  /* We have found a suitable field.  Before building the COMPONENT_REF, get
     the base object of the record variable if possible.  */
  base = record_variable;

  if (TREE_CODE (record_variable) == VIEW_CONVERT_EXPR)
    {
      tree inner_variable = TREE_OPERAND (record_variable, 0);
      tree inner_type = TYPE_MAIN_VARIANT (TREE_TYPE (inner_variable));

      /* Look through a conversion between type variants.  This is transparent
         as far as the field is concerned.  */
      if (inner_type == record_type)
        base = inner_variable;

      /* Look through a conversion between original and packable version, but
         the field needs to be adjusted in this case.  */
      else if (RECORD_OR_UNION_TYPE_P (inner_type)
               && TYPE_NAME (inner_type) == TYPE_NAME (record_type))
        {
          tree new_field;

          for (new_field = TYPE_FIELDS (inner_type);
               new_field;
               new_field = DECL_CHAIN (new_field))
            if (SAME_FIELD_P (field, new_field))
              break;
          if (new_field)
            {
              field = new_field;
              base = inner_variable;
            }
        }
    }

  ref = build3 (COMPONENT_REF, TREE_TYPE (field), base, field, NULL_TREE);

  if (TREE_READONLY (record_variable)
      || TREE_READONLY (field)
      || TYPE_READONLY (record_type))
    TREE_READONLY (ref) = 1;

  if (TREE_THIS_VOLATILE (record_variable)
      || TREE_THIS_VOLATILE (field)
      || TYPE_VOLATILE (record_type))
    TREE_THIS_VOLATILE (ref) = 1;

  if (no_fold_p)
    return ref;

  /* The generic folder may punt in this case because the inner array type
     can be self-referential, but folding is in fact not problematic.  */
  if (TREE_CODE (base) == CONSTRUCTOR
      && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (base)))
    {
      vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (base);
      unsigned HOST_WIDE_INT idx;
      tree index, value;
      FOR_EACH_CONSTRUCTOR_ELT (elts, idx, index, value)
        if (index == field)
          return value;

      return ref;
    }

  return fold (ref);
}
/* Likewise, but generate a Constraint_Error if the reference could not be
   found.  */

tree
build_component_ref (tree record_variable, tree component, tree field,
                     bool no_fold_p)
{
  tree ref = build_simple_component_ref (record_variable, component, field,
                                         no_fold_p);
  if (ref)
    return ref;

  /* If FIELD was specified, assume this is an invalid user field so raise
     Constraint_Error.  Otherwise, we have no type to return so abort.  */
  gcc_assert (field);
  return build1 (NULL_EXPR, TREE_TYPE (field),
                 build_call_raise (CE_Discriminant_Check_Failed, Empty,
                                   N_Raise_Constraint_Error));
}
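/* Hypothetical usage sketch (field name for illustration): fetching the
   bounds part of a fat-pointer object GNU_PTR:

     gnu_bounds = build_component_ref (gnu_ptr, get_identifier ("P_BOUNDS"),
                                       NULL_TREE, false);

   If a FIELD_DECL is passed instead of a name and cannot be resolved, the
   result is a NULL_EXPR that raises Constraint_Error, per the code above.  */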
/* Helper for build_call_alloc_dealloc, with arguments to be interpreted
   identically.  Process the case where a GNAT_PROC to call is provided.  */

static inline tree
build_call_alloc_dealloc_proc (tree gnu_obj, tree gnu_size, tree gnu_type,
                               Entity_Id gnat_proc, Entity_Id gnat_pool)
{
  tree gnu_proc = gnat_to_gnu (gnat_proc);
  tree gnu_call;

  /* A storage pool's underlying type is a record type (for both predefined
     storage pools and GNAT simple storage pools).  The secondary stack uses
     the same mechanism, but its pool object (SS_Pool) is an integer.  */
  if (Is_Record_Type (Underlying_Type (Etype (gnat_pool))))
    {
      /* The size is the third parameter; the alignment parameter has the
         same type.  */
      Entity_Id gnat_size_type
        = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
      tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);

      tree gnu_pool = gnat_to_gnu (gnat_pool);
      tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
      tree gnu_align = size_int (TYPE_ALIGN (gnu_type) / BITS_PER_UNIT);

      gnu_size = convert (gnu_size_type, gnu_size);
      gnu_align = convert (gnu_size_type, gnu_align);

      /* The first arg is always the address of the storage pool; next
         comes the address of the object, for a deallocator, then the
         size and alignment.  */
      if (gnu_obj)
        gnu_call = build_call_n_expr (gnu_proc, 4, gnu_pool_addr, gnu_obj,
                                      gnu_size, gnu_align);
      else
        gnu_call = build_call_n_expr (gnu_proc, 3, gnu_pool_addr,
                                      gnu_size, gnu_align);
    }

  /* Secondary stack case.  */
  else
    {
      /* The size is the second parameter.  */
      Entity_Id gnat_size_type
        = Etype (Next_Formal (First_Formal (gnat_proc)));
      tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);

      gnu_size = convert (gnu_size_type, gnu_size);

      /* The first arg is the address of the object, for a deallocator,
         then the size.  */
      if (gnu_obj)
        gnu_call = build_call_n_expr (gnu_proc, 2, gnu_obj, gnu_size);
      else
        gnu_call = build_call_n_expr (gnu_proc, 1, gnu_size);
    }

  return gnu_call;
}
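/* For illustration, based on the code above: with a record-typed storage
   pool, an allocation call has the shape PROC (POOL'ADDRESS, SIZE, ALIGN)
   and a deallocation call PROC (POOL'ADDRESS, OBJ, SIZE, ALIGN); for the
   secondary stack, the pool and alignment actuals are dropped, giving
   PROC (SIZE) and PROC (OBJ, SIZE) respectively.  */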
/* Helper for build_call_alloc_dealloc, to build and return an allocator for
   DATA_SIZE bytes aimed at containing a DATA_TYPE object, using the default
   __gnat_malloc allocator.  Honor DATA_TYPE alignments greater than what the
   latter offers.  */

static inline tree
maybe_wrap_malloc (tree data_size, tree data_type, Node_Id gnat_node)
{
  /* When the DATA_TYPE alignment is stricter than what malloc offers
     (super-aligned case), we allocate an "aligning" wrapper type and return
     the address of its single data field with the malloc's return value
     stored just in front.  */

  unsigned int data_align = TYPE_ALIGN (data_type);
  unsigned int system_allocator_alignment
    = get_target_system_allocator_alignment () * BITS_PER_UNIT;

  tree aligning_type
    = ((data_align > system_allocator_alignment)
       ? make_aligning_type (data_type, data_align, data_size,
                             system_allocator_alignment,
                             POINTER_SIZE / BITS_PER_UNIT,
                             gnat_node)
       : NULL_TREE);

  tree size_to_malloc
    = aligning_type ? TYPE_SIZE_UNIT (aligning_type) : data_size;

  tree malloc_ptr = build_call_n_expr (malloc_decl, 1, size_to_malloc);

  if (aligning_type)
    {
      /* Latch malloc's return value and get a pointer to the aligning field
         first.  */
      tree storage_ptr = gnat_protect_expr (malloc_ptr);

      tree aligning_record_addr
        = convert (build_pointer_type (aligning_type), storage_ptr);

      tree aligning_record
        = build_unary_op (INDIRECT_REF, NULL_TREE, aligning_record_addr);

      tree aligning_field
        = build_component_ref (aligning_record, NULL_TREE,
                               TYPE_FIELDS (aligning_type), false);

      tree aligning_field_addr
        = build_unary_op (ADDR_EXPR, NULL_TREE, aligning_field);

      /* Then arrange to store the allocator's return value ahead
         and return.  */
      tree storage_ptr_slot_addr
        = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
                           convert (ptr_void_type_node, aligning_field_addr),
                           size_int (-(HOST_WIDE_INT) POINTER_SIZE
                                     / BITS_PER_UNIT));

      tree storage_ptr_slot
        = build_unary_op (INDIRECT_REF, NULL_TREE,
                          convert (build_pointer_type (ptr_void_type_node),
                                   storage_ptr_slot_addr));

      return
        build2 (COMPOUND_EXPR, TREE_TYPE (aligning_field_addr),
                build_binary_op (INIT_EXPR, NULL_TREE,
                                 storage_ptr_slot, storage_ptr),
                aligning_field_addr);
    }
  else
    return malloc_ptr;
}
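/* Illustrative picture of the super-aligned layout built above (not from
   the original sources):

       storage_ptr ->  +------------------------+
                       |  padding               |
                       +------------------------+
                       |  saved storage_ptr     |  <- POINTER_SIZE bytes
     returned value -> +------------------------+
                       |  data                  |  aligned on DATA_ALIGN
                       +------------------------+

   maybe_wrap_free below reads back the saved pointer to hand it to free.  */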
/* Helper for build_call_alloc_dealloc, to release a DATA_TYPE object
   designated by DATA_PTR using the __gnat_free entry point.  */

static inline tree
maybe_wrap_free (tree data_ptr, tree data_type)
{
  /* In the regular alignment case, we pass the data pointer straight to free.
     In the superaligned case, we need to retrieve the initial allocator
     return value, stored in front of the data block at allocation time.  */

  unsigned int data_align = TYPE_ALIGN (data_type);
  unsigned int system_allocator_alignment
    = get_target_system_allocator_alignment () * BITS_PER_UNIT;

  tree free_ptr;

  if (data_align > system_allocator_alignment)
    {
      /* DATA_FRONT_PTR (void *)
         = (void *)DATA_PTR - sizeof (void *)  */
      tree data_front_ptr
        = build_binary_op
          (POINTER_PLUS_EXPR, ptr_void_type_node,
           convert (ptr_void_type_node, data_ptr),
           size_int (-(HOST_WIDE_INT) POINTER_SIZE / BITS_PER_UNIT));

      /* FREE_PTR (void *) = *(void **)DATA_FRONT_PTR  */
      free_ptr
        = build_unary_op
          (INDIRECT_REF, NULL_TREE,
           convert (build_pointer_type (ptr_void_type_node), data_front_ptr));
    }
  else
    free_ptr = data_ptr;

  return build_call_n_expr (free_decl, 1, free_ptr);
}
/* Build a GCC tree to call an allocation or deallocation function.
   If GNU_OBJ is nonzero, it is an object to deallocate.  Otherwise,
   generate an allocator.

   GNU_SIZE is the number of bytes to allocate and GNU_TYPE is the contained
   object type, used to determine the to-be-honored address alignment.
   GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the storage
   pool to use.  If not present, malloc and free are used.  GNAT_NODE is used
   to provide an error location for restriction violation messages.  */

tree
build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, tree gnu_type,
                          Entity_Id gnat_proc, Entity_Id gnat_pool,
                          Node_Id gnat_node)
{
  gnu_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (gnu_size, gnu_obj);

  /* Explicit proc to call?  This one is assumed to deal with the type
     alignment constraints.  */
  if (Present (gnat_proc))
    return build_call_alloc_dealloc_proc (gnu_obj, gnu_size, gnu_type,
                                          gnat_proc, gnat_pool);

  /* Otherwise, object to "free" or "malloc" with possible special processing
     for alignments stricter than what the default allocator honors.  */
  else if (gnu_obj)
    return maybe_wrap_free (gnu_obj, gnu_type);
  else
    {
      /* Assert that we no longer can be called with this special pool.  */
      gcc_assert (gnat_pool != -1);

      /* Check that we aren't violating the associated restriction.  */
      if (!(Nkind (gnat_node) == N_Allocator && Comes_From_Source (gnat_node)))
        Check_No_Implicit_Heap_Alloc (gnat_node);

      return maybe_wrap_malloc (gnu_size, gnu_type, gnat_node);
    }
}
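/* Hypothetical usage sketch (invented context): allocating GNU_SIZE bytes
   for an object of GNU_TYPE on the default heap, then releasing them:

     gnu_ptr = build_call_alloc_dealloc (NULL_TREE, gnu_size, gnu_type,
                                         Empty, Empty, gnat_node);
     gnu_free = build_call_alloc_dealloc (gnu_ptr, gnu_size, gnu_type,
                                          Empty, Empty, gnat_node);

   With no GNAT_PROC present, these expand to the __gnat_malloc and
   __gnat_free paths via maybe_wrap_malloc and maybe_wrap_free.  */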
/* Build a GCC tree that corresponds to allocating an object of TYPE whose
   initial value is INIT, if INIT is nonzero.  Convert the expression to
   RESULT_TYPE, which must be some pointer type, and return the result.

   GNAT_PROC and GNAT_POOL optionally give the procedure to call and
   the storage pool to use.  GNAT_NODE is used to provide an error
   location for restriction violation messages.  If IGNORE_INIT_TYPE is
   true, ignore the type of INIT for the purpose of determining the size;
   this will cause the maximum size to be allocated if TYPE is of
   self-referential size.  */

tree
build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
                 Entity_Id gnat_pool, Node_Id gnat_node, bool ignore_init_type)
{
  tree size, storage, storage_deref, storage_init;

  /* If the initializer, if present, is a NULL_EXPR, just return a new one.  */
  if (init && TREE_CODE (init) == NULL_EXPR)
    return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));

  /* If the initializer, if present, is a COND_EXPR, deal with each branch.  */
  else if (init && TREE_CODE (init) == COND_EXPR)
    return build3 (COND_EXPR, result_type, TREE_OPERAND (init, 0),
                   build_allocator (type, TREE_OPERAND (init, 1), result_type,
                                    gnat_proc, gnat_pool, gnat_node,
                                    ignore_init_type),
                   build_allocator (type, TREE_OPERAND (init, 2), result_type,
                                    gnat_proc, gnat_pool, gnat_node,
                                    ignore_init_type));

  /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
     sizes of the object and its template.  Allocate the whole thing and
     fill in the parts that are known.  */
  else if (TYPE_IS_FAT_OR_THIN_POINTER_P (result_type))
    {
      tree storage_type
        = build_unc_object_type_from_ptr (result_type, type,
                                          get_identifier ("ALLOC"), false);
      tree template_type = TREE_TYPE (TYPE_FIELDS (storage_type));
      tree storage_ptr_type = build_pointer_type (storage_type);

      size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
                                             init);

      /* If the size overflows, pass -1 so Storage_Error will be raised.  */
      if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
        size = size_int (-1);

      storage = build_call_alloc_dealloc (NULL_TREE, size, storage_type,
                                          gnat_proc, gnat_pool, gnat_node);
      storage = convert (storage_ptr_type, gnat_protect_expr (storage));
      storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
      TREE_THIS_NOTRAP (storage_deref) = 1;

      /* If there is an initializing expression, then make a constructor for
         the entire object including the bounds and copy it into the object.
         If there is no initializing expression, just set the bounds.  */
      if (init)
        {
          vec<constructor_elt, va_gc> *v;
          vec_alloc (v, 2);

          CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (storage_type),
                                  build_template (template_type, type, init));
          CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (storage_type)),
                                  init);
          storage_init
            = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref,
                               gnat_build_constructor (storage_type, v));
        }
      else
        storage_init
          = build_binary_op (INIT_EXPR, NULL_TREE,
                             build_component_ref (storage_deref, NULL_TREE,
                                                  TYPE_FIELDS (storage_type),
                                                  false),
                             build_template (template_type, type, NULL_TREE));

      return build2 (COMPOUND_EXPR, result_type,
                     storage_init, convert (result_type, storage));
    }

  size = TYPE_SIZE_UNIT (type);

  /* If we have an initializing expression, see if its size is simpler
     than the size from the type.  */
  if (!ignore_init_type && init && TYPE_SIZE_UNIT (TREE_TYPE (init))
      && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
          || CONTAINS_PLACEHOLDER_P (size)))
    size = TYPE_SIZE_UNIT (TREE_TYPE (init));

  /* If the size is still self-referential, reference the initializing
     expression, if it is present.  If not, this must have been a
     call to allocate a library-level object, in which case we use
     the maximum size.  */
  if (CONTAINS_PLACEHOLDER_P (size))
    {
      if (!ignore_init_type && init)
        size = substitute_placeholder_in_expr (size, init);
      else
        size = max_size (size, true);
    }

  /* If the size overflows, pass -1 so Storage_Error will be raised.  */
  if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
    size = size_int (-1);

  storage = convert (result_type,
                     build_call_alloc_dealloc (NULL_TREE, size, type,
                                               gnat_proc, gnat_pool,
                                               gnat_node));

  /* If we have an initial value, protect the new address, assign the value
     and return the address with a COMPOUND_EXPR.  */
  if (init)
    {
      storage = gnat_protect_expr (storage);
      storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
      TREE_THIS_NOTRAP (storage_deref) = 1;
      storage_init
        = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref, init);
      return build2 (COMPOUND_EXPR, result_type, storage_init, storage);
    }

  return storage;
}
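/* For illustration: for an unconstrained array allocator such as
   new String'(...), the fat-pointer branch above allocates one block
   holding the bounds template followed by the array data, initializes
   both through a single constructor, and returns the block's address
   converted to the fat or thin result type.  */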
/* Indicate that we need to take the address of T and that it therefore
   should not be allocated in a register.  Returns true if successful.  */

bool
gnat_mark_addressable (tree t)
{
  while (true)
    switch (TREE_CODE (t))
      {
      case ADDR_EXPR:
      case COMPONENT_REF:
      case ARRAY_REF:
      case ARRAY_RANGE_REF:
      case REALPART_EXPR:
      case IMAGPART_EXPR:
      case VIEW_CONVERT_EXPR:
      case NON_LVALUE_EXPR:
      CASE_CONVERT:
        t = TREE_OPERAND (t, 0);
        break;

      case COMPOUND_EXPR:
        t = TREE_OPERAND (t, 1);
        break;

      case CONSTRUCTOR:
        TREE_ADDRESSABLE (t) = 1;
        return true;

      case VAR_DECL:
      case PARM_DECL:
      case RESULT_DECL:
        TREE_ADDRESSABLE (t) = 1;
        return true;

      case FUNCTION_DECL:
        TREE_ADDRESSABLE (t) = 1;
        return true;

      case CONST_DECL:
        return DECL_CONST_CORRESPONDING_VAR (t)
               && gnat_mark_addressable (DECL_CONST_CORRESPONDING_VAR (t));

      default:
        return true;
      }
}
/* Save EXP for later use or reuse.  This is equivalent to save_expr in
   tree.c but we know how to handle our own nodes.  */

tree
gnat_save_expr (tree exp)
{
  tree type = TREE_TYPE (exp);
  enum tree_code code = TREE_CODE (exp);

  if (TREE_CONSTANT (exp) || code == SAVE_EXPR || code == NULL_EXPR)
    return exp;

  if (code == UNCONSTRAINED_ARRAY_REF)
    {
      tree t = build1 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)));
      TREE_READONLY (t) = TYPE_READONLY (type);
      return t;
    }

  /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
     This may be more efficient, but will also allow us to more easily find
     the match for the PLACEHOLDER_EXPR.  */
  if (code == COMPONENT_REF
      && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    return build3 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)),
                   TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));

  return save_expr (exp);
}
/* Protect EXP for immediate reuse.  This is a variant of gnat_save_expr that
   is optimized under the assumption that EXP's value doesn't change before
   its subsequent reuse(s) except through its potential reevaluation.  */

tree
gnat_protect_expr (tree exp)
{
  tree type = TREE_TYPE (exp);
  enum tree_code code = TREE_CODE (exp);

  if (TREE_CONSTANT (exp) || code == SAVE_EXPR || code == NULL_EXPR)
    return exp;

  /* If EXP has no side effects, we theoretically don't need to do anything.
     However, we may be recursively passed more and more complex expressions
     involving checks which will be reused multiple times and eventually be
     unshared for gimplification; in order to avoid a complexity explosion
     at that point, we protect any expressions more complex than a simple
     arithmetic expression.  */
  if (!TREE_SIDE_EFFECTS (exp))
    {
      tree inner = skip_simple_arithmetic (exp);
      if (!EXPR_P (inner) || REFERENCE_CLASS_P (inner))
        return exp;
    }

  /* If this is a conversion, protect what's inside the conversion.  */
  if (code == NON_LVALUE_EXPR
      || CONVERT_EXPR_CODE_P (code)
      || code == VIEW_CONVERT_EXPR)
    return build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));

  /* If we're indirectly referencing something, we only need to protect the
     address since the data itself can't change in these situations.  */
  if (code == INDIRECT_REF || code == UNCONSTRAINED_ARRAY_REF)
    {
      tree t = build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
      TREE_READONLY (t) = TYPE_READONLY (type);
      return t;
    }

  /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
     This may be more efficient, but will also allow us to more easily find
     the match for the PLACEHOLDER_EXPR.  */
  if (code == COMPONENT_REF
      && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
                   TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));

  /* If this is a fat pointer or something that can be placed in a register,
     just make a SAVE_EXPR.  Likewise for a CALL_EXPR as large objects are
     returned via invisible reference in most ABIs so the temporary will
     directly be filled by the callee.  */
  if (TYPE_IS_FAT_POINTER_P (type)
      || TYPE_MODE (type) != BLKmode
      || code == CALL_EXPR)
    return save_expr (exp);

  /* Otherwise, reference EXP: protect the address and dereference it.  */
  return
    build_unary_op (INDIRECT_REF, type,
                    save_expr (build_unary_op (ADDR_EXPR,
                                               build_reference_type (type),
                                               exp)));
}
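/* Illustrative note (not from the original sources): for an indirect
   reference *P, only the address is protected, yielding roughly
   *SAVE_EXPR<P>, whereas a BLKmode object that is neither a fat pointer
   nor a call is protected through its address, yielding roughly
   *SAVE_EXPR<&EXP>.  */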
/* This is equivalent to stabilize_reference_1 in tree.c but we take an extra
   argument to force evaluation of everything.  */

static tree
gnat_stabilize_reference_1 (tree e, bool force)
{
  enum tree_code code = TREE_CODE (e);
  tree type = TREE_TYPE (e);
  tree result;

  /* We cannot ignore const expressions because E might be a reference to a
     const array whose index contains side-effects.  But we can ignore things
     that are actual constants or that have already been handled by this
     function.  */
  if (TREE_CONSTANT (e) || code == SAVE_EXPR)
    return e;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_exceptional:
    case tcc_declaration:
    case tcc_comparison:
    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* If this is a COMPONENT_REF of a fat pointer, save the entire
         fat pointer.  This may be more efficient, but will also allow
         us to more easily find the match for the PLACEHOLDER_EXPR.  */
      if (code == COMPONENT_REF
          && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (e, 0))))
        result
          = build3 (code, type,
                    gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force),
                    TREE_OPERAND (e, 1), TREE_OPERAND (e, 2));
      /* If the expression has side-effects, then encase it in a SAVE_EXPR
         so that it will only be evaluated once.  */
      /* The tcc_reference and tcc_comparison classes could be handled as
         below, but it is generally faster to only evaluate them once.  */
      else if (TREE_SIDE_EFFECTS (e) || force)
        return save_expr (e);
      else
        return e;
      break;

    case tcc_binary:
      /* Recursively stabilize each operand.  */
      result
        = build2 (code, type,
                  gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force),
                  gnat_stabilize_reference_1 (TREE_OPERAND (e, 1), force));
      break;

    case tcc_unary:
      /* Recursively stabilize each operand.  */
      result
        = build1 (code, type,
                  gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force));
      break;

    default:
      gcc_unreachable ();
    }

  /* See similar handling in gnat_stabilize_reference.  */
  TREE_READONLY (result) = TREE_READONLY (e);
  TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (e);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);

  if (code == INDIRECT_REF
      || code == UNCONSTRAINED_ARRAY_REF
      || code == ARRAY_REF
      || code == ARRAY_RANGE_REF)
    TREE_THIS_NOTRAP (result) = TREE_THIS_NOTRAP (e);

  return result;
}
/* This is equivalent to stabilize_reference in tree.c but we know how to
   handle our own nodes and we take extra arguments.  FORCE says whether to
   force evaluation of everything.  We set SUCCESS to true unless we walk
   through something we don't know how to stabilize.  */

tree
gnat_stabilize_reference (tree ref, bool force, bool *success)
{
  tree type = TREE_TYPE (ref);
  enum tree_code code = TREE_CODE (ref);
  tree result;

  /* Assume we'll succeed unless proven otherwise.  */
  if (success)
    *success = true;

  switch (code)
    {
    case CONST_DECL:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* No action is needed in this case.  */
      return ref;

    case ADDR_EXPR:
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case VIEW_CONVERT_EXPR:
      result
        = build1 (code, type,
                  gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
                                            success));
      break;

    case INDIRECT_REF:
    case UNCONSTRAINED_ARRAY_REF:
      result = build1 (code, type,
                       gnat_stabilize_reference_1 (TREE_OPERAND (ref, 0),
                                                   force));
      break;

    case COMPONENT_REF:
      result = build3 (COMPONENT_REF, type,
                       gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
                                                 success),
                       TREE_OPERAND (ref, 1), NULL_TREE);
      break;

    case BIT_FIELD_REF:
      result = build3 (BIT_FIELD_REF, type,
                       gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
                                                 success),
                       TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
      break;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      result = build4 (code, type,
                       gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
                                                 success),
                       gnat_stabilize_reference_1 (TREE_OPERAND (ref, 1),
                                                   force),
                       NULL_TREE, NULL_TREE);
      break;

    case CALL_EXPR:
      result = gnat_stabilize_reference_1 (ref, force);
      break;

    case COMPOUND_EXPR:
      result = build2 (COMPOUND_EXPR, type,
                       gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
                                                 success),
                       gnat_stabilize_reference (TREE_OPERAND (ref, 1), force,
                                                 success));
      break;

    case CONSTRUCTOR:
      /* Constructors with 1 element are used extensively to formally
         convert objects to special wrapping types.  */
      if (TREE_CODE (type) == RECORD_TYPE
          && vec_safe_length (CONSTRUCTOR_ELTS (ref)) == 1)
        {
          tree index = (*CONSTRUCTOR_ELTS (ref))[0].index;
          tree value = (*CONSTRUCTOR_ELTS (ref))[0].value;
          result
            = build_constructor_single (type, index,
                                        gnat_stabilize_reference_1 (value,
                                                                    force));
        }
      else
        {
          if (success)
            *success = false;
          return ref;
        }
      break;

    case ERROR_MARK:
      ref = error_mark_node;

      /* ... fall through to failure ... */

      /* If REF isn't a kind of lvalue we recognize, make no change.
         The caller should recognize the error for an invalid lvalue.  */
    default:
      if (success)
        *success = false;
      return ref;
    }

  /* TREE_THIS_VOLATILE and TREE_SIDE_EFFECTS set on the initial expression
     may not be sustained across some paths, such as the one via build1 for
     INDIRECT_REF.  We reset those flags here in the general case, which is
     consistent with the GCC version of this routine.

     Special care should be taken regarding TREE_SIDE_EFFECTS, because some
     paths introduce side-effects where there was none initially (e.g. if a
     SAVE_EXPR is built) and we also want to keep track of that.  */
  TREE_READONLY (result) = TREE_READONLY (ref);
  TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);

  if (code == INDIRECT_REF
      || code == UNCONSTRAINED_ARRAY_REF
      || code == ARRAY_REF
      || code == ARRAY_RANGE_REF)
    TREE_THIS_NOTRAP (result) = TREE_THIS_NOTRAP (ref);

  return result;
}
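/* Hypothetical usage sketch (names invented): to reuse an array element
   reference several times without reevaluating its variable index:

     bool ok;
     gnu_ref = gnat_stabilize_reference (gnu_ref, false, &ok);

   On success, variable subexpressions such as the index of an ARRAY_REF
   have been wrapped in SAVE_EXPRs so they are evaluated only once.  */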
/* If EXPR is an expression that is invariant in the current function, in the
   sense that it can be evaluated anywhere in the function and any number of
   times, return EXPR or an equivalent expression.  Otherwise return NULL.  */

tree
gnat_invariant_expr (tree expr)
{
  tree type = TREE_TYPE (expr), t;

  expr = remove_conversions (expr, false);

  while ((TREE_CODE (expr) == CONST_DECL
          || (TREE_CODE (expr) == VAR_DECL && TREE_READONLY (expr)))
         && decl_function_context (expr) == current_function_decl
         && DECL_INITIAL (expr))
    {
      expr = DECL_INITIAL (expr);
      /* Look into CONSTRUCTORs built to initialize padded types.  */
      if (TYPE_IS_PADDING_P (TREE_TYPE (expr)))
        expr = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (expr))), expr);
      expr = remove_conversions (expr, false);
    }

  if (TREE_CONSTANT (expr))
    return fold_convert (type, expr);

  t = expr;

  while (true)
    {
      switch (TREE_CODE (t))
        {
        case COMPONENT_REF:
          if (TREE_OPERAND (t, 2) != NULL_TREE)
            return NULL_TREE;
          break;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          if (!TREE_CONSTANT (TREE_OPERAND (t, 1))
              || TREE_OPERAND (t, 2) != NULL_TREE
              || TREE_OPERAND (t, 3) != NULL_TREE)
            return NULL_TREE;
          break;

        case BIT_FIELD_REF:
        case VIEW_CONVERT_EXPR:
        case REALPART_EXPR:
        case IMAGPART_EXPR:
          break;

        case INDIRECT_REF:
          if (!TREE_READONLY (t)
              || TREE_SIDE_EFFECTS (t)
              || !TREE_THIS_NOTRAP (t))
            return NULL_TREE;
          break;

        default:
          goto object;
        }

      t = TREE_OPERAND (t, 0);
    }

object:
  if (TREE_SIDE_EFFECTS (t))
    return NULL_TREE;

  if (TREE_CODE (t) == CONST_DECL
      && (DECL_EXTERNAL (t)
          || decl_function_context (t) != current_function_decl))
    return fold_convert (type, expr);

  if (!TREE_READONLY (t))
    return NULL_TREE;

  if (TREE_CODE (t) == PARM_DECL)
    return fold_convert (type, expr);

  if (TREE_CODE (t) == VAR_DECL
      && (DECL_EXTERNAL (t)
          || decl_function_context (t) != current_function_decl))
    return fold_convert (type, expr);

  return NULL_TREE;
}
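/* Illustrative notes on the walk above (not from the original sources):
   a COMPONENT_REF chain with constant offsets rooted at a read-only
   PARM_DECL is returned as invariant, as is a CONST_DECL or read-only
   VAR_DECL defined outside the current function; an INDIRECT_REF only
   qualifies if it is read-only, free of side effects and marked
   TREE_THIS_NOTRAP.  */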