1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S 2 *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2013, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "tm.h"
30 #include "tree.h"
31 #include "flags.h"
32 #include "toplev.h"
33 #include "ggc.h"
34 #include "tree-inline.h"
36 #include "ada.h"
37 #include "types.h"
38 #include "atree.h"
39 #include "elists.h"
40 #include "namet.h"
41 #include "nlists.h"
42 #include "snames.h"
43 #include "stringt.h"
44 #include "uintp.h"
45 #include "fe.h"
46 #include "sinfo.h"
47 #include "einfo.h"
48 #include "ada-tree.h"
49 #include "gigi.h"
51 /* Return the base type of TYPE. */
53 tree
54 get_base_type (tree type)
56 if (TREE_CODE (type) == RECORD_TYPE
57 && TYPE_JUSTIFIED_MODULAR_P (type))
58 type = TREE_TYPE (TYPE_FIELDS (type));
60 while (TREE_TYPE (type)
61 && (TREE_CODE (type) == INTEGER_TYPE
62 || TREE_CODE (type) == REAL_TYPE))
63 type = TREE_TYPE (type);
65 return type;
68 /* EXP is a GCC tree representing an address. See if we can find how
69 strictly the object at that address is aligned. Return that alignment
70 in bits. If we don't know anything about the alignment, return 0. */
72 unsigned int
73 known_alignment (tree exp)
75 unsigned int this_alignment;
76 unsigned int lhs, rhs;
78 switch (TREE_CODE (exp))
80 CASE_CONVERT:
81 case VIEW_CONVERT_EXPR:
82 case NON_LVALUE_EXPR:
83 /* Conversions between pointers and integers don't change the alignment
84 of the underlying object. */
85 this_alignment = known_alignment (TREE_OPERAND (exp, 0));
86 break;
88 case COMPOUND_EXPR:
89      /* The value of a COMPOUND_EXPR is that of its second operand.  */
90 this_alignment = known_alignment (TREE_OPERAND (exp, 1));
91 break;
93 case PLUS_EXPR:
94 case MINUS_EXPR:
95      /* If two addresses are added, the alignment of the result is the
96 minimum of the two alignments. */
97 lhs = known_alignment (TREE_OPERAND (exp, 0));
98 rhs = known_alignment (TREE_OPERAND (exp, 1));
99 this_alignment = MIN (lhs, rhs);
100 break;
102 case POINTER_PLUS_EXPR:
103 lhs = known_alignment (TREE_OPERAND (exp, 0));
104 rhs = known_alignment (TREE_OPERAND (exp, 1));
105 /* If we don't know the alignment of the offset, we assume that
106 of the base. */
107 if (rhs == 0)
108 this_alignment = lhs;
109 else
110 this_alignment = MIN (lhs, rhs);
111 break;
113 case COND_EXPR:
114 /* If there is a choice between two values, use the smallest one. */
115 lhs = known_alignment (TREE_OPERAND (exp, 1));
116 rhs = known_alignment (TREE_OPERAND (exp, 2));
117 this_alignment = MIN (lhs, rhs);
118 break;
120 case INTEGER_CST:
122 unsigned HOST_WIDE_INT c = TREE_INT_CST_LOW (exp);
123        /* C & -C isolates the lowest set bit of the constant, which is
124           a byte offset and must therefore be scaled from bytes to bits.  */
125 this_alignment = MIN (BITS_PER_UNIT * (c & -c), BIGGEST_ALIGNMENT);
127 break;
129 case MULT_EXPR:
130 /* If we know the alignment of just one side, use it. Otherwise,
131 use the product of the alignments. */
132 lhs = known_alignment (TREE_OPERAND (exp, 0));
133 rhs = known_alignment (TREE_OPERAND (exp, 1));
135 if (lhs == 0)
136 this_alignment = rhs;
137 else if (rhs == 0)
138 this_alignment = lhs;
139 else
140 this_alignment = MIN (lhs * rhs, BIGGEST_ALIGNMENT);
141 break;
143 case BIT_AND_EXPR:
144 /* A bit-and expression is as aligned as the maximum alignment of the
145 operands. We typically get here for a complex lhs and a constant
146 negative power of two on the rhs to force an explicit alignment, so
147 don't bother looking at the lhs. */
148 this_alignment = known_alignment (TREE_OPERAND (exp, 1));
149 break;
151 case ADDR_EXPR:
152 this_alignment = expr_align (TREE_OPERAND (exp, 0));
153 break;
155 case CALL_EXPR:
157 tree t = maybe_inline_call_in_expr (exp);
158 if (t)
159 return known_alignment (t);
162 /* Fall through... */
164 default:
165 /* For other pointer expressions, we assume that the pointed-to object
166 is at least as aligned as the pointed-to type. Beware that we can
167 have a dummy type here (e.g. a Taft Amendment type), for which the
168 alignment is meaningless and should be ignored. */
169 if (POINTER_TYPE_P (TREE_TYPE (exp))
170 && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp))))
171 this_alignment = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
172 else
173 this_alignment = 0;
174 break;
177 return this_alignment;
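
/* Editorial aside, not part of the original file: the C & -C idiom used in
   the INTEGER_CST case above isolates the lowest set bit of a two's
   complement value, which is the strongest alignment provable from a
   constant byte offset.  A minimal standalone sketch:  */

#if 0 /* hypothetical demonstration only */
#include <stdio.h>

int
main (void)
{
  unsigned long c = 24;              /* 0b11000, a byte offset */
  unsigned long lowest = c & -c;     /* 8, the lowest set bit */

  /* Scaled by BITS_PER_UNIT (8 bits per byte on most targets), an address
     that is a multiple of 24 bytes is known to be at least 64-bit aligned,
     modulo the cap at BIGGEST_ALIGNMENT.  */
  printf ("%lu\n", 8 * lowest);      /* prints 64 */
  return 0;
}
#endif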
180 /* We have a comparison or assignment operation on two types, T1 and T2, which
181 are either both array types or both record types. T1 is assumed to be for
182 the left hand side operand, and T2 for the right hand side. Return the
183 type that both operands should be converted to for the operation, if any.
184 Otherwise return zero. */
186 static tree
187 find_common_type (tree t1, tree t2)
189 /* ??? As of today, various constructs lead to here with types of different
190 sizes even when both constants (e.g. tagged types, packable vs regular
191 component types, padded vs unpadded types, ...). While some of these
192 would better be handled upstream (types should be made consistent before
193 calling into build_binary_op), some others are really expected and we
194 have to be careful. */
196   /* We must avoid writing more than what the target can hold if this is for
197      an assignment (the case of tagged types is handled in build_binary_op),
198      so we use the lhs type if it is known to be smaller or of constant size
199      and the rhs type is not, whatever the modes.  We also force t1 in case of
200      constant size equality to minimize occurrences of view conversions on the
201      lhs of an assignment, except for the case of record types with a variant
202      part on the lhs but not on the rhs, to make the conversion simpler.  */
203 if (TREE_CONSTANT (TYPE_SIZE (t1))
204 && (!TREE_CONSTANT (TYPE_SIZE (t2))
205 || tree_int_cst_lt (TYPE_SIZE (t1), TYPE_SIZE (t2))
206 || (TYPE_SIZE (t1) == TYPE_SIZE (t2)
207 && !(TREE_CODE (t1) == RECORD_TYPE
208 && TREE_CODE (t2) == RECORD_TYPE
209 && get_variant_part (t1) != NULL_TREE
210 && get_variant_part (t2) == NULL_TREE))))
211 return t1;
213 /* Otherwise, if the lhs type is non-BLKmode, use it. Note that we know
214 that we will not have any alignment problems since, if we did, the
215 non-BLKmode type could not have been used. */
216 if (TYPE_MODE (t1) != BLKmode)
217 return t1;
219 /* If the rhs type is of constant size, use it whatever the modes. At
220 this point it is known to be smaller, or of constant size and the
221 lhs type is not. */
222 if (TREE_CONSTANT (TYPE_SIZE (t2)))
223 return t2;
225 /* Otherwise, if the rhs type is non-BLKmode, use it. */
226 if (TYPE_MODE (t2) != BLKmode)
227 return t2;
229 /* In this case, both types have variable size and BLKmode. It's
230 probably best to leave the "type mismatch" because changing it
231 could cause a bad self-referential reference. */
232 return NULL_TREE;
235 /* Return an expression tree representing an equality comparison of A1 and A2,
236 two objects of type ARRAY_TYPE. The result should be of type RESULT_TYPE.
238 Two arrays are equal in one of two ways: (1) if both have zero length in
239 some dimension (not necessarily the same dimension) or (2) if the lengths
240 in each dimension are equal and the data is equal. We perform the length
241 tests in as efficient a manner as possible. */
243 static tree
244 compare_arrays (location_t loc, tree result_type, tree a1, tree a2)
246 tree result = convert (result_type, boolean_true_node);
247 tree a1_is_null = convert (result_type, boolean_false_node);
248 tree a2_is_null = convert (result_type, boolean_false_node);
249 tree t1 = TREE_TYPE (a1);
250 tree t2 = TREE_TYPE (a2);
251 bool a1_side_effects_p = TREE_SIDE_EFFECTS (a1);
252 bool a2_side_effects_p = TREE_SIDE_EFFECTS (a2);
253 bool length_zero_p = false;
255   /* If either operand has side-effects, it has to be evaluated only once
256      despite the multiple references to it in the comparison.  */
257 if (a1_side_effects_p)
258 a1 = gnat_protect_expr (a1);
260 if (a2_side_effects_p)
261 a2 = gnat_protect_expr (a2);
263 /* Process each dimension separately and compare the lengths. If any
264 dimension has a length known to be zero, set LENGTH_ZERO_P to true
265 in order to suppress the comparison of the data at the end. */
266 while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
268 tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
269 tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
270 tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
271 tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
272 tree length1 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub1, lb1),
273 size_one_node);
274 tree length2 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub2, lb2),
275 size_one_node);
276 tree comparison, this_a1_is_null, this_a2_is_null;
278 /* If the length of the first array is a constant, swap our operands
279 unless the length of the second array is the constant zero. */
280 if (TREE_CODE (length1) == INTEGER_CST && !integer_zerop (length2))
282 tree tem;
283 bool btem;
285 tem = a1, a1 = a2, a2 = tem;
286 tem = t1, t1 = t2, t2 = tem;
287 tem = lb1, lb1 = lb2, lb2 = tem;
288 tem = ub1, ub1 = ub2, ub2 = tem;
289 tem = length1, length1 = length2, length2 = tem;
290 tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
291 btem = a1_side_effects_p, a1_side_effects_p = a2_side_effects_p,
292 a2_side_effects_p = btem;
295 /* If the length of the second array is the constant zero, we can just
296 use the original stored bounds for the first array and see whether
297 last < first holds. */
298 if (integer_zerop (length2))
300 length_zero_p = true;
302 ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
303 lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
305 comparison = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
306 comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
307 if (EXPR_P (comparison))
308 SET_EXPR_LOCATION (comparison, loc);
310 this_a1_is_null = comparison;
311 this_a2_is_null = convert (result_type, boolean_true_node);
314 /* Otherwise, if the length is some other constant value, we know that
315 this dimension in the second array cannot be superflat, so we can
316 just use its length computed from the actual stored bounds. */
317 else if (TREE_CODE (length2) == INTEGER_CST)
319 tree bt;
321 ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
322 lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
323 /* Note that we know that UB2 and LB2 are constant and hence
324 cannot contain a PLACEHOLDER_EXPR. */
325 ub2 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
326 lb2 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
327 bt = get_base_type (TREE_TYPE (ub1));
329 comparison
330 = fold_build2_loc (loc, EQ_EXPR, result_type,
331 build_binary_op (MINUS_EXPR, bt, ub1, lb1),
332 build_binary_op (MINUS_EXPR, bt, ub2, lb2));
333 comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
334 if (EXPR_P (comparison))
335 SET_EXPR_LOCATION (comparison, loc);
337 this_a1_is_null
338 = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
340 this_a2_is_null = convert (result_type, boolean_false_node);
343 /* Otherwise, compare the computed lengths. */
344 else
346 length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
347 length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);
349 comparison
350 = fold_build2_loc (loc, EQ_EXPR, result_type, length1, length2);
352 /* If the length expression is of the form (cond ? val : 0), assume
353 that cond is equivalent to (length != 0). That's guaranteed by
354 construction of the array types in gnat_to_gnu_entity. */
355 if (TREE_CODE (length1) == COND_EXPR
356 && integer_zerop (TREE_OPERAND (length1, 2)))
357 this_a1_is_null
358 = invert_truthvalue_loc (loc, TREE_OPERAND (length1, 0));
359 else
360 this_a1_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
361 length1, size_zero_node);
363 /* Likewise for the second array. */
364 if (TREE_CODE (length2) == COND_EXPR
365 && integer_zerop (TREE_OPERAND (length2, 2)))
366 this_a2_is_null
367 = invert_truthvalue_loc (loc, TREE_OPERAND (length2, 0));
368 else
369 this_a2_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
370 length2, size_zero_node);
373 /* Append expressions for this dimension to the final expressions. */
374 result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
375 result, comparison);
377 a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
378 this_a1_is_null, a1_is_null);
380 a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
381 this_a2_is_null, a2_is_null);
383 t1 = TREE_TYPE (t1);
384 t2 = TREE_TYPE (t2);
387 /* Unless the length of some dimension is known to be zero, compare the
388 data in the array. */
389 if (!length_zero_p)
391 tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));
392 tree comparison;
394 if (type)
396 a1 = convert (type, a1),
397 a2 = convert (type, a2);
400 comparison = fold_build2_loc (loc, EQ_EXPR, result_type, a1, a2);
402 result
403 = build_binary_op (TRUTH_ANDIF_EXPR, result_type, result, comparison);
406 /* The result is also true if both sizes are zero. */
407 result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
408 build_binary_op (TRUTH_ANDIF_EXPR, result_type,
409 a1_is_null, a2_is_null),
410 result);
412   /* If either operand has side-effects, it has to be evaluated before
413      the comparison above starts, since the place where it would
414      otherwise be evaluated could be wrong.  */
415 if (a1_side_effects_p)
416 result = build2 (COMPOUND_EXPR, result_type, a1, result);
418 if (a2_side_effects_p)
419 result = build2 (COMPOUND_EXPR, result_type, a2, result);
421 return result;
424 /* Return an expression tree representing an equality comparison of P1 and P2,
425 two objects of fat pointer type. The result should be of type RESULT_TYPE.
427 Two fat pointers are equal in one of two ways: (1) if both have a null
428    pointer to the array or (2) if they contain the same pair of pointers.
429 We perform the comparison in as efficient a manner as possible. */
431 static tree
432 compare_fat_pointers (location_t loc, tree result_type, tree p1, tree p2)
434 tree p1_array, p2_array, p1_bounds, p2_bounds, same_array, same_bounds;
435 tree p1_array_is_null, p2_array_is_null;
437   /* If either operand has side-effects, it has to be evaluated only once
438      despite the multiple references to it in the comparison.  */
439 p1 = gnat_protect_expr (p1);
440 p2 = gnat_protect_expr (p2);
442 /* The constant folder doesn't fold fat pointer types so we do it here. */
443 if (TREE_CODE (p1) == CONSTRUCTOR)
444 p1_array = (*CONSTRUCTOR_ELTS (p1))[0].value;
445 else
446 p1_array = build_component_ref (p1, NULL_TREE,
447 TYPE_FIELDS (TREE_TYPE (p1)), true);
449 p1_array_is_null
450 = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array,
451 fold_convert_loc (loc, TREE_TYPE (p1_array),
452 null_pointer_node));
454 if (TREE_CODE (p2) == CONSTRUCTOR)
455 p2_array = (*CONSTRUCTOR_ELTS (p2))[0].value;
456 else
457 p2_array = build_component_ref (p2, NULL_TREE,
458 TYPE_FIELDS (TREE_TYPE (p2)), true);
460 p2_array_is_null
461 = fold_build2_loc (loc, EQ_EXPR, result_type, p2_array,
462 fold_convert_loc (loc, TREE_TYPE (p2_array),
463 null_pointer_node));
465 /* If one of the pointers to the array is null, just compare the other. */
466 if (integer_zerop (p1_array))
467 return p2_array_is_null;
468 else if (integer_zerop (p2_array))
469 return p1_array_is_null;
471 /* Otherwise, do the fully-fledged comparison. */
472 same_array
473 = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array, p2_array);
475 if (TREE_CODE (p1) == CONSTRUCTOR)
476 p1_bounds = (*CONSTRUCTOR_ELTS (p1))[1].value;
477 else
478 p1_bounds
479 = build_component_ref (p1, NULL_TREE,
480 DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p1))), true);
482 if (TREE_CODE (p2) == CONSTRUCTOR)
483 p2_bounds = (*CONSTRUCTOR_ELTS (p2))[1].value;
484 else
485 p2_bounds
486 = build_component_ref (p2, NULL_TREE,
487 DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p2))), true);
489 same_bounds
490 = fold_build2_loc (loc, EQ_EXPR, result_type, p1_bounds, p2_bounds);
492 /* P1_ARRAY == P2_ARRAY && (P1_ARRAY == NULL || P1_BOUNDS == P2_BOUNDS). */
493 return build_binary_op (TRUTH_ANDIF_EXPR, result_type, same_array,
494 build_binary_op (TRUTH_ORIF_EXPR, result_type,
495 p1_array_is_null, same_bounds));
498 /* Compute the result of applying OP_CODE to LHS and RHS, where both are of
499 type TYPE. We know that TYPE is a modular type with a nonbinary
500 modulus. */
502 static tree
503 nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
504 tree rhs)
506 tree modulus = TYPE_MODULUS (type);
507 unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
508 unsigned int precision;
509 bool unsignedp = true;
510 tree op_type = type;
511 tree result;
513 /* If this is an addition of a constant, convert it to a subtraction
514 of a constant since we can do that faster. */
515 if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
517 rhs = fold_build2 (MINUS_EXPR, type, modulus, rhs);
518 op_code = MINUS_EXPR;
521 /* For the logical operations, we only need PRECISION bits. For
522 addition and subtraction, we need one more and for multiplication we
523 need twice as many. But we never want to make a size smaller than
524 our size. */
525 if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
526 needed_precision += 1;
527 else if (op_code == MULT_EXPR)
528 needed_precision *= 2;
530 precision = MAX (needed_precision, TYPE_PRECISION (op_type));
532 /* Unsigned will do for everything but subtraction. */
533 if (op_code == MINUS_EXPR)
534 unsignedp = false;
536 /* If our type is the wrong signedness or isn't wide enough, make a new
537 type and convert both our operands to it. */
538 if (TYPE_PRECISION (op_type) < precision
539 || TYPE_UNSIGNED (op_type) != unsignedp)
541 /* Copy the node so we ensure it can be modified to make it modular. */
542 op_type = copy_node (gnat_type_for_size (precision, unsignedp));
543 modulus = convert (op_type, modulus);
544 SET_TYPE_MODULUS (op_type, modulus);
545 TYPE_MODULAR_P (op_type) = 1;
546 lhs = convert (op_type, lhs);
547 rhs = convert (op_type, rhs);
550 /* Do the operation, then we'll fix it up. */
551 result = fold_build2 (op_code, op_type, lhs, rhs);
553 /* For multiplication, we have no choice but to do a full modulus
554 operation. However, we want to do this in the narrowest
555 possible size. */
556 if (op_code == MULT_EXPR)
558 tree div_type = copy_node (gnat_type_for_size (needed_precision, 1));
559 modulus = convert (div_type, modulus);
560 SET_TYPE_MODULUS (div_type, modulus);
561 TYPE_MODULAR_P (div_type) = 1;
562 result = convert (op_type,
563 fold_build2 (TRUNC_MOD_EXPR, div_type,
564 convert (div_type, result), modulus));
567 /* For subtraction, add the modulus back if we are negative. */
568 else if (op_code == MINUS_EXPR)
570 result = gnat_protect_expr (result);
571 result = fold_build3 (COND_EXPR, op_type,
572 fold_build2 (LT_EXPR, boolean_type_node, result,
573 convert (op_type, integer_zero_node)),
574 fold_build2 (PLUS_EXPR, op_type, result, modulus),
575 result);
578 /* For the other operations, subtract the modulus if we are >= it. */
579 else
581 result = gnat_protect_expr (result);
582 result = fold_build3 (COND_EXPR, op_type,
583 fold_build2 (GE_EXPR, boolean_type_node,
584 result, modulus),
585 fold_build2 (MINUS_EXPR, op_type,
586 result, modulus),
587 result);
590 return convert (type, result);
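
/* Editorial worked example, assuming a modular type with modulus 7: for
   5 + 4, the constant addition is first rewritten above as a subtraction,
   RHS := 7 - 4 = 3 and OP_CODE := MINUS_EXPR; then 5 - 3 = 2, which is not
   negative, so nothing is added back, and indeed (5 + 4) mod 7 = 2.  For
   2 - 3 = -1, the COND_EXPR adds the modulus back: -1 + 7 = 6, which is
   (2 - 3) mod 7.  */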
593 /* This page contains routines that implement the Ada semantics with regard
594 to atomic objects. They are fully piggybacked on the middle-end support
595 for atomic loads and stores.
597 *** Memory barriers and volatile objects ***
599 We implement the weakened form of the C.6(16) clause that was introduced
600 in Ada 2012 (AI05-117). Earlier forms of this clause wouldn't have been
601 implementable without significant performance hits on modern platforms.
603 We also take advantage of the requirements imposed on shared variables by
604 9.10 (conditions for sequential actions) to have non-erroneous execution
605    and consider that C.6(16) and C.6(17) only prescribe a uniform order of
606 volatile updates with regard to sequential actions, i.e. with regard to
607 reads or updates of atomic objects.
609 As such, an update of an atomic object by a task requires that all earlier
610 accesses to volatile objects have completed. Similarly, later accesses to
611 volatile objects cannot be reordered before the update of the atomic object.
612 So, memory barriers both before and after the atomic update are needed.
614 For a read of an atomic object, to avoid seeing writes of volatile objects
615 by a task earlier than by the other tasks, a memory barrier is needed before
616 the atomic read. Finally, to avoid reordering later reads or updates of
617 volatile objects to before the atomic read, a barrier is needed after the
618 atomic read.
620 So, memory barriers are needed before and after atomic reads and updates.
621 And, in order to simplify the implementation, we use full memory barriers
622 in all cases, i.e. we enforce sequential consistency for atomic accesses. */
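
/* Editorial sketch of the C-level equivalent of the policy above: every
   atomic access is emitted with the seq-cst memory model, i.e. with full
   barriers on both sides.  The names below are illustrative only.  */

#if 0 /* hypothetical demonstration only */
int shared_counter;

int
read_counter (void)
{
  /* Barriers are implied before and after the load.  */
  return __atomic_load_n (&shared_counter, __ATOMIC_SEQ_CST);
}

void
write_counter (int v)
{
  /* Likewise on both sides of the store.  */
  __atomic_store_n (&shared_counter, v, __ATOMIC_SEQ_CST);
}
#endif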
624 /* Return the size of TYPE, which must be a positive power of 2. */
626 static unsigned int
627 resolve_atomic_size (tree type)
629 unsigned HOST_WIDE_INT size = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
631 if (size == 1 || size == 2 || size == 4 || size == 8 || size == 16)
632 return size;
634 /* We shouldn't reach here without having already detected that the size
635 isn't compatible with an atomic access. */
636 gcc_assert (Serious_Errors_Detected);
638 return 0;
641 /* Build an atomic load for the underlying atomic object in SRC. */
643 tree
644 build_atomic_load (tree src)
646 tree ptr_type
647 = build_pointer_type
648 (build_qualified_type (void_type_node, TYPE_QUAL_VOLATILE));
649 tree mem_model = build_int_cst (integer_type_node, MEMMODEL_SEQ_CST);
650 tree orig_src = src;
651 tree t, addr, val;
652 unsigned int size;
653 int fncode;
655 /* Remove conversions to get the address of the underlying object. */
656 src = remove_conversions (src, false);
657 size = resolve_atomic_size (TREE_TYPE (src));
658 if (size == 0)
659 return orig_src;
661 fncode = (int) BUILT_IN_ATOMIC_LOAD_N + exact_log2 (size) + 1;
662 t = builtin_decl_implicit ((enum built_in_function) fncode);
664 addr = build_unary_op (ADDR_EXPR, ptr_type, src);
665 val = build_call_expr (t, 2, addr, mem_model);
667 /* First reinterpret the loaded bits in the original type of the load,
668 then convert to the expected result type. */
669 t = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (src), val);
670 return convert (TREE_TYPE (orig_src), t);
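
/* Editorial note on the FNCODE computation above: the sized variants
   immediately follow the _N form in the built-in function enumeration,
   so e.g. for SIZE == 4 we get
     BUILT_IN_ATOMIC_LOAD_N + exact_log2 (4) + 1 == BUILT_IN_ATOMIC_LOAD_4,
   the 4-byte form of __atomic_load.  */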
673 /* Build an atomic store from SRC to the underlying atomic object in DEST. */
675 tree
676 build_atomic_store (tree dest, tree src)
678 tree ptr_type
679 = build_pointer_type
680 (build_qualified_type (void_type_node, TYPE_QUAL_VOLATILE));
681 tree mem_model = build_int_cst (integer_type_node, MEMMODEL_SEQ_CST);
682 tree orig_dest = dest;
683 tree t, int_type, addr;
684 unsigned int size;
685 int fncode;
687 /* Remove conversions to get the address of the underlying object. */
688 dest = remove_conversions (dest, false);
689 size = resolve_atomic_size (TREE_TYPE (dest));
690 if (size == 0)
691 return build_binary_op (MODIFY_EXPR, NULL_TREE, orig_dest, src);
693 fncode = (int) BUILT_IN_ATOMIC_STORE_N + exact_log2 (size) + 1;
694 t = builtin_decl_implicit ((enum built_in_function) fncode);
695 int_type = gnat_type_for_size (BITS_PER_UNIT * size, 1);
697 /* First convert the bits to be stored to the original type of the store,
698 then reinterpret them in the effective type. But if the original type
699 is a padded type with the same size, convert to the inner type instead,
700 as we don't want to artificially introduce a CONSTRUCTOR here. */
701 if (TYPE_IS_PADDING_P (TREE_TYPE (dest))
702 && TYPE_SIZE (TREE_TYPE (dest))
703 == TYPE_SIZE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (dest)))))
704 src = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (dest))), src);
705 else
706 src = convert (TREE_TYPE (dest), src);
707 src = fold_build1 (VIEW_CONVERT_EXPR, int_type, src);
708 addr = build_unary_op (ADDR_EXPR, ptr_type, dest);
710 return build_call_expr (t, 3, addr, src, mem_model);
713 /* Make a binary operation of kind OP_CODE. RESULT_TYPE is the type
714 desired for the result. Usually the operation is to be performed
715 in that type. For INIT_EXPR and MODIFY_EXPR, RESULT_TYPE must be
716 NULL_TREE. For ARRAY_REF, RESULT_TYPE may be NULL_TREE, in which
717 case the type to be used will be derived from the operands.
719 This function is very much unlike the ones for C and C++ since we
720 have already done any type conversion and matching required. All we
721 have to do here is validate the work done by SEM and handle subtypes. */
723 tree
724 build_binary_op (enum tree_code op_code, tree result_type,
725 tree left_operand, tree right_operand)
727 tree left_type = TREE_TYPE (left_operand);
728 tree right_type = TREE_TYPE (right_operand);
729 tree left_base_type = get_base_type (left_type);
730 tree right_base_type = get_base_type (right_type);
731 tree operation_type = result_type;
732 tree best_type = NULL_TREE;
733 tree modulus, result;
734 bool has_side_effects = false;
736 if (operation_type
737 && TREE_CODE (operation_type) == RECORD_TYPE
738 && TYPE_JUSTIFIED_MODULAR_P (operation_type))
739 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
741 if (operation_type
742 && TREE_CODE (operation_type) == INTEGER_TYPE
743 && TYPE_EXTRA_SUBTYPE_P (operation_type))
744 operation_type = get_base_type (operation_type);
746 modulus = (operation_type
747 && TREE_CODE (operation_type) == INTEGER_TYPE
748 && TYPE_MODULAR_P (operation_type)
749 ? TYPE_MODULUS (operation_type) : NULL_TREE);
751 switch (op_code)
753 case INIT_EXPR:
754 case MODIFY_EXPR:
755 #ifdef ENABLE_CHECKING
756 gcc_assert (result_type == NULL_TREE);
757 #endif
758 /* If there were integral or pointer conversions on the LHS, remove
759 them; we'll be putting them back below if needed. Likewise for
760 conversions between array and record types, except for justified
761 modular types. But don't do this if the right operand is not
762 BLKmode (for packed arrays) unless we are not changing the mode. */
763 while ((CONVERT_EXPR_P (left_operand)
764 || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
765 && (((INTEGRAL_TYPE_P (left_type)
766 || POINTER_TYPE_P (left_type))
767 && (INTEGRAL_TYPE_P (TREE_TYPE
768 (TREE_OPERAND (left_operand, 0)))
769 || POINTER_TYPE_P (TREE_TYPE
770 (TREE_OPERAND (left_operand, 0)))))
771 || (((TREE_CODE (left_type) == RECORD_TYPE
772 && !TYPE_JUSTIFIED_MODULAR_P (left_type))
773 || TREE_CODE (left_type) == ARRAY_TYPE)
774 && ((TREE_CODE (TREE_TYPE
775 (TREE_OPERAND (left_operand, 0)))
776 == RECORD_TYPE)
777 || (TREE_CODE (TREE_TYPE
778 (TREE_OPERAND (left_operand, 0)))
779 == ARRAY_TYPE))
780 && (TYPE_MODE (right_type) == BLKmode
781 || (TYPE_MODE (left_type)
782 == TYPE_MODE (TREE_TYPE
783 (TREE_OPERAND
784 (left_operand, 0))))))))
786 left_operand = TREE_OPERAND (left_operand, 0);
787 left_type = TREE_TYPE (left_operand);
790 /* If a class-wide type may be involved, force use of the RHS type. */
791 if ((TREE_CODE (right_type) == RECORD_TYPE
792 || TREE_CODE (right_type) == UNION_TYPE)
793 && TYPE_ALIGN_OK (right_type))
794 operation_type = right_type;
796 /* If we are copying between padded objects with compatible types, use
797      the padded view of the objects, as this is very likely more efficient.
798 Likewise for a padded object that is assigned a constructor, if we
799 can convert the constructor to the inner type, to avoid putting a
800 VIEW_CONVERT_EXPR on the LHS. But don't do so if we wouldn't have
801 actually copied anything. */
802 else if (TYPE_IS_PADDING_P (left_type)
803 && TREE_CONSTANT (TYPE_SIZE (left_type))
804 && ((TREE_CODE (right_operand) == COMPONENT_REF
805 && TYPE_MAIN_VARIANT (left_type)
806 == TYPE_MAIN_VARIANT
807 (TREE_TYPE (TREE_OPERAND (right_operand, 0))))
808 || (TREE_CODE (right_operand) == CONSTRUCTOR
809 && !CONTAINS_PLACEHOLDER_P
810 (DECL_SIZE (TYPE_FIELDS (left_type)))))
811 && !integer_zerop (TYPE_SIZE (right_type)))
813 /* We make an exception for a BLKmode type padding a non-BLKmode
814 inner type and do the conversion of the LHS right away, since
815 unchecked_convert wouldn't do it properly. */
816 if (TYPE_MODE (left_type) == BLKmode
817 && TYPE_MODE (right_type) != BLKmode
818 && TREE_CODE (right_operand) != CONSTRUCTOR)
820 operation_type = right_type;
821 left_operand = convert (operation_type, left_operand);
822 left_type = operation_type;
824 else
825 operation_type = left_type;
828 /* If we have a call to a function that returns an unconstrained type
829 with default discriminant on the RHS, use the RHS type (which is
830 padded) as we cannot compute the size of the actual assignment. */
831 else if (TREE_CODE (right_operand) == CALL_EXPR
832 && TYPE_IS_PADDING_P (right_type)
833 && CONTAINS_PLACEHOLDER_P
834 (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS (right_type)))))
835 operation_type = right_type;
837 /* Find the best type to use for copying between aggregate types. */
838 else if (((TREE_CODE (left_type) == ARRAY_TYPE
839 && TREE_CODE (right_type) == ARRAY_TYPE)
840 || (TREE_CODE (left_type) == RECORD_TYPE
841 && TREE_CODE (right_type) == RECORD_TYPE))
842 && (best_type = find_common_type (left_type, right_type)))
843 operation_type = best_type;
845 /* Otherwise use the LHS type. */
846 else
847 operation_type = left_type;
849 /* Ensure everything on the LHS is valid. If we have a field reference,
850 strip anything that get_inner_reference can handle. Then remove any
851 conversions between types having the same code and mode. And mark
852 VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE. When done, we must have
853 either an INDIRECT_REF, a NULL_EXPR or a DECL node. */
854 result = left_operand;
855 while (true)
857 tree restype = TREE_TYPE (result);
859 if (TREE_CODE (result) == COMPONENT_REF
860 || TREE_CODE (result) == ARRAY_REF
861 || TREE_CODE (result) == ARRAY_RANGE_REF)
862 while (handled_component_p (result))
863 result = TREE_OPERAND (result, 0);
864 else if (TREE_CODE (result) == REALPART_EXPR
865 || TREE_CODE (result) == IMAGPART_EXPR
866 || (CONVERT_EXPR_P (result)
867 && (((TREE_CODE (restype)
868 == TREE_CODE (TREE_TYPE
869 (TREE_OPERAND (result, 0))))
870 && (TYPE_MODE (TREE_TYPE
871 (TREE_OPERAND (result, 0)))
872 == TYPE_MODE (restype)))
873 || TYPE_ALIGN_OK (restype))))
874 result = TREE_OPERAND (result, 0);
875 else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
877 TREE_ADDRESSABLE (result) = 1;
878 result = TREE_OPERAND (result, 0);
880 else
881 break;
884 gcc_assert (TREE_CODE (result) == INDIRECT_REF
885 || TREE_CODE (result) == NULL_EXPR
886 || DECL_P (result));
888 /* Convert the right operand to the operation type unless it is
889         already of the correct type or the type involves a
890 placeholder, since the RHS may not have the same record type. */
891 if (operation_type != right_type
892 && !CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type)))
894 right_operand = convert (operation_type, right_operand);
895 right_type = operation_type;
898 /* If the left operand is not of the same type as the operation
899 type, wrap it up in a VIEW_CONVERT_EXPR. */
900 if (left_type != operation_type)
901 left_operand = unchecked_convert (operation_type, left_operand, false);
903 has_side_effects = true;
904 modulus = NULL_TREE;
905 break;
907 case ARRAY_REF:
908 if (!operation_type)
909 operation_type = TREE_TYPE (left_type);
911 /* ... fall through ... */
913 case ARRAY_RANGE_REF:
914      /* First look through conversions between type variants.  Note that
915 this changes neither the operation type nor the type domain. */
916 if (TREE_CODE (left_operand) == VIEW_CONVERT_EXPR
917 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (left_operand, 0)))
918 == TYPE_MAIN_VARIANT (left_type))
920 left_operand = TREE_OPERAND (left_operand, 0);
921 left_type = TREE_TYPE (left_operand);
924 /* For a range, make sure the element type is consistent. */
925 if (op_code == ARRAY_RANGE_REF
926 && TREE_TYPE (operation_type) != TREE_TYPE (left_type))
927 operation_type = build_array_type (TREE_TYPE (left_type),
928 TYPE_DOMAIN (operation_type));
930 /* Then convert the right operand to its base type. This will prevent
931 unneeded sign conversions when sizetype is wider than integer. */
932 right_operand = convert (right_base_type, right_operand);
933 right_operand = convert_to_index_type (right_operand);
934 modulus = NULL_TREE;
935 break;
937 case TRUTH_ANDIF_EXPR:
938 case TRUTH_ORIF_EXPR:
939 case TRUTH_AND_EXPR:
940 case TRUTH_OR_EXPR:
941 case TRUTH_XOR_EXPR:
942 #ifdef ENABLE_CHECKING
943 gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
944 #endif
945 operation_type = left_base_type;
946 left_operand = convert (operation_type, left_operand);
947 right_operand = convert (operation_type, right_operand);
948 break;
950 case GE_EXPR:
951 case LE_EXPR:
952 case GT_EXPR:
953 case LT_EXPR:
954 case EQ_EXPR:
955 case NE_EXPR:
956 #ifdef ENABLE_CHECKING
957 gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
958 #endif
959 /* If either operand is a NULL_EXPR, just return a new one. */
960 if (TREE_CODE (left_operand) == NULL_EXPR)
961 return build2 (op_code, result_type,
962 build1 (NULL_EXPR, integer_type_node,
963 TREE_OPERAND (left_operand, 0)),
964 integer_zero_node);
966 else if (TREE_CODE (right_operand) == NULL_EXPR)
967 return build2 (op_code, result_type,
968 build1 (NULL_EXPR, integer_type_node,
969 TREE_OPERAND (right_operand, 0)),
970 integer_zero_node);
972      /* If either object is a justified modular type, get the
973 fields from within. */
974 if (TREE_CODE (left_type) == RECORD_TYPE
975 && TYPE_JUSTIFIED_MODULAR_P (left_type))
977 left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
978 left_operand);
979 left_type = TREE_TYPE (left_operand);
980 left_base_type = get_base_type (left_type);
983 if (TREE_CODE (right_type) == RECORD_TYPE
984 && TYPE_JUSTIFIED_MODULAR_P (right_type))
986 right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
987 right_operand);
988 right_type = TREE_TYPE (right_operand);
989 right_base_type = get_base_type (right_type);
992 /* If both objects are arrays, compare them specially. */
993 if ((TREE_CODE (left_type) == ARRAY_TYPE
994 || (TREE_CODE (left_type) == INTEGER_TYPE
995 && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
996 && (TREE_CODE (right_type) == ARRAY_TYPE
997 || (TREE_CODE (right_type) == INTEGER_TYPE
998 && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
1000 result = compare_arrays (input_location,
1001 result_type, left_operand, right_operand);
1002 if (op_code == NE_EXPR)
1003 result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
1004 else
1005 gcc_assert (op_code == EQ_EXPR);
1007 return result;
1010 /* Otherwise, the base types must be the same, unless they are both fat
1011 pointer types or record types. In the latter case, use the best type
1012 and convert both operands to that type. */
1013 if (left_base_type != right_base_type)
1015 if (TYPE_IS_FAT_POINTER_P (left_base_type)
1016 && TYPE_IS_FAT_POINTER_P (right_base_type))
1018 gcc_assert (TYPE_MAIN_VARIANT (left_base_type)
1019 == TYPE_MAIN_VARIANT (right_base_type));
1020 best_type = left_base_type;
1023 else if (TREE_CODE (left_base_type) == RECORD_TYPE
1024 && TREE_CODE (right_base_type) == RECORD_TYPE)
1026 /* The only way this is permitted is if both types have the same
1027 name. In that case, one of them must not be self-referential.
1028 Use it as the best type. Even better with a fixed size. */
1029 gcc_assert (TYPE_NAME (left_base_type)
1030 && TYPE_NAME (left_base_type)
1031 == TYPE_NAME (right_base_type));
1033 if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
1034 best_type = left_base_type;
1035 else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
1036 best_type = right_base_type;
1037 else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
1038 best_type = left_base_type;
1039 else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
1040 best_type = right_base_type;
1041 else
1042 gcc_unreachable ();
1045 else
1046 gcc_unreachable ();
1048 left_operand = convert (best_type, left_operand);
1049 right_operand = convert (best_type, right_operand);
1051 else
1053 left_operand = convert (left_base_type, left_operand);
1054 right_operand = convert (right_base_type, right_operand);
1057 /* If both objects are fat pointers, compare them specially. */
1058 if (TYPE_IS_FAT_POINTER_P (left_base_type))
1060 result
1061 = compare_fat_pointers (input_location,
1062 result_type, left_operand, right_operand);
1063 if (op_code == NE_EXPR)
1064 result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
1065 else
1066 gcc_assert (op_code == EQ_EXPR);
1068 return result;
1071 modulus = NULL_TREE;
1072 break;
1074 case LSHIFT_EXPR:
1075 case RSHIFT_EXPR:
1076 case LROTATE_EXPR:
1077 case RROTATE_EXPR:
1078 /* The RHS of a shift can be any type. Also, ignore any modulus
1079 (we used to abort, but this is needed for unchecked conversion
1080 to modular types). Otherwise, processing is the same as normal. */
1081 gcc_assert (operation_type == left_base_type);
1082 modulus = NULL_TREE;
1083 left_operand = convert (operation_type, left_operand);
1084 break;
1086 case BIT_AND_EXPR:
1087 case BIT_IOR_EXPR:
1088 case BIT_XOR_EXPR:
1089 /* For binary modulus, if the inputs are in range, so are the
1090 outputs. */
1091 if (modulus && integer_pow2p (modulus))
1092 modulus = NULL_TREE;
1093 goto common;
1095 case COMPLEX_EXPR:
1096 gcc_assert (TREE_TYPE (result_type) == left_base_type
1097 && TREE_TYPE (result_type) == right_base_type);
1098 left_operand = convert (left_base_type, left_operand);
1099 right_operand = convert (right_base_type, right_operand);
1100 break;
1102 case TRUNC_DIV_EXPR: case TRUNC_MOD_EXPR:
1103 case CEIL_DIV_EXPR: case CEIL_MOD_EXPR:
1104 case FLOOR_DIV_EXPR: case FLOOR_MOD_EXPR:
1105 case ROUND_DIV_EXPR: case ROUND_MOD_EXPR:
1106 /* These always produce results lower than either operand. */
1107 modulus = NULL_TREE;
1108 goto common;
1110 case POINTER_PLUS_EXPR:
1111 gcc_assert (operation_type == left_base_type
1112 && sizetype == right_base_type);
1113 left_operand = convert (operation_type, left_operand);
1114 right_operand = convert (sizetype, right_operand);
1115 break;
1117 case PLUS_NOMOD_EXPR:
1118 case MINUS_NOMOD_EXPR:
1119 if (op_code == PLUS_NOMOD_EXPR)
1120 op_code = PLUS_EXPR;
1121 else
1122 op_code = MINUS_EXPR;
1123 modulus = NULL_TREE;
1125 /* ... fall through ... */
1127 case PLUS_EXPR:
1128 case MINUS_EXPR:
1129      /* Avoid doing arithmetic in ENUMERAL_TYPE or BOOLEAN_TYPE like the
1130         other compilers.  Contrary to C, Ada doesn't allow arithmetic in
1131         these types, but can generate addition/subtraction for Succ/Pred.  */
1132 if (operation_type
1133 && (TREE_CODE (operation_type) == ENUMERAL_TYPE
1134 || TREE_CODE (operation_type) == BOOLEAN_TYPE))
1135 operation_type = left_base_type = right_base_type
1136 = gnat_type_for_mode (TYPE_MODE (operation_type),
1137 TYPE_UNSIGNED (operation_type));
1139 /* ... fall through ... */
1141 default:
1142 common:
1143      /* The result type should be the same as the base types of
1144         both operands (and they should be the same).  Convert
1145         everything to the result type.  */
1147 gcc_assert (operation_type == left_base_type
1148 && left_base_type == right_base_type);
1149 left_operand = convert (operation_type, left_operand);
1150 right_operand = convert (operation_type, right_operand);
1153 if (modulus && !integer_pow2p (modulus))
1155 result = nonbinary_modular_operation (op_code, operation_type,
1156 left_operand, right_operand);
1157 modulus = NULL_TREE;
1159 /* If either operand is a NULL_EXPR, just return a new one. */
1160 else if (TREE_CODE (left_operand) == NULL_EXPR)
1161 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
1162 else if (TREE_CODE (right_operand) == NULL_EXPR)
1163 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
1164 else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
1165 result = fold (build4 (op_code, operation_type, left_operand,
1166 right_operand, NULL_TREE, NULL_TREE));
1167 else if (op_code == INIT_EXPR || op_code == MODIFY_EXPR)
1168 result = build2 (op_code, void_type_node, left_operand, right_operand);
1169 else
1170 result
1171 = fold_build2 (op_code, operation_type, left_operand, right_operand);
1173 if (TREE_CONSTANT (result))
1175 else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
1177 TREE_THIS_NOTRAP (result) = 1;
1178 if (TYPE_VOLATILE (operation_type))
1179 TREE_THIS_VOLATILE (result) = 1;
1181 else
1182 TREE_CONSTANT (result)
1183 |= (TREE_CONSTANT (left_operand) && TREE_CONSTANT (right_operand));
1185 TREE_SIDE_EFFECTS (result) |= has_side_effects;
1187 /* If we are working with modular types, perform the MOD operation
1188 if something above hasn't eliminated the need for it. */
1189 if (modulus)
1190 result = fold_build2 (FLOOR_MOD_EXPR, operation_type, result,
1191 convert (operation_type, modulus));
1193 if (result_type && result_type != operation_type)
1194 result = convert (result_type, result);
1196 return result;
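
/* Editorial usage sketch: the callers in this file reach the common path
   above with operands already matched by SEM, e.g. compare_arrays builds
     build_binary_op (MINUS_EXPR, bt, ub1, lb1)
   and build_binary_op itself converts both operands to the operation type
   and applies the modular reduction when the type requires it.  */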
1199 /* Similar, but for unary operations. */
1201 tree
1202 build_unary_op (enum tree_code op_code, tree result_type, tree operand)
1204 tree type = TREE_TYPE (operand);
1205 tree base_type = get_base_type (type);
1206 tree operation_type = result_type;
1207 tree result;
1209 if (operation_type
1210 && TREE_CODE (operation_type) == RECORD_TYPE
1211 && TYPE_JUSTIFIED_MODULAR_P (operation_type))
1212 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
1214 if (operation_type
1215 && TREE_CODE (operation_type) == INTEGER_TYPE
1216 && TYPE_EXTRA_SUBTYPE_P (operation_type))
1217 operation_type = get_base_type (operation_type);
1219 switch (op_code)
1221 case REALPART_EXPR:
1222 case IMAGPART_EXPR:
1223 if (!operation_type)
1224 result_type = operation_type = TREE_TYPE (type);
1225 else
1226 gcc_assert (result_type == TREE_TYPE (type));
1228 result = fold_build1 (op_code, operation_type, operand);
1229 break;
1231 case TRUTH_NOT_EXPR:
1232 #ifdef ENABLE_CHECKING
1233 gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
1234 #endif
1235 result = invert_truthvalue_loc (EXPR_LOCATION (operand), operand);
1236 /* When not optimizing, fold the result as invert_truthvalue_loc
1237 doesn't fold the result of comparisons. This is intended to undo
1238 the trick used for boolean rvalues in gnat_to_gnu. */
1239 if (!optimize)
1240 result = fold (result);
1241 break;
1243 case ATTR_ADDR_EXPR:
1244 case ADDR_EXPR:
1245 switch (TREE_CODE (operand))
1247 case INDIRECT_REF:
1248 case UNCONSTRAINED_ARRAY_REF:
1249 result = TREE_OPERAND (operand, 0);
1251 /* Make sure the type here is a pointer, not a reference.
1252 GCC wants pointer types for function addresses. */
1253 if (!result_type)
1254 result_type = build_pointer_type (type);
1256 /* If the underlying object can alias everything, propagate the
1257 property since we are effectively retrieving the object. */
1258 if (POINTER_TYPE_P (TREE_TYPE (result))
1259 && TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (result)))
1261 if (TREE_CODE (result_type) == POINTER_TYPE
1262 && !TYPE_REF_CAN_ALIAS_ALL (result_type))
1263 result_type
1264 = build_pointer_type_for_mode (TREE_TYPE (result_type),
1265 TYPE_MODE (result_type),
1266 true);
1267 else if (TREE_CODE (result_type) == REFERENCE_TYPE
1268 && !TYPE_REF_CAN_ALIAS_ALL (result_type))
1269 result_type
1270 = build_reference_type_for_mode (TREE_TYPE (result_type),
1271 TYPE_MODE (result_type),
1272 true);
1274 break;
1276 case NULL_EXPR:
1277 result = operand;
1278 TREE_TYPE (result) = type = build_pointer_type (type);
1279 break;
1281 case COMPOUND_EXPR:
1282 /* Fold a compound expression if it has unconstrained array type
1283         since the middle-end cannot handle it.  But we don't do it in the
1284         general case because it may introduce aliasing issues if the
1285         first operand is an indirect assignment and the second operand is
1286 the corresponding address, e.g. for an allocator. */
1287 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
1289 result = build_unary_op (ADDR_EXPR, result_type,
1290 TREE_OPERAND (operand, 1));
1291 result = build2 (COMPOUND_EXPR, TREE_TYPE (result),
1292 TREE_OPERAND (operand, 0), result);
1293 break;
1295 goto common;
1297 case ARRAY_REF:
1298 case ARRAY_RANGE_REF:
1299 case COMPONENT_REF:
1300 case BIT_FIELD_REF:
1301 /* If this is for 'Address, find the address of the prefix and add
1302 the offset to the field. Otherwise, do this the normal way. */
1303 if (op_code == ATTR_ADDR_EXPR)
1305 HOST_WIDE_INT bitsize;
1306 HOST_WIDE_INT bitpos;
1307 tree offset, inner;
1308 enum machine_mode mode;
1309 int unsignedp, volatilep;
1311 inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
1312 &mode, &unsignedp, &volatilep,
1313 false);
1315 /* If INNER is a padding type whose field has a self-referential
1316 size, convert to that inner type. We know the offset is zero
1317 and we need to have that type visible. */
1318 if (TYPE_IS_PADDING_P (TREE_TYPE (inner))
1319 && CONTAINS_PLACEHOLDER_P
1320 (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS
1321 (TREE_TYPE (inner))))))
1322 inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
1323 inner);
1325 /* Compute the offset as a byte offset from INNER. */
1326 if (!offset)
1327 offset = size_zero_node;
1329 offset = size_binop (PLUS_EXPR, offset,
1330 size_int (bitpos / BITS_PER_UNIT));
1332 /* Take the address of INNER, convert the offset to void *, and
1333           then add them.  It will later be converted to the desired result
1334 type, if any. */
1335 inner = build_unary_op (ADDR_EXPR, NULL_TREE, inner);
1336 inner = convert (ptr_void_type_node, inner);
1337 result = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
1338 inner, offset);
1339 result = convert (build_pointer_type (TREE_TYPE (operand)),
1340 result);
1341 break;
1343 goto common;
1345 case CONSTRUCTOR:
1346 /* If this is just a constructor for a padded record, we can
1347 just take the address of the single field and convert it to
1348 a pointer to our type. */
1349 if (TYPE_IS_PADDING_P (type))
1351 result = (*CONSTRUCTOR_ELTS (operand))[0].value;
1352 result = convert (build_pointer_type (TREE_TYPE (operand)),
1353 build_unary_op (ADDR_EXPR, NULL_TREE, result));
1354 break;
1357 goto common;
1359 case NOP_EXPR:
1360 if (AGGREGATE_TYPE_P (type)
1361 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
1362 return build_unary_op (ADDR_EXPR, result_type,
1363 TREE_OPERAND (operand, 0));
1365 /* ... fallthru ... */
1367 case VIEW_CONVERT_EXPR:
1368      /* If this is just a variant conversion or if the conversion doesn't
1369 change the mode, get the result type from this type and go down.
1370 This is needed for conversions of CONST_DECLs, to eventually get
1371 to the address of their CORRESPONDING_VARs. */
1372 if ((TYPE_MAIN_VARIANT (type)
1373 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (operand, 0))))
1374 || (TYPE_MODE (type) != BLKmode
1375 && (TYPE_MODE (type)
1376 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0))))))
1377 return build_unary_op (ADDR_EXPR,
1378 (result_type ? result_type
1379 : build_pointer_type (type)),
1380 TREE_OPERAND (operand, 0));
1381 goto common;
1383 case CONST_DECL:
1384 operand = DECL_CONST_CORRESPONDING_VAR (operand);
1386 /* ... fall through ... */
1388 default:
1389 common:
1391 /* If we are taking the address of a padded record whose field
1392 contains a template, take the address of the field. */
1393 if (TYPE_IS_PADDING_P (type)
1394 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
1395 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
1397 type = TREE_TYPE (TYPE_FIELDS (type));
1398 operand = convert (type, operand);
1401 gnat_mark_addressable (operand);
1402 result = build_fold_addr_expr (operand);
1405 TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
1406 break;
1408 case INDIRECT_REF:
1410 tree t = remove_conversions (operand, false);
1411 bool can_never_be_null = DECL_P (t) && DECL_CAN_NEVER_BE_NULL_P (t);
1413 /* If TYPE is a thin pointer, either first retrieve the base if this
1414 is an expression with an offset built for the initialization of an
1415 object with an unconstrained nominal subtype, or else convert to
1416 the fat pointer. */
1417 if (TYPE_IS_THIN_POINTER_P (type))
1419 tree rec_type = TREE_TYPE (type);
1421 if (TREE_CODE (operand) == POINTER_PLUS_EXPR
1422 && TREE_OPERAND (operand, 1)
1423 == byte_position (DECL_CHAIN (TYPE_FIELDS (rec_type)))
1424 && TREE_CODE (TREE_OPERAND (operand, 0)) == NOP_EXPR)
1426 operand = TREE_OPERAND (TREE_OPERAND (operand, 0), 0);
1427 type = TREE_TYPE (operand);
1429 else if (TYPE_UNCONSTRAINED_ARRAY (rec_type))
1431 operand
1432 = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (rec_type)),
1433 operand);
1434 type = TREE_TYPE (operand);
1438 /* If we want to refer to an unconstrained array, use the appropriate
1439 expression. But this will never survive down to the back-end. */
1440 if (TYPE_IS_FAT_POINTER_P (type))
1442 result = build1 (UNCONSTRAINED_ARRAY_REF,
1443 TYPE_UNCONSTRAINED_ARRAY (type), operand);
1444 TREE_READONLY (result)
1445 = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
1448 /* If we are dereferencing an ADDR_EXPR, return its operand. */
1449 else if (TREE_CODE (operand) == ADDR_EXPR)
1450 result = TREE_OPERAND (operand, 0);
1452 /* Otherwise, build and fold the indirect reference. */
1453 else
1455 result = build_fold_indirect_ref (operand);
1456 TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
1459 if (!TYPE_IS_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)))
1461 TREE_SIDE_EFFECTS (result) = 1;
1462 if (TREE_CODE (result) == INDIRECT_REF)
1463 TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
1466 if ((TREE_CODE (result) == INDIRECT_REF
1467 || TREE_CODE (result) == UNCONSTRAINED_ARRAY_REF)
1468 && can_never_be_null)
1469 TREE_THIS_NOTRAP (result) = 1;
1471 break;
1474 case NEGATE_EXPR:
1475 case BIT_NOT_EXPR:
1477 tree modulus = ((operation_type
1478 && TREE_CODE (operation_type) == INTEGER_TYPE
1479 && TYPE_MODULAR_P (operation_type))
1480 ? TYPE_MODULUS (operation_type) : NULL_TREE);
1481 int mod_pow2 = modulus && integer_pow2p (modulus);
1483 /* If this is a modular type, there are various possibilities
1484 depending on the operation and whether the modulus is a
1485 power of two or not. */
1487 if (modulus)
1489 gcc_assert (operation_type == base_type);
1490 operand = convert (operation_type, operand);
1492 /* The fastest in the negate case for binary modulus is
1493 the straightforward code; the TRUNC_MOD_EXPR below
1494 is an AND operation. */
1495 if (op_code == NEGATE_EXPR && mod_pow2)
1496 result = fold_build2 (TRUNC_MOD_EXPR, operation_type,
1497 fold_build1 (NEGATE_EXPR, operation_type,
1498 operand),
1499 modulus);
1501        /* For the nonbinary negate case, return zero for zero operand,
1502 else return the modulus minus the operand. If the modulus
1503 is a power of two minus one, we can do the subtraction
1504 as an XOR since it is equivalent and faster on most machines. */
1505 else if (op_code == NEGATE_EXPR && !mod_pow2)
1507 if (integer_pow2p (fold_build2 (PLUS_EXPR, operation_type,
1508 modulus,
1509 convert (operation_type,
1510 integer_one_node))))
1511 result = fold_build2 (BIT_XOR_EXPR, operation_type,
1512 operand, modulus);
1513 else
1514 result = fold_build2 (MINUS_EXPR, operation_type,
1515 modulus, operand);
1517 result = fold_build3 (COND_EXPR, operation_type,
1518 fold_build2 (NE_EXPR,
1519 boolean_type_node,
1520 operand,
1521 convert
1522 (operation_type,
1523 integer_zero_node)),
1524 result, operand);
1526 else
1528            /* For the NOT cases, we need a constant equal to
1529               the modulus minus one.  We XOR the operand against
1530               that constant for a binary modulus, and subtract the
1531               operand from it for a nonbinary modulus.  */
1533 tree cnst = fold_build2 (MINUS_EXPR, operation_type, modulus,
1534 convert (operation_type,
1535 integer_one_node));
1537 if (mod_pow2)
1538 result = fold_build2 (BIT_XOR_EXPR, operation_type,
1539 operand, cnst);
1540 else
1541 result = fold_build2 (MINUS_EXPR, operation_type,
1542 cnst, operand);
1545 break;
1549 /* ... fall through ... */
1551 default:
1552 gcc_assert (operation_type == base_type);
1553 result = fold_build1 (op_code, operation_type,
1554 convert (operation_type, operand));
1557 if (result_type && TREE_TYPE (result) != result_type)
1558 result = convert (result_type, result);
1560 return result;
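
/* Editorial worked example for the modular NEGATE_EXPR case above: with
   modulus 15 = 2**4 - 1 and operand 6, 15 - 6 = 9 and 15 ^ 6 = 9 as well,
   so the cheaper XOR form is valid whenever the modulus is a power of two
   minus one; the guarding COND_EXPR keeps a zero operand mapped to zero.  */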
1563 /* Similar, but for COND_EXPR. */
1565 tree
1566 build_cond_expr (tree result_type, tree condition_operand,
1567 tree true_operand, tree false_operand)
1569 bool addr_p = false;
1570 tree result;
1572 /* The front-end verified that result, true and false operands have
1573      the same base type.  Convert everything to the result type.  */
1574 true_operand = convert (result_type, true_operand);
1575 false_operand = convert (result_type, false_operand);
1577 /* If the result type is unconstrained, take the address of the operands and
1578 then dereference the result. Likewise if the result type is passed by
1579 reference, because creating a temporary of this type is not allowed. */
1580 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1581 || TYPE_IS_BY_REFERENCE_P (result_type)
1582 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1584 result_type = build_pointer_type (result_type);
1585 true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
1586 false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
1587 addr_p = true;
1590 result = fold_build3 (COND_EXPR, result_type, condition_operand,
1591 true_operand, false_operand);
1593   /* If we have a common SAVE_EXPR (possibly surrounded by arithmetic)
1594 in both arms, make sure it gets evaluated by moving it ahead of the
1595 conditional expression. This is necessary because it is evaluated
1596 in only one place at run time and would otherwise be uninitialized
1597 in one of the arms. */
1598 true_operand = skip_simple_arithmetic (true_operand);
1599 false_operand = skip_simple_arithmetic (false_operand);
1601 if (true_operand == false_operand && TREE_CODE (true_operand) == SAVE_EXPR)
1602 result = build2 (COMPOUND_EXPR, result_type, true_operand, result);
1604 if (addr_p)
1605 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1607 return result;
1610 /* Similar, but for COMPOUND_EXPR. */
1612 tree
1613 build_compound_expr (tree result_type, tree stmt_operand, tree expr_operand)
1615 bool addr_p = false;
1616 tree result;
1618 /* If the result type is unconstrained, take the address of the operand and
1619 then dereference the result. Likewise if the result type is passed by
1620 reference, but this is natively handled in the gimplifier. */
1621 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1622 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1624 result_type = build_pointer_type (result_type);
1625 expr_operand = build_unary_op (ADDR_EXPR, result_type, expr_operand);
1626 addr_p = true;
1629 result = fold_build2 (COMPOUND_EXPR, result_type, stmt_operand,
1630 expr_operand);
1632 if (addr_p)
1633 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1635 return result;
1638 /* Conveniently construct a function call expression. FNDECL names the
1639 function to be called, N is the number of arguments, and the "..."
1640 parameters are the argument expressions. Unlike build_call_expr
1641 this doesn't fold the call, hence it will always return a CALL_EXPR. */
1643 tree
1644 build_call_n_expr (tree fndecl, int n, ...)
1646 va_list ap;
1647 tree fntype = TREE_TYPE (fndecl);
1648 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
1650 va_start (ap, n);
1651 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
1652 va_end (ap);
1653 return fn;
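/* Illustrative use (a sketch; FNDECL, ARG1 and ARG2 stand for arbitrary
   trees):

     build_call_n_expr (fndecl, 2, arg1, arg2)

   yields the unfolded equivalent of the C call FNDECL (ARG1, ARG2), i.e.
   a CALL_EXPR whose function operand is the address of FNDECL.  */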
1656 /* Call a function that raises an exception and pass the line number and file
1657 name, if requested. MSG says which exception function to call.
1659 GNAT_NODE is the gnat node conveying the source location for which the
1660 error should be signaled, or Empty in which case the error is signaled on
1661 the current ref_file_name/input_line.
1663 KIND says which kind of exception this is for
1664 (N_Raise_{Constraint,Storage,Program}_Error). */
1666 tree
1667 build_call_raise (int msg, Node_Id gnat_node, char kind)
1669 tree fndecl = gnat_raise_decls[msg];
1670 tree label = get_exception_label (kind);
1671 tree filename;
1672 int line_number;
1673 const char *str;
1674 int len;
1676 /* If this is to be done as a goto, handle that case. */
1677 if (label)
1679 Entity_Id local_raise = Get_Local_Raise_Call_Entity ();
1680 tree gnu_result = build1 (GOTO_EXPR, void_type_node, label);
1682 /* If Local_Raise is present, generate
1683 Local_Raise (exception'Identity); */
1684 if (Present (local_raise))
1686 tree gnu_local_raise
1687 = gnat_to_gnu_entity (local_raise, NULL_TREE, 0);
1688 tree gnu_exception_entity
1689 = gnat_to_gnu_entity (Get_RT_Exception_Entity (msg), NULL_TREE, 0);
1690 tree gnu_call
1691 = build_call_n_expr (gnu_local_raise, 1,
1692 build_unary_op (ADDR_EXPR, NULL_TREE,
1693 gnu_exception_entity));
1695 gnu_result = build2 (COMPOUND_EXPR, void_type_node,
1696 gnu_call, gnu_result);
1697 }
1698 return gnu_result;
1701 str
1702 = (Debug_Flag_NN || Exception_Locations_Suppressed)
1703 ? ""
1704 : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1705 ? IDENTIFIER_POINTER
1706 (get_identifier (Get_Name_String
1707 (Debug_Source_Name
1708 (Get_Source_File_Index (Sloc (gnat_node))))))
1709 : ref_filename;
1711 len = strlen (str);
1712 filename = build_string (len, str);
1713 line_number
1714 = (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1715 ? Get_Logical_Line_Number (Sloc (gnat_node)) : input_line;
1717 TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
1718 build_index_type (size_int (len)));
1720 return
1721 build_call_n_expr (fndecl, 2,
1722 build1 (ADDR_EXPR,
1723 build_pointer_type (unsigned_char_type_node),
1724 filename),
1725 build_int_cst (NULL_TREE, line_number));
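/* In the non-goto case, the tree built above amounts to a call of the
   form (a sketch; the file name and line are hypothetical values)

     raise_routine ("f.adb", 42)

   where raise_routine is the __gnat_rcheck_* entry point selected by
   MSG.  */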
1728 /* Similar to build_call_raise, for an index or range check exception as
1729 determined by MSG, with extra information of the form
1730 "INDEX out of range FIRST..LAST". */
1732 tree
1733 build_call_raise_range (int msg, Node_Id gnat_node,
1734 tree index, tree first, tree last)
1736 tree fndecl = gnat_raise_decls_ext[msg];
1737 tree filename;
1738 int line_number, column_number;
1739 const char *str;
1740 int len;
1742 str
1743 = (Debug_Flag_NN || Exception_Locations_Suppressed)
1744 ? ""
1745 : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1746 ? IDENTIFIER_POINTER
1747 (get_identifier (Get_Name_String
1748 (Debug_Source_Name
1749 (Get_Source_File_Index (Sloc (gnat_node))))))
1750 : ref_filename;
1752 len = strlen (str);
1753 filename = build_string (len, str);
1754 if (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1756 line_number = Get_Logical_Line_Number (Sloc (gnat_node));
1757 column_number = Get_Column_Number (Sloc (gnat_node));
1759 else
1761 line_number = input_line;
1762 column_number = 0;
1765 TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
1766 build_index_type (size_int (len)));
1768 return
1769 build_call_n_expr (fndecl, 6,
1770 build1 (ADDR_EXPR,
1771 build_pointer_type (unsigned_char_type_node),
1772 filename),
1773 build_int_cst (NULL_TREE, line_number),
1774 build_int_cst (NULL_TREE, column_number),
1775 convert (integer_type_node, index),
1776 convert (integer_type_node, first),
1777 convert (integer_type_node, last));
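/* So a check that fails on, say, index 10 against bounds 1 .. 4 becomes
   a 6-argument call (a sketch; location values are hypothetical)

     raise_routine ("f.adb", line, column, 10, 1, 4)

   from which the runtime formats the "INDEX out of range FIRST..LAST"
   message quoted above.  */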
1780 /* Similar to build_call_raise, with extra information about the column
1781 where the check failed. */
1783 tree
1784 build_call_raise_column (int msg, Node_Id gnat_node)
1786 tree fndecl = gnat_raise_decls_ext[msg];
1787 tree filename;
1788 int line_number, column_number;
1789 const char *str;
1790 int len;
1792 str
1793 = (Debug_Flag_NN || Exception_Locations_Suppressed)
1794 ? ""
1795 : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1796 ? IDENTIFIER_POINTER
1797 (get_identifier (Get_Name_String
1798 (Debug_Source_Name
1799 (Get_Source_File_Index (Sloc (gnat_node))))))
1800 : ref_filename;
1802 len = strlen (str);
1803 filename = build_string (len, str);
1804 if (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1806 line_number = Get_Logical_Line_Number (Sloc (gnat_node));
1807 column_number = Get_Column_Number (Sloc (gnat_node));
1809 else
1811 line_number = input_line;
1812 column_number = 0;
1815 TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
1816 build_index_type (size_int (len)));
1818 return
1819 build_call_n_expr (fndecl, 3,
1820 build1 (ADDR_EXPR,
1821 build_pointer_type (unsigned_char_type_node),
1822 filename),
1823 build_int_cst (NULL_TREE, line_number),
1824 build_int_cst (NULL_TREE, column_number));
1827 /* qsort comparator for the bit positions of two constructor elements
1828 for record components. */
1830 static int
1831 compare_elmt_bitpos (const PTR rt1, const PTR rt2)
1833 const constructor_elt * const elmt1 = (const constructor_elt * const) rt1;
1834 const constructor_elt * const elmt2 = (const constructor_elt * const) rt2;
1835 const_tree const field1 = elmt1->index;
1836 const_tree const field2 = elmt2->index;
1837 const int ret
1838 = tree_int_cst_compare (bit_position (field1), bit_position (field2));
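/* Fall back on the DECL_UIDs so that fields at the same bit position
   are kept in a stable, deterministic order.  */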
1840 return ret ? ret : (int) (DECL_UID (field1) - DECL_UID (field2));
1843 /* Return a CONSTRUCTOR of TYPE whose elements are V. */
1845 tree
1846 gnat_build_constructor (tree type, vec<constructor_elt, va_gc> *v)
1848 bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
1849 bool side_effects = false;
1850 tree result, obj, val;
1851 unsigned int n_elmts;
1853 /* Scan the elements to see if they are all constant or if any has side
1854 effects, to let us set global flags on the resulting constructor. Count
1855 the elements along the way for possible sorting purposes below. */
1856 FOR_EACH_CONSTRUCTOR_ELT (v, n_elmts, obj, val)
1858 /* The predicate must be in keeping with output_constructor. */
1859 if ((!TREE_CONSTANT (val) && !TREE_STATIC (val))
1860 || (TREE_CODE (type) == RECORD_TYPE
1861 && CONSTRUCTOR_BITFIELD_P (obj)
1862 && !initializer_constant_valid_for_bitfield_p (val))
1863 || !initializer_constant_valid_p (val, TREE_TYPE (val)))
1864 allconstant = false;
1866 if (TREE_SIDE_EFFECTS (val))
1867 side_effects = true;
1870 /* For record types with constant components only, sort field list
1871 by increasing bit position. This is necessary to ensure the
1872 constructor can be output as static data. */
1873 if (allconstant && TREE_CODE (type) == RECORD_TYPE && n_elmts > 1)
1874 v->qsort (compare_elmt_bitpos);
1876 result = build_constructor (type, v);
1877 CONSTRUCTOR_NO_CLEARING (result) = 1;
1878 TREE_CONSTANT (result) = TREE_STATIC (result) = allconstant;
1879 TREE_SIDE_EFFECTS (result) = side_effects;
1880 TREE_READONLY (result) = TYPE_READONLY (type) || allconstant;
1881 return result;
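/* Typical use (a sketch; TYPE, FIELD1/FIELD2 and VAL1/VAL2 are
   placeholders):

     vec<constructor_elt, va_gc> *v;
     vec_alloc (v, 2);
     CONSTRUCTOR_APPEND_ELT (v, field1, val1);
     CONSTRUCTOR_APPEND_ELT (v, field2, val2);
     t = gnat_build_constructor (type, v);

   This is the pattern used below in build_allocator for the template +
   data pair of an unconstrained object.  */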
1884 /* Return a COMPONENT_REF to access a field that is given by COMPONENT,
1885 an IDENTIFIER_NODE giving the name of the field, or FIELD, a FIELD_DECL,
1886 for the field. Don't fold the result if NO_FOLD_P is true.
1888 We also handle the fact that we might have been passed a pointer to the
1889 actual record and know how to look for fields in variant parts. */
1891 static tree
1892 build_simple_component_ref (tree record_variable, tree component,
1893 tree field, bool no_fold_p)
1895 tree record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_variable));
1896 tree ref, inner_variable;
1898 gcc_assert (RECORD_OR_UNION_TYPE_P (record_type)
1899 && COMPLETE_TYPE_P (record_type)
1900 && (component == NULL_TREE) != (field == NULL_TREE));
1902 /* If no field was specified, look for a field with the specified name in
1903 the current record only. */
1904 if (!field)
1905 for (field = TYPE_FIELDS (record_type);
1906 field;
1907 field = DECL_CHAIN (field))
1908 if (DECL_NAME (field) == component)
1909 break;
1911 if (!field)
1912 return NULL_TREE;
1914 /* If this field is not in the specified record, see if we can find a field
1915 in the specified record whose original field is the same as this one. */
1916 if (DECL_CONTEXT (field) != record_type)
1918 tree new_field;
1920 /* First loop through normal components. */
1921 for (new_field = TYPE_FIELDS (record_type);
1922 new_field;
1923 new_field = DECL_CHAIN (new_field))
1924 if (SAME_FIELD_P (field, new_field))
1925 break;
1927 /* Next, see if we're looking for an inherited component in an extension.
1928 If so, look through the extension directly, but not if the type contains
1929 a placeholder, as it might be needed for a later substitution. */
1930 if (!new_field
1931 && TREE_CODE (record_variable) == VIEW_CONVERT_EXPR
1932 && TYPE_ALIGN_OK (record_type)
1933 && !type_contains_placeholder_p (record_type)
1934 && TREE_CODE (TREE_TYPE (TREE_OPERAND (record_variable, 0)))
1935 == RECORD_TYPE
1936 && TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (record_variable, 0))))
1938 ref = build_simple_component_ref (TREE_OPERAND (record_variable, 0),
1939 NULL_TREE, field, no_fold_p);
1940 if (ref)
1941 return ref;
1944 /* Next, loop through DECL_INTERNAL_P components if we haven't found the
1945 component in the first search. Doing this search in two steps is
1946 required to avoid hidden homonymous fields in the _Parent field. */
1947 if (!new_field)
1948 for (new_field = TYPE_FIELDS (record_type);
1949 new_field;
1950 new_field = DECL_CHAIN (new_field))
1951 if (DECL_INTERNAL_P (new_field))
1953 tree field_ref
1954 = build_simple_component_ref (record_variable,
1955 NULL_TREE, new_field, no_fold_p);
1956 ref = build_simple_component_ref (field_ref, NULL_TREE, field,
1957 no_fold_p);
1958 if (ref)
1959 return ref;
1962 field = new_field;
1965 if (!field)
1966 return NULL_TREE;
1968 /* If the field's offset has overflowed, do not try to access it, as doing
1969 so may trigger sanity checks deeper in the back-end.  Note that we don't
1970 need to warn here, since a warning was already issued when the object was declared.  */
1971 if (TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST
1972 && TREE_OVERFLOW (DECL_FIELD_OFFSET (field)))
1973 return NULL_TREE;
1975 /* Look through conversion between type variants. This is transparent as
1976 far as the field is concerned. */
1977 if (TREE_CODE (record_variable) == VIEW_CONVERT_EXPR
1978 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (record_variable, 0)))
1979 == record_type)
1980 inner_variable = TREE_OPERAND (record_variable, 0);
1981 else
1982 inner_variable = record_variable;
1984 ref = build3 (COMPONENT_REF, TREE_TYPE (field), inner_variable, field,
1985 NULL_TREE);
1987 if (TREE_READONLY (record_variable)
1988 || TREE_READONLY (field)
1989 || TYPE_READONLY (record_type))
1990 TREE_READONLY (ref) = 1;
1992 if (TREE_THIS_VOLATILE (record_variable)
1993 || TREE_THIS_VOLATILE (field)
1994 || TYPE_VOLATILE (record_type))
1995 TREE_THIS_VOLATILE (ref) = 1;
1997 if (no_fold_p)
1998 return ref;
2000 /* The generic folder may punt in this case because the inner array type
2001 can be self-referential, but folding is in fact not problematic. */
2002 if (TREE_CODE (record_variable) == CONSTRUCTOR
2003 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (record_variable)))
2005 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (record_variable);
2006 unsigned HOST_WIDE_INT idx;
2007 tree index, value;
2008 FOR_EACH_CONSTRUCTOR_ELT (elts, idx, index, value)
2009 if (index == field)
2010 return value;
2011 return ref;
2014 return fold (ref);
2017 /* Like build_simple_component_ref, except that we give an error if the
2018 reference could not be found. */
2020 tree
2021 build_component_ref (tree record_variable, tree component,
2022 tree field, bool no_fold_p)
2024 tree ref = build_simple_component_ref (record_variable, component, field,
2025 no_fold_p);
2027 if (ref)
2028 return ref;
2030 /* If FIELD was specified, assume this is an invalid user field so raise
2031 Constraint_Error. Otherwise, we have no type to return so abort. */
2032 gcc_assert (field);
2033 return build1 (NULL_EXPR, TREE_TYPE (field),
2034 build_call_raise (CE_Discriminant_Check_Failed, Empty,
2035 N_Raise_Constraint_Error));
2038 /* Helper for build_call_alloc_dealloc, with arguments to be interpreted
2039 identically. Process the case where a GNAT_PROC to call is provided. */
2041 static inline tree
2042 build_call_alloc_dealloc_proc (tree gnu_obj, tree gnu_size, tree gnu_type,
2043 Entity_Id gnat_proc, Entity_Id gnat_pool)
2045 tree gnu_proc = gnat_to_gnu (gnat_proc);
2046 tree gnu_call;
2048 /* A storage pool's underlying type is a record type (for both predefined
2049 storage pools and GNAT simple storage pools). The secondary stack uses
2050 the same mechanism, but its pool object (SS_Pool) is an integer. */
2051 if (Is_Record_Type (Underlying_Type (Etype (gnat_pool))))
2053 /* The size is the third parameter; the alignment parameter
2054 has the same type.  */
2055 Entity_Id gnat_size_type
2056 = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
2057 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2059 tree gnu_pool = gnat_to_gnu (gnat_pool);
2060 tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
2061 tree gnu_align = size_int (TYPE_ALIGN (gnu_type) / BITS_PER_UNIT);
2063 gnu_size = convert (gnu_size_type, gnu_size);
2064 gnu_align = convert (gnu_size_type, gnu_align);
2066 /* The first arg is always the address of the storage pool; next
2067 comes the address of the object, for a deallocator, then the
2068 size and alignment. */
2069 if (gnu_obj)
2070 gnu_call = build_call_n_expr (gnu_proc, 4, gnu_pool_addr, gnu_obj,
2071 gnu_size, gnu_align);
2072 else
2073 gnu_call = build_call_n_expr (gnu_proc, 3, gnu_pool_addr,
2074 gnu_size, gnu_align);
2077 /* Secondary stack case. */
2078 else
2080 /* The size is the second parameter. */
2081 Entity_Id gnat_size_type
2082 = Etype (Next_Formal (First_Formal (gnat_proc)));
2083 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2085 gnu_size = convert (gnu_size_type, gnu_size);
2087 /* The first arg is the address of the object, for a deallocator,
2088 then the size. */
2089 if (gnu_obj)
2090 gnu_call = build_call_n_expr (gnu_proc, 2, gnu_obj, gnu_size);
2091 else
2092 gnu_call = build_call_n_expr (gnu_proc, 1, gnu_size);
2095 return gnu_call;
2098 /* Helper for build_call_alloc_dealloc, to build and return an allocator for
2099 DATA_SIZE bytes aimed at containing a DATA_TYPE object, using the default
2100 __gnat_malloc allocator. Honor DATA_TYPE alignments greater than what the
2101 latter offers. */
2103 static inline tree
2104 maybe_wrap_malloc (tree data_size, tree data_type, Node_Id gnat_node)
2106 /* When the DATA_TYPE alignment is stricter than what malloc offers
2107 (super-aligned case), we allocate an "aligning" wrapper type and return
2108 the address of its single data field with the malloc's return value
2109 stored just in front. */
2111 unsigned int data_align = TYPE_ALIGN (data_type);
2112 unsigned int system_allocator_alignment
2113 = get_target_system_allocator_alignment () * BITS_PER_UNIT;
2115 tree aligning_type
2116 = ((data_align > system_allocator_alignment)
2117 ? make_aligning_type (data_type, data_align, data_size,
2118 system_allocator_alignment,
2119 POINTER_SIZE / BITS_PER_UNIT,
2120 gnat_node)
2121 : NULL_TREE);
2123 tree size_to_malloc
2124 = aligning_type ? TYPE_SIZE_UNIT (aligning_type) : data_size;
2126 tree malloc_ptr;
2128 /* On VMS, if pointers are 64-bit and the allocator size is 32-bit or
2129 Convention C, allocate 32-bit memory. */
2130 if (TARGET_ABI_OPEN_VMS
2131 && POINTER_SIZE == 64
2132 && Nkind (gnat_node) == N_Allocator
2133 && (UI_To_Int (Esize (Etype (gnat_node))) == 32
2134 || Convention (Etype (gnat_node)) == Convention_C))
2135 malloc_ptr = build_call_n_expr (malloc32_decl, 1, size_to_malloc);
2136 else
2137 malloc_ptr = build_call_n_expr (malloc_decl, 1, size_to_malloc);
2139 if (aligning_type)
2141 /* Latch malloc's return value and get a pointer to the aligning field
2142 first. */
2143 tree storage_ptr = gnat_protect_expr (malloc_ptr);
2145 tree aligning_record_addr
2146 = convert (build_pointer_type (aligning_type), storage_ptr);
2148 tree aligning_record
2149 = build_unary_op (INDIRECT_REF, NULL_TREE, aligning_record_addr);
2151 tree aligning_field
2152 = build_component_ref (aligning_record, NULL_TREE,
2153 TYPE_FIELDS (aligning_type), false);
2155 tree aligning_field_addr
2156 = build_unary_op (ADDR_EXPR, NULL_TREE, aligning_field);
2158 /* Then arrange to store the allocator's return value ahead
2159 and return. */
2160 tree storage_ptr_slot_addr
2161 = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
2162 convert (ptr_void_type_node, aligning_field_addr),
2163 size_int (-(HOST_WIDE_INT) POINTER_SIZE
2164 / BITS_PER_UNIT));
2166 tree storage_ptr_slot
2167 = build_unary_op (INDIRECT_REF, NULL_TREE,
2168 convert (build_pointer_type (ptr_void_type_node),
2169 storage_ptr_slot_addr));
2171 return
2172 build2 (COMPOUND_EXPR, TREE_TYPE (aligning_field_addr),
2173 build_binary_op (INIT_EXPR, NULL_TREE,
2174 storage_ptr_slot, storage_ptr),
2175 aligning_field_addr);
2177 else
2178 return malloc_ptr;
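/* Block layout in the super-aligned case (sketch):

     malloc result ->  +--------------------------------+
                       |  padding as needed             |
                       |  saved malloc result (void *)  |
     returned value -> |  data field, aligned for       |
                       |  DATA_TYPE                     |
                       +--------------------------------+

   maybe_wrap_free below fetches the saved pointer back in order to
   release the whole block.  */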
2181 /* Helper for build_call_alloc_dealloc, to release a DATA_TYPE object
2182 designated by DATA_PTR using the __gnat_free entry point. */
2184 static inline tree
2185 maybe_wrap_free (tree data_ptr, tree data_type)
2187 /* In the regular alignment case, we pass the data pointer straight to free.
2188 In the superaligned case, we need to retrieve the initial allocator
2189 return value, stored in front of the data block at allocation time. */
2191 unsigned int data_align = TYPE_ALIGN (data_type);
2192 unsigned int system_allocator_alignment
2193 = get_target_system_allocator_alignment () * BITS_PER_UNIT;
2195 tree free_ptr;
2197 if (data_align > system_allocator_alignment)
2199 /* DATA_FRONT_PTR (void *)
2200 = (void *)DATA_PTR - sizeof (void *)  */
2201 tree data_front_ptr
2202 = build_binary_op
2203 (POINTER_PLUS_EXPR, ptr_void_type_node,
2204 convert (ptr_void_type_node, data_ptr),
2205 size_int (-(HOST_WIDE_INT) POINTER_SIZE / BITS_PER_UNIT));
2207 /* FREE_PTR (void *) = *(void **)DATA_FRONT_PTR */
2208 free_ptr
2209 = build_unary_op
2210 (INDIRECT_REF, NULL_TREE,
2211 convert (build_pointer_type (ptr_void_type_node), data_front_ptr));
2213 else
2214 free_ptr = data_ptr;
2216 return build_call_n_expr (free_decl, 1, free_ptr);
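/* In C terms, the super-aligned case amounts to (sketch)

     free (*(void **) ((char *) data_ptr - sizeof (void *)));

   whereas the regular case is simply free (data_ptr).  */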
2219 /* Build a GCC tree to call an allocation or deallocation function.
2220 If GNU_OBJ is nonzero, it is an object to deallocate. Otherwise,
2221 generate an allocator.
2223 GNU_SIZE is the number of bytes to allocate and GNU_TYPE is the contained
2224 object type, used to determine the to-be-honored address alignment.
2225 GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the storage
2226 pool to use. If not present, malloc and free are used. GNAT_NODE is used
2227 to provide an error location for restriction violation messages. */
2229 tree
2230 build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, tree gnu_type,
2231 Entity_Id gnat_proc, Entity_Id gnat_pool,
2232 Node_Id gnat_node)
2234 gnu_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (gnu_size, gnu_obj);
2236 /* Explicit proc to call?  This one is assumed to deal with the type's
2237 alignment constraints.  */
2238 if (Present (gnat_proc))
2239 return build_call_alloc_dealloc_proc (gnu_obj, gnu_size, gnu_type,
2240 gnat_proc, gnat_pool);
2242 /* Otherwise, object to "free" or "malloc" with possible special processing
2243 for alignments stricter than what the default allocator honors. */
2244 else if (gnu_obj)
2245 return maybe_wrap_free (gnu_obj, gnu_type);
2246 else
2248 /* Assert that we can no longer be called with this special pool.  */
2249 gcc_assert (gnat_pool != -1);
2251 /* Check that we aren't violating the associated restriction. */
2252 if (!(Nkind (gnat_node) == N_Allocator && Comes_From_Source (gnat_node)))
2253 Check_No_Implicit_Heap_Alloc (gnat_node);
2255 return maybe_wrap_malloc (gnu_size, gnu_type, gnat_node);
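/* Dispatch summary (descriptive note):

     GNAT_PROC present  =>  storage-pool routine, see the proc case above;
     GNU_OBJ present    =>  deallocation via maybe_wrap_free (__gnat_free);
     otherwise          =>  allocation via maybe_wrap_malloc (__gnat_malloc).  */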
2259 /* Build a GCC tree that corresponds to allocating an object of TYPE whose
2260 initial value is INIT, if INIT is nonzero. Convert the expression to
2261 RESULT_TYPE, which must be some pointer type, and return the result.
2263 GNAT_PROC and GNAT_POOL optionally give the procedure to call and
2264 the storage pool to use. GNAT_NODE is used to provide an error
2265 location for restriction violation messages. If IGNORE_INIT_TYPE is
2266 true, ignore the type of INIT for the purpose of determining the size;
2267 this will cause the maximum size to be allocated if TYPE is of
2268 self-referential size. */
2270 tree
2271 build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
2272 Entity_Id gnat_pool, Node_Id gnat_node, bool ignore_init_type)
2274 tree size, storage, storage_deref, storage_init;
2276 /* If the initializer, if present, is a NULL_EXPR, just return a new one. */
2277 if (init && TREE_CODE (init) == NULL_EXPR)
2278 return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));
2280 /* If the initializer, if present, is a COND_EXPR, deal with each branch. */
2281 else if (init && TREE_CODE (init) == COND_EXPR)
2282 return build3 (COND_EXPR, result_type, TREE_OPERAND (init, 0),
2283 build_allocator (type, TREE_OPERAND (init, 1), result_type,
2284 gnat_proc, gnat_pool, gnat_node,
2285 ignore_init_type),
2286 build_allocator (type, TREE_OPERAND (init, 2), result_type,
2287 gnat_proc, gnat_pool, gnat_node,
2288 ignore_init_type));
2290 /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
2291 sizes of the object and its template. Allocate the whole thing and
2292 fill in the parts that are known. */
2293 else if (TYPE_IS_FAT_OR_THIN_POINTER_P (result_type))
2295 tree storage_type
2296 = build_unc_object_type_from_ptr (result_type, type,
2297 get_identifier ("ALLOC"), false);
2298 tree template_type = TREE_TYPE (TYPE_FIELDS (storage_type));
2299 tree storage_ptr_type = build_pointer_type (storage_type);
2301 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
2302 init);
2304 /* If the size overflows, pass -1 so Storage_Error will be raised. */
2305 if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
2306 size = size_int (-1);
2308 storage = build_call_alloc_dealloc (NULL_TREE, size, storage_type,
2309 gnat_proc, gnat_pool, gnat_node);
2310 storage = convert (storage_ptr_type, gnat_protect_expr (storage));
2311 storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2312 TREE_THIS_NOTRAP (storage_deref) = 1;
2314 /* If there is an initializing expression, then make a constructor for
2315 the entire object including the bounds and copy it into the object.
2316 If there is no initializing expression, just set the bounds. */
2317 if (init)
2319 vec<constructor_elt, va_gc> *v;
2320 vec_alloc (v, 2);
2322 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (storage_type),
2323 build_template (template_type, type, init));
2324 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (storage_type)),
2325 init);
2326 storage_init
2327 = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref,
2328 gnat_build_constructor (storage_type, v));
2330 else
2331 storage_init
2332 = build_binary_op (INIT_EXPR, NULL_TREE,
2333 build_component_ref (storage_deref, NULL_TREE,
2334 TYPE_FIELDS (storage_type),
2335 false),
2336 build_template (template_type, type, NULL_TREE));
2338 return build2 (COMPOUND_EXPR, result_type,
2339 storage_init, convert (result_type, storage));
2342 size = TYPE_SIZE_UNIT (type);
2344 /* If we have an initializing expression, see if its size is simpler
2345 than the size from the type. */
2346 if (!ignore_init_type && init && TYPE_SIZE_UNIT (TREE_TYPE (init))
2347 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
2348 || CONTAINS_PLACEHOLDER_P (size)))
2349 size = TYPE_SIZE_UNIT (TREE_TYPE (init));
2351 /* If the size is still self-referential, reference the initializing
2352 expression, if it is present. If not, this must have been a
2353 call to allocate a library-level object, in which case we use
2354 the maximum size. */
2355 if (CONTAINS_PLACEHOLDER_P (size))
2357 if (!ignore_init_type && init)
2358 size = substitute_placeholder_in_expr (size, init);
2359 else
2360 size = max_size (size, true);
2363 /* If the size overflows, pass -1 so Storage_Error will be raised. */
2364 if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
2365 size = size_int (-1);
2367 storage = convert (result_type,
2368 build_call_alloc_dealloc (NULL_TREE, size, type,
2369 gnat_proc, gnat_pool,
2370 gnat_node));
2372 /* If we have an initial value, protect the new address, assign the value
2373 and return the address with a COMPOUND_EXPR. */
2374 if (init)
2376 storage = gnat_protect_expr (storage);
2377 storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2378 TREE_THIS_NOTRAP (storage_deref) = 1;
2379 storage_init
2380 = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref, init);
2381 return build2 (COMPOUND_EXPR, result_type, storage_init, storage);
2384 return storage;
2387 /* Indicate that we need to take the address of T and that it therefore
2388 should not be allocated in a register. Returns true if successful. */
2390 bool
2391 gnat_mark_addressable (tree t)
2393 while (true)
2394 switch (TREE_CODE (t))
2396 case ADDR_EXPR:
2397 case COMPONENT_REF:
2398 case ARRAY_REF:
2399 case ARRAY_RANGE_REF:
2400 case REALPART_EXPR:
2401 case IMAGPART_EXPR:
2402 case VIEW_CONVERT_EXPR:
2403 case NON_LVALUE_EXPR:
2404 CASE_CONVERT:
2405 t = TREE_OPERAND (t, 0);
2406 break;
2408 case COMPOUND_EXPR:
2409 t = TREE_OPERAND (t, 1);
2410 break;
2412 case CONSTRUCTOR:
2413 TREE_ADDRESSABLE (t) = 1;
2414 return true;
2416 case VAR_DECL:
2417 case PARM_DECL:
2418 case RESULT_DECL:
2419 TREE_ADDRESSABLE (t) = 1;
2420 return true;
2422 case FUNCTION_DECL:
2423 TREE_ADDRESSABLE (t) = 1;
2424 return true;
2426 case CONST_DECL:
2427 return DECL_CONST_CORRESPONDING_VAR (t)
2428 && gnat_mark_addressable (DECL_CONST_CORRESPONDING_VAR (t));
2430 default:
2431 return true;
2435 /* Save EXP for later use or reuse. This is equivalent to save_expr in tree.c
2436 but we know how to handle our own nodes. */
2438 tree
2439 gnat_save_expr (tree exp)
2441 tree type = TREE_TYPE (exp);
2442 enum tree_code code = TREE_CODE (exp);
2444 if (TREE_CONSTANT (exp) || code == SAVE_EXPR || code == NULL_EXPR)
2445 return exp;
2447 if (code == UNCONSTRAINED_ARRAY_REF)
2449 tree t = build1 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)));
2450 TREE_READONLY (t) = TYPE_READONLY (type);
2451 return t;
2454 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2455 This may be more efficient, but will also allow us to more easily find
2456 the match for the PLACEHOLDER_EXPR. */
2457 if (code == COMPONENT_REF
2458 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2459 return build3 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)),
2460 TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));
2462 return save_expr (exp);
2465 /* Protect EXP for immediate reuse. This is a variant of gnat_save_expr that
2466 is optimized under the assumption that EXP's value doesn't change before
2467 its subsequent reuse(s) except through its potential reevaluation. */
2469 tree
2470 gnat_protect_expr (tree exp)
2472 tree type = TREE_TYPE (exp);
2473 enum tree_code code = TREE_CODE (exp);
2475 if (TREE_CONSTANT (exp) || code == SAVE_EXPR || code == NULL_EXPR)
2476 return exp;
2478 /* If EXP has no side effects, we theoretically don't need to do anything.
2479 However, we may be recursively passed more and more complex expressions
2480 involving checks which will be reused multiple times and eventually be
2481 unshared for gimplification; in order to avoid a complexity explosion
2482 at that point, we protect any expressions more complex than a simple
2483 arithmetic expression. */
2484 if (!TREE_SIDE_EFFECTS (exp))
2486 tree inner = skip_simple_arithmetic (exp);
2487 if (!EXPR_P (inner) || REFERENCE_CLASS_P (inner))
2488 return exp;
2491 /* If this is a conversion, protect what's inside the conversion. */
2492 if (code == NON_LVALUE_EXPR
2493 || CONVERT_EXPR_CODE_P (code)
2494 || code == VIEW_CONVERT_EXPR)
2495 return build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2497 /* If we're indirectly referencing something, we only need to protect the
2498 address since the data itself can't change in these situations. */
2499 if (code == INDIRECT_REF || code == UNCONSTRAINED_ARRAY_REF)
2501 tree t = build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2502 TREE_READONLY (t) = TYPE_READONLY (type);
2503 return t;
2506 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2507 This may be more efficient, but will also allow us to more easily find
2508 the match for the PLACEHOLDER_EXPR. */
2509 if (code == COMPONENT_REF
2510 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2511 return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
2512 TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));
2514 /* If this is a fat pointer or something that can be placed in a register,
2515 just make a SAVE_EXPR. Likewise for a CALL_EXPR as large objects are
2516 returned via invisible reference in most ABIs so the temporary will
2517 directly be filled by the callee. */
2518 if (TYPE_IS_FAT_POINTER_P (type)
2519 || TYPE_MODE (type) != BLKmode
2520 || code == CALL_EXPR)
2521 return save_expr (exp);
2523 /* Otherwise, protect the address and dereference it.  */
2524 return
2525 build_unary_op (INDIRECT_REF, type,
2526 save_expr (build_unary_op (ADDR_EXPR,
2527 build_reference_type (type),
2528 exp)));
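/* The last-resort case above is morally (a sketch, for an lvalue X)

     *SAVE_EXPR <&X>

   i.e. only the address is protected and the datum is re-read through
   it on every use.  */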
2531 /* This is equivalent to stabilize_reference_1 in tree.c but we take an extra
2532 argument to force evaluation of everything. */
2534 static tree
2535 gnat_stabilize_reference_1 (tree e, bool force)
2537 enum tree_code code = TREE_CODE (e);
2538 tree type = TREE_TYPE (e);
2539 tree result;
2541 /* We cannot ignore const expressions because such an expression might
2542 be a reference to a const array whose index contains side effects.
2543 But we can ignore things that are actually constant or that have
2544 already been handled by this function.  */
2545 if (TREE_CONSTANT (e) || code == SAVE_EXPR)
2546 return e;
2548 switch (TREE_CODE_CLASS (code))
2550 case tcc_exceptional:
2551 case tcc_declaration:
2552 case tcc_comparison:
2553 case tcc_expression:
2554 case tcc_reference:
2555 case tcc_vl_exp:
2556 /* If this is a COMPONENT_REF of a fat pointer, save the entire
2557 fat pointer. This may be more efficient, but will also allow
2558 us to more easily find the match for the PLACEHOLDER_EXPR. */
2559 if (code == COMPONENT_REF
2560 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (e, 0))))
2561 result
2562 = build3 (code, type,
2563 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force),
2564 TREE_OPERAND (e, 1), TREE_OPERAND (e, 2));
2565 /* If the expression has side-effects, then encase it in a SAVE_EXPR
2566 so that it will only be evaluated once. */
2567 /* The tcc_reference and tcc_comparison classes could be handled as
2568 below, but it is generally faster to only evaluate them once. */
2569 else if (TREE_SIDE_EFFECTS (e) || force)
2570 return save_expr (e);
2571 else
2572 return e;
2573 break;
2575 case tcc_binary:
2576 /* Recursively stabilize each operand. */
2577 result
2578 = build2 (code, type,
2579 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force),
2580 gnat_stabilize_reference_1 (TREE_OPERAND (e, 1), force));
2581 break;
2583 case tcc_unary:
2584 /* Recursively stabilize each operand. */
2585 result
2586 = build1 (code, type,
2587 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force));
2588 break;
2590 default:
2591 gcc_unreachable ();
2594 /* See similar handling in gnat_stabilize_reference. */
2595 TREE_READONLY (result) = TREE_READONLY (e);
2596 TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (e);
2597 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
2599 if (code == INDIRECT_REF
2600 || code == UNCONSTRAINED_ARRAY_REF
2601 || code == ARRAY_REF
2602 || code == ARRAY_RANGE_REF)
2603 TREE_THIS_NOTRAP (result) = TREE_THIS_NOTRAP (e);
2605 return result;
2608 /* This is equivalent to stabilize_reference in tree.c but we know how to
2609 handle our own nodes and we take extra arguments. FORCE says whether to
2610 force evaluation of everything. We set SUCCESS to true unless we walk
2611 through something we don't know how to stabilize. */
2613 tree
2614 gnat_stabilize_reference (tree ref, bool force, bool *success)
2616 tree type = TREE_TYPE (ref);
2617 enum tree_code code = TREE_CODE (ref);
2618 tree result;
2620 /* Assume we'll succeed unless proven otherwise.  */
2621 if (success)
2622 *success = true;
2624 switch (code)
2626 case CONST_DECL:
2627 case VAR_DECL:
2628 case PARM_DECL:
2629 case RESULT_DECL:
2630 /* No action is needed in this case. */
2631 return ref;
2633 case ADDR_EXPR:
2634 CASE_CONVERT:
2635 case FLOAT_EXPR:
2636 case FIX_TRUNC_EXPR:
2637 case VIEW_CONVERT_EXPR:
2638 result
2639 = build1 (code, type,
2640 gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2641 success));
2642 break;
2644 case INDIRECT_REF:
2645 case UNCONSTRAINED_ARRAY_REF:
2646 result = build1 (code, type,
2647 gnat_stabilize_reference_1 (TREE_OPERAND (ref, 0),
2648 force));
2649 break;
2651 case COMPONENT_REF:
2652 result = build3 (COMPONENT_REF, type,
2653 gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2654 success),
2655 TREE_OPERAND (ref, 1), NULL_TREE);
2656 break;
2658 case BIT_FIELD_REF:
2659 result = build3 (BIT_FIELD_REF, type,
2660 gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2661 success),
2662 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
2663 break;
2665 case ARRAY_REF:
2666 case ARRAY_RANGE_REF:
2667 result = build4 (code, type,
2668 gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2669 success),
2670 gnat_stabilize_reference_1 (TREE_OPERAND (ref, 1),
2671 force),
2672 NULL_TREE, NULL_TREE);
2673 break;
2675 case CALL_EXPR:
2676 result = gnat_stabilize_reference_1 (ref, force);
2677 break;
2679 case COMPOUND_EXPR:
2680 result = build2 (COMPOUND_EXPR, type,
2681 gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2682 success),
2683 gnat_stabilize_reference (TREE_OPERAND (ref, 1), force,
2684 success));
2685 break;
2687 case CONSTRUCTOR:
2688 /* Constructors with 1 element are used extensively to formally
2689 convert objects to special wrapping types. */
2690 if (TREE_CODE (type) == RECORD_TYPE
2691 && vec_safe_length (CONSTRUCTOR_ELTS (ref)) == 1)
2693 tree index = (*CONSTRUCTOR_ELTS (ref))[0].index;
2694 tree value = (*CONSTRUCTOR_ELTS (ref))[0].value;
2695 result
2696 = build_constructor_single (type, index,
2697 gnat_stabilize_reference_1 (value,
2698 force));
2700 else
2702 if (success)
2703 *success = false;
2704 return ref;
2706 break;
2708 case ERROR_MARK:
2709 ref = error_mark_node;
2711 /* ... fall through to failure ... */
2713 /* If arg isn't a kind of lvalue we recognize, make no change.
2714 Caller should recognize the error for an invalid lvalue. */
2715 default:
2716 if (success)
2717 *success = false;
2718 return ref;
2721 /* TREE_THIS_VOLATILE and TREE_SIDE_EFFECTS set on the initial expression
2722 may not be sustained across some paths, such as the path through build1 for
2723 INDIRECT_REF. We reset those flags here in the general case, which is
2724 consistent with the GCC version of this routine.
2726 Special care should be taken regarding TREE_SIDE_EFFECTS, because some
2727 paths introduce side-effects where there was none initially (e.g. if a
2728 SAVE_EXPR is built) and we also want to keep track of that. */
2729 TREE_READONLY (result) = TREE_READONLY (ref);
2730 TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (ref);
2731 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
2733 if (code == INDIRECT_REF
2734 || code == UNCONSTRAINED_ARRAY_REF
2735 || code == ARRAY_REF
2736 || code == ARRAY_RANGE_REF)
2737 TREE_THIS_NOTRAP (result) = TREE_THIS_NOTRAP (ref);
2739 return result;
2742 /* If EXPR is an expression that is invariant in the current function, in the
2743 sense that it can be evaluated anywhere in the function and any number of
2744 times, return EXPR or an equivalent expression. Otherwise return NULL. */
2746 tree
2747 gnat_invariant_expr (tree expr)
2749 tree type = TREE_TYPE (expr), t;
2751 expr = remove_conversions (expr, false);
2753 while ((TREE_CODE (expr) == CONST_DECL
2754 || (TREE_CODE (expr) == VAR_DECL && TREE_READONLY (expr)))
2755 && decl_function_context (expr) == current_function_decl
2756 && DECL_INITIAL (expr))
2757 expr = remove_conversions (DECL_INITIAL (expr), false);
2759 if (TREE_CONSTANT (expr))
2760 return fold_convert (type, expr);
2762 t = expr;
2764 while (true)
2766 switch (TREE_CODE (t))
2768 case COMPONENT_REF:
2769 if (TREE_OPERAND (t, 2) != NULL_TREE)
2770 return NULL_TREE;
2771 break;
2773 case ARRAY_REF:
2774 case ARRAY_RANGE_REF:
2775 if (!TREE_CONSTANT (TREE_OPERAND (t, 1))
2776 || TREE_OPERAND (t, 2) != NULL_TREE
2777 || TREE_OPERAND (t, 3) != NULL_TREE)
2778 return NULL_TREE;
2779 break;
2781 case BIT_FIELD_REF:
2782 case VIEW_CONVERT_EXPR:
2783 case REALPART_EXPR:
2784 case IMAGPART_EXPR:
2785 break;
2787 case INDIRECT_REF:
2788 if (!TREE_READONLY (t)
2789 || TREE_SIDE_EFFECTS (t)
2790 || !TREE_THIS_NOTRAP (t))
2791 return NULL_TREE;
2792 break;
2794 default:
2795 goto object;
2798 t = TREE_OPERAND (t, 0);
2801 object:
2802 if (TREE_SIDE_EFFECTS (t))
2803 return NULL_TREE;
2805 if (TREE_CODE (t) == CONST_DECL
2806 && (DECL_EXTERNAL (t)
2807 || decl_function_context (t) != current_function_decl))
2808 return fold_convert (type, expr);
2810 if (!TREE_READONLY (t))
2811 return NULL_TREE;
2813 if (TREE_CODE (t) == PARM_DECL)
2814 return fold_convert (type, expr);
2816 if (TREE_CODE (t) == VAR_DECL
2817 && (DECL_EXTERNAL (t)
2818 || decl_function_context (t) != current_function_decl))
2819 return fold_convert (type, expr);
2821 return NULL_TREE;