/* gcc/ada/gcc-interface/utils2.c */

/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                              U T I L S 2                                 *
 *                                                                          *
 *                          C Implementation File                          *
 *                                                                          *
 *          Copyright (C) 1992-2013, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software;  you can  redistribute it  and/or modify it under *
 * terms of the  GNU General Public License as published  by the Free Soft- *
 * ware  Foundation;  either version 3,  or (at your option) any later ver- *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY;  without even the  implied warranty of MERCHANTABILITY *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for more details.  You should have received a copy of the GNU General    *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed  by the GNAT team at  New York University. *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "toplev.h"
#include "ggc.h"
#include "tree-inline.h"

#include "ada.h"
#include "types.h"
#include "atree.h"
#include "elists.h"
#include "namet.h"
#include "nlists.h"
#include "snames.h"
#include "stringt.h"
#include "uintp.h"
#include "fe.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"

/* Return the base type of TYPE.  */

tree
get_base_type (tree type)
{
  if (TREE_CODE (type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (type))
    type = TREE_TYPE (TYPE_FIELDS (type));

  while (TREE_TYPE (type)
         && (TREE_CODE (type) == INTEGER_TYPE
             || TREE_CODE (type) == REAL_TYPE))
    type = TREE_TYPE (type);

  return type;
}
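
/* Illustrative sketch (not part of the original source): for an Ada subtype
   such as "subtype Small is Integer range 1 .. 10" (hypothetical example),
   gigi gives the subtype's INTEGER_TYPE a TREE_TYPE link to its base type,
   so the loop above walks the chain and returns the node for Integer.  */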

/* EXP is a GCC tree representing an address.  See if we can find how
   strictly the object at that address is aligned.  Return that alignment
   in bits.  If we don't know anything about the alignment, return 0.  */

unsigned int
known_alignment (tree exp)
{
  unsigned int this_alignment;
  unsigned int lhs, rhs;

  switch (TREE_CODE (exp))
    {
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Conversions between pointers and integers don't change the alignment
         of the underlying object.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 0));
      break;

    case COMPOUND_EXPR:
      /* The value of a COMPOUND_EXPR is that of its second operand.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* If two addresses are added, the alignment of the result is the
         minimum of the two alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));
      this_alignment = MIN (lhs, rhs);
      break;

    case POINTER_PLUS_EXPR:
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));
      /* If we don't know the alignment of the offset, we assume that
         of the base.  */
      if (rhs == 0)
        this_alignment = lhs;
      else
        this_alignment = MIN (lhs, rhs);
      break;

    case COND_EXPR:
      /* If there is a choice between two values, use the smaller one.  */
      lhs = known_alignment (TREE_OPERAND (exp, 1));
      rhs = known_alignment (TREE_OPERAND (exp, 2));
      this_alignment = MIN (lhs, rhs);
      break;

    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT c = TREE_INT_CST_LOW (exp);
        /* The first part of this represents the lowest bit in the constant,
           but it is originally in bytes, not bits.  */
        this_alignment = MIN (BITS_PER_UNIT * (c & -c), BIGGEST_ALIGNMENT);
      }
      break;

    case MULT_EXPR:
      /* If we know the alignment of just one side, use it.  Otherwise,
         use the product of the alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));

      if (lhs == 0)
        this_alignment = rhs;
      else if (rhs == 0)
        this_alignment = lhs;
      else
        this_alignment = MIN (lhs * rhs, BIGGEST_ALIGNMENT);
      break;

    case BIT_AND_EXPR:
      /* A bit-and expression is as aligned as the maximum alignment of the
         operands.  We typically get here for a complex lhs and a constant
         negative power of two on the rhs to force an explicit alignment, so
         don't bother looking at the lhs.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case ADDR_EXPR:
      this_alignment = expr_align (TREE_OPERAND (exp, 0));
      break;

    case CALL_EXPR:
      {
        tree t = maybe_inline_call_in_expr (exp);
        if (t)
          return known_alignment (t);
      }

      /* Fall through...  */

    default:
      /* For other pointer expressions, we assume that the pointed-to object
         is at least as aligned as the pointed-to type.  Beware that we can
         have a dummy type here (e.g. a Taft Amendment type), for which the
         alignment is meaningless and should be ignored.  */
      if (POINTER_TYPE_P (TREE_TYPE (exp))
          && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp))))
        this_alignment = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
      else
        this_alignment = 0;
      break;
    }

  return this_alignment;
}
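
/* Illustrative sketch (not part of the original source): how the INTEGER_CST
   case deduces an alignment.  For a byte offset of 24, c & -c isolates the
   lowest set bit (8), so the alignment is 8 * BITS_PER_UNIT = 64 bits on a
   byte-addressed target, capped by BIGGEST_ALIGNMENT:

     unsigned HOST_WIDE_INT c = 24;                  // binary 11000
     unsigned int align = BITS_PER_UNIT * (c & -c);  // 8 * 8 = 64 bits

   Combined with the POINTER_PLUS_EXPR case, an address of the form BASE + 24
   is therefore known to be aligned to MIN (known_alignment (BASE), 64).  */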

/* We have a comparison or assignment operation on two types, T1 and T2, which
   are either both array types or both record types.  T1 is assumed to be for
   the left hand side operand, and T2 for the right hand side.  Return the
   type that both operands should be converted to for the operation, if any.
   Otherwise return zero.  */

static tree
find_common_type (tree t1, tree t2)
{
  /* ??? As of today, various constructs lead here with types of different
     sizes even when both are constant (e.g. tagged types, packable vs regular
     component types, padded vs unpadded types, ...).  While some of these
     would better be handled upstream (types should be made consistent before
     calling into build_binary_op), some others are really expected and we
     have to be careful.  */

  /* We must avoid writing more than what the target can hold if this is for
     an assignment, and the case of tagged types is handled in build_binary_op,
     so we use the lhs type if it is known to be smaller or of constant size
     and the rhs type is not, whatever the modes.  We also force t1 in case of
     constant size equality to minimize occurrences of view conversions on the
     lhs of an assignment, except for the case of record types with a variant
     part on the lhs but not on the rhs to make the conversion simpler.  */
  if (TREE_CONSTANT (TYPE_SIZE (t1))
      && (!TREE_CONSTANT (TYPE_SIZE (t2))
          || tree_int_cst_lt (TYPE_SIZE (t1), TYPE_SIZE (t2))
          || (TYPE_SIZE (t1) == TYPE_SIZE (t2)
              && !(TREE_CODE (t1) == RECORD_TYPE
                   && TREE_CODE (t2) == RECORD_TYPE
                   && get_variant_part (t1) != NULL_TREE
                   && get_variant_part (t2) == NULL_TREE))))
    return t1;

  /* Otherwise, if the lhs type is non-BLKmode, use it.  Note that we know
     that we will not have any alignment problems since, if we did, the
     non-BLKmode type could not have been used.  */
  if (TYPE_MODE (t1) != BLKmode)
    return t1;

  /* If the rhs type is of constant size, use it whatever the modes.  At
     this point it is known to be smaller, or of constant size and the
     lhs type is not.  */
  if (TREE_CONSTANT (TYPE_SIZE (t2)))
    return t2;

  /* Otherwise, if the rhs type is non-BLKmode, use it.  */
  if (TYPE_MODE (t2) != BLKmode)
    return t2;

  /* In this case, both types have variable size and BLKmode.  It's
     probably best to leave the "type mismatch" because changing it
     could cause a bad self-referential reference.  */
  return NULL_TREE;
}

/* Return an expression tree representing an equality comparison of A1 and A2,
   two objects of type ARRAY_TYPE.  The result should be of type RESULT_TYPE.

   Two arrays are equal in one of two ways: (1) if both have zero length in
   some dimension (not necessarily the same dimension) or (2) if the lengths
   in each dimension are equal and the data is equal.  We perform the length
   tests in as efficient a manner as possible.  */

static tree
compare_arrays (location_t loc, tree result_type, tree a1, tree a2)
{
  tree result = convert (result_type, boolean_true_node);
  tree a1_is_null = convert (result_type, boolean_false_node);
  tree a2_is_null = convert (result_type, boolean_false_node);
  tree t1 = TREE_TYPE (a1);
  tree t2 = TREE_TYPE (a2);
  bool a1_side_effects_p = TREE_SIDE_EFFECTS (a1);
  bool a2_side_effects_p = TREE_SIDE_EFFECTS (a2);
  bool length_zero_p = false;

  /* If either operand has side-effects, they have to be evaluated only once
     in spite of the multiple references to the operand in the comparison.  */
  if (a1_side_effects_p)
    a1 = gnat_protect_expr (a1);

  if (a2_side_effects_p)
    a2 = gnat_protect_expr (a2);

  /* Process each dimension separately and compare the lengths.  If any
     dimension has a length known to be zero, set LENGTH_ZERO_P to true
     in order to suppress the comparison of the data at the end.  */
  while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
    {
      tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
      tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
      tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
      tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
      tree length1 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub1, lb1),
                                 size_one_node);
      tree length2 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub2, lb2),
                                 size_one_node);
      tree comparison, this_a1_is_null, this_a2_is_null;

      /* If the length of the first array is a constant, swap our operands
         unless the length of the second array is the constant zero.  */
      if (TREE_CODE (length1) == INTEGER_CST && !integer_zerop (length2))
        {
          tree tem;
          bool btem;

          tem = a1, a1 = a2, a2 = tem;
          tem = t1, t1 = t2, t2 = tem;
          tem = lb1, lb1 = lb2, lb2 = tem;
          tem = ub1, ub1 = ub2, ub2 = tem;
          tem = length1, length1 = length2, length2 = tem;
          tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
          btem = a1_side_effects_p, a1_side_effects_p = a2_side_effects_p,
          a2_side_effects_p = btem;
        }

      /* If the length of the second array is the constant zero, we can just
         use the original stored bounds for the first array and see whether
         last < first holds.  */
      if (integer_zerop (length2))
        {
          length_zero_p = true;

          ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
          lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));

          comparison = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
          comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
          if (EXPR_P (comparison))
            SET_EXPR_LOCATION (comparison, loc);

          this_a1_is_null = comparison;
          this_a2_is_null = convert (result_type, boolean_true_node);
        }

      /* Otherwise, if the length is some other constant value, we know that
         this dimension in the second array cannot be superflat, so we can
         just use its length computed from the actual stored bounds.  */
      else if (TREE_CODE (length2) == INTEGER_CST)
        {
          tree bt;

          ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
          lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
          /* Note that we know that UB2 and LB2 are constant and hence
             cannot contain a PLACEHOLDER_EXPR.  */
          ub2 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
          lb2 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
          bt = get_base_type (TREE_TYPE (ub1));

          comparison
            = fold_build2_loc (loc, EQ_EXPR, result_type,
                               build_binary_op (MINUS_EXPR, bt, ub1, lb1),
                               build_binary_op (MINUS_EXPR, bt, ub2, lb2));
          comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
          if (EXPR_P (comparison))
            SET_EXPR_LOCATION (comparison, loc);

          this_a1_is_null
            = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);

          this_a2_is_null = convert (result_type, boolean_false_node);
        }

      /* Otherwise, compare the computed lengths.  */
      else
        {
          length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
          length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);

          comparison
            = fold_build2_loc (loc, EQ_EXPR, result_type, length1, length2);

          /* If the length expression is of the form (cond ? val : 0), assume
             that cond is equivalent to (length != 0).  That's guaranteed by
             construction of the array types in gnat_to_gnu_entity.  */
          if (TREE_CODE (length1) == COND_EXPR
              && integer_zerop (TREE_OPERAND (length1, 2)))
            this_a1_is_null
              = invert_truthvalue_loc (loc, TREE_OPERAND (length1, 0));
          else
            this_a1_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
                                               length1, size_zero_node);

          /* Likewise for the second array.  */
          if (TREE_CODE (length2) == COND_EXPR
              && integer_zerop (TREE_OPERAND (length2, 2)))
            this_a2_is_null
              = invert_truthvalue_loc (loc, TREE_OPERAND (length2, 0));
          else
            this_a2_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
                                               length2, size_zero_node);
        }

      /* Append expressions for this dimension to the final expressions.  */
      result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
                                result, comparison);

      a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                    this_a1_is_null, a1_is_null);

      a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                    this_a2_is_null, a2_is_null);

      t1 = TREE_TYPE (t1);
      t2 = TREE_TYPE (t2);
    }

  /* Unless the length of some dimension is known to be zero, compare the
     data in the array.  */
  if (!length_zero_p)
    {
      tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));
      tree comparison;

      if (type)
        {
          a1 = convert (type, a1),
          a2 = convert (type, a2);
        }

      comparison = fold_build2_loc (loc, EQ_EXPR, result_type, a1, a2);

      result
        = build_binary_op (TRUTH_ANDIF_EXPR, result_type, result, comparison);
    }

  /* The result is also true if both sizes are zero.  */
  result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                            build_binary_op (TRUTH_ANDIF_EXPR, result_type,
                                             a1_is_null, a2_is_null),
                            result);

  /* If either operand has side-effects, they have to be evaluated before
     starting the comparison above since the place they would be otherwise
     evaluated could be wrong.  */
  if (a1_side_effects_p)
    result = build2 (COMPOUND_EXPR, result_type, a1, result);

  if (a2_side_effects_p)
    result = build2 (COMPOUND_EXPR, result_type, a2, result);

  return result;
}
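
/* Illustrative sketch (not part of the original source): for one-dimensional
   arrays A1 and A2, the expression built above has the overall shape

     (A1'Length == A2'Length && A1 == A2) || (A1_is_null && A2_is_null)

   i.e. the data comparison is short-circuited when the lengths differ, and
   two empty arrays compare equal whatever their bounds.  */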

/* Return an expression tree representing an equality comparison of P1 and P2,
   two objects of fat pointer type.  The result should be of type RESULT_TYPE.

   Two fat pointers are equal in one of two ways: (1) if both have a null
   pointer to the array or (2) if they contain the same couple of pointers.
   We perform the comparison in as efficient a manner as possible.  */

static tree
compare_fat_pointers (location_t loc, tree result_type, tree p1, tree p2)
{
  tree p1_array, p2_array, p1_bounds, p2_bounds, same_array, same_bounds;
  tree p1_array_is_null, p2_array_is_null;

  /* If either operand has side-effects, they have to be evaluated only once
     in spite of the multiple references to the operand in the comparison.  */
  p1 = gnat_protect_expr (p1);
  p2 = gnat_protect_expr (p2);

  /* The constant folder doesn't fold fat pointer types so we do it here.  */
  if (TREE_CODE (p1) == CONSTRUCTOR)
    p1_array = (*CONSTRUCTOR_ELTS (p1))[0].value;
  else
    p1_array = build_component_ref (p1, NULL_TREE,
                                    TYPE_FIELDS (TREE_TYPE (p1)), true);

  p1_array_is_null
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array,
                       fold_convert_loc (loc, TREE_TYPE (p1_array),
                                         null_pointer_node));

  if (TREE_CODE (p2) == CONSTRUCTOR)
    p2_array = (*CONSTRUCTOR_ELTS (p2))[0].value;
  else
    p2_array = build_component_ref (p2, NULL_TREE,
                                    TYPE_FIELDS (TREE_TYPE (p2)), true);

  p2_array_is_null
    = fold_build2_loc (loc, EQ_EXPR, result_type, p2_array,
                       fold_convert_loc (loc, TREE_TYPE (p2_array),
                                         null_pointer_node));

  /* If one of the pointers to the array is null, just compare the other.  */
  if (integer_zerop (p1_array))
    return p2_array_is_null;
  else if (integer_zerop (p2_array))
    return p1_array_is_null;

  /* Otherwise, do the fully-fledged comparison.  */
  same_array
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array, p2_array);

  if (TREE_CODE (p1) == CONSTRUCTOR)
    p1_bounds = (*CONSTRUCTOR_ELTS (p1))[1].value;
  else
    p1_bounds
      = build_component_ref (p1, NULL_TREE,
                             DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p1))), true);

  if (TREE_CODE (p2) == CONSTRUCTOR)
    p2_bounds = (*CONSTRUCTOR_ELTS (p2))[1].value;
  else
    p2_bounds
      = build_component_ref (p2, NULL_TREE,
                             DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p2))), true);

  same_bounds
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_bounds, p2_bounds);

  /* P1_ARRAY == P2_ARRAY && (P1_ARRAY == NULL || P1_BOUNDS == P2_BOUNDS).  */
  return build_binary_op (TRUTH_ANDIF_EXPR, result_type, same_array,
                          build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                           p1_array_is_null, same_bounds));
}

/* Compute the result of applying OP_CODE to LHS and RHS, where both are of
   type TYPE.  We know that TYPE is a modular type with a nonbinary
   modulus.  */

static tree
nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
                             tree rhs)
{
  tree modulus = TYPE_MODULUS (type);
  unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
  unsigned int precision;
  bool unsignedp = true;
  tree op_type = type;
  tree result;

  /* If this is an addition of a constant, convert it to a subtraction
     of a constant since we can do that faster.  */
  if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
    {
      rhs = fold_build2 (MINUS_EXPR, type, modulus, rhs);
      op_code = MINUS_EXPR;
    }

  /* For the logical operations, we only need PRECISION bits.  For
     addition and subtraction, we need one more and for multiplication we
     need twice as many.  But we never want to make a size smaller than
     our size.  */
  if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
    needed_precision += 1;
  else if (op_code == MULT_EXPR)
    needed_precision *= 2;

  precision = MAX (needed_precision, TYPE_PRECISION (op_type));

  /* Unsigned will do for everything but subtraction.  */
  if (op_code == MINUS_EXPR)
    unsignedp = false;

  /* If our type is the wrong signedness or isn't wide enough, make a new
     type and convert both our operands to it.  */
  if (TYPE_PRECISION (op_type) < precision
      || TYPE_UNSIGNED (op_type) != unsignedp)
    {
      /* Copy the node so we ensure it can be modified to make it modular.  */
      op_type = copy_node (gnat_type_for_size (precision, unsignedp));
      modulus = convert (op_type, modulus);
      SET_TYPE_MODULUS (op_type, modulus);
      TYPE_MODULAR_P (op_type) = 1;
      lhs = convert (op_type, lhs);
      rhs = convert (op_type, rhs);
    }

  /* Do the operation, then we'll fix it up.  */
  result = fold_build2 (op_code, op_type, lhs, rhs);

  /* For multiplication, we have no choice but to do a full modulus
     operation.  However, we want to do this in the narrowest
     possible size.  */
  if (op_code == MULT_EXPR)
    {
      tree div_type = copy_node (gnat_type_for_size (needed_precision, 1));
      modulus = convert (div_type, modulus);
      SET_TYPE_MODULUS (div_type, modulus);
      TYPE_MODULAR_P (div_type) = 1;
      result = convert (op_type,
                        fold_build2 (TRUNC_MOD_EXPR, div_type,
                                     convert (div_type, result), modulus));
    }

  /* For subtraction, add the modulus back if we are negative.  */
  else if (op_code == MINUS_EXPR)
    {
      result = gnat_protect_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
                            fold_build2 (LT_EXPR, boolean_type_node, result,
                                         convert (op_type, integer_zero_node)),
                            fold_build2 (PLUS_EXPR, op_type, result, modulus),
                            result);
    }

  /* For the other operations, subtract the modulus if we are >= it.  */
  else
    {
      result = gnat_protect_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
                            fold_build2 (GE_EXPR, boolean_type_node,
                                         result, modulus),
                            fold_build2 (MINUS_EXPR, op_type,
                                         result, modulus),
                            result);
    }

  return convert (type, result);
}
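
/* Illustrative sketch (not part of the original source): for a modular type
   with nonbinary modulus 7, the rewriting done above turns an addition of
   the constant 3 into a subtraction of 4, since X + 3 = X - (7 - 3) (mod 7):

     lhs + 3 (mod 7)  ==>  lhs - 4, then add 7 back if the result is < 0

   For multiplication, needed_precision doubles (2 * 3 = 6 bits for modulus
   7), the product is computed in that width and reduced with TRUNC_MOD_EXPR:
   e.g. 5 * 6 = 30 and 30 mod 7 = 2.  */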

/* This page contains routines that implement the Ada semantics with regard
   to atomic objects.  They are fully piggybacked on the middle-end support
   for atomic loads and stores.

   *** Memory barriers and volatile objects ***

   We implement the weakened form of the C.6(16) clause that was introduced
   in Ada 2012 (AI05-117).  Earlier forms of this clause wouldn't have been
   implementable without significant performance hits on modern platforms.

   We also take advantage of the requirements imposed on shared variables by
   9.10 (conditions for sequential actions) to have non-erroneous execution
   and consider that C.6(16) and C.6(17) only prescribe a uniform order of
   volatile updates with regard to sequential actions, i.e. with regard to
   reads or updates of atomic objects.

   As such, an update of an atomic object by a task requires that all earlier
   accesses to volatile objects have completed.  Similarly, later accesses to
   volatile objects cannot be reordered before the update of the atomic object.
   So, memory barriers both before and after the atomic update are needed.

   For a read of an atomic object, to avoid seeing writes of volatile objects
   by a task earlier than by the other tasks, a memory barrier is needed before
   the atomic read.  Finally, to avoid reordering later reads or updates of
   volatile objects to before the atomic read, a barrier is needed after the
   atomic read.

   So, memory barriers are needed before and after atomic reads and updates.
   And, in order to simplify the implementation, we use full memory barriers
   in all cases, i.e. we enforce sequential consistency for atomic accesses.  */

/* Return the size of TYPE, which must be a positive power of 2.  */

static unsigned int
resolve_atomic_size (tree type)
{
  unsigned HOST_WIDE_INT size = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

  if (size == 1 || size == 2 || size == 4 || size == 8 || size == 16)
    return size;

  /* We shouldn't reach here without having already detected that the size
     isn't compatible with an atomic access.  */
  gcc_assert (Serious_Errors_Detected);

  return 0;
}

/* Build an atomic load for the underlying atomic object in SRC.  */

tree
build_atomic_load (tree src)
{
  tree ptr_type
    = build_pointer_type
      (build_qualified_type (void_type_node, TYPE_QUAL_VOLATILE));
  tree mem_model = build_int_cst (integer_type_node, MEMMODEL_SEQ_CST);
  tree orig_src = src;
  tree type = TREE_TYPE (src);
  tree t, val;
  unsigned int size;
  int fncode;

  src = remove_conversions (src, false);
  size = resolve_atomic_size (TREE_TYPE (src));
  if (size == 0)
    return orig_src;

  fncode = (int) BUILT_IN_ATOMIC_LOAD_N + exact_log2 (size) + 1;
  t = builtin_decl_implicit ((enum built_in_function) fncode);

  src = build_unary_op (ADDR_EXPR, ptr_type, src);
  val = build_call_expr (t, 2, src, mem_model);

  return unchecked_convert (type, val, true);
}
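
/* Illustrative sketch (not part of the original source): for a 4-byte atomic
   object obj (hypothetical name), exact_log2 (4) + 1 selects the size-specific
   variant of BUILT_IN_ATOMIC_LOAD_N, so the call built above is morally
   equivalent to

     val = __atomic_load_4 ((volatile void *) &obj, __ATOMIC_SEQ_CST);

   with the result then unchecked-converted back to the object's type.  */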

/* Build an atomic store from SRC to the underlying atomic object in DEST.  */

tree
build_atomic_store (tree dest, tree src)
{
  tree ptr_type
    = build_pointer_type
      (build_qualified_type (void_type_node, TYPE_QUAL_VOLATILE));
  tree mem_model = build_int_cst (integer_type_node, MEMMODEL_SEQ_CST);
  tree orig_dest = dest;
  tree t, int_type;
  unsigned int size;
  int fncode;

  dest = remove_conversions (dest, false);
  size = resolve_atomic_size (TREE_TYPE (dest));
  if (size == 0)
    return build_binary_op (MODIFY_EXPR, NULL_TREE, orig_dest, src);

  fncode = (int) BUILT_IN_ATOMIC_STORE_N + exact_log2 (size) + 1;
  t = builtin_decl_implicit ((enum built_in_function) fncode);
  int_type = gnat_type_for_size (BITS_PER_UNIT * size, 1);

  dest = build_unary_op (ADDR_EXPR, ptr_type, dest);
  src = unchecked_convert (int_type, src, true);

  return build_call_expr (t, 3, dest, src, mem_model);
}

/* Make a binary operation of kind OP_CODE.  RESULT_TYPE is the type
   desired for the result.  Usually the operation is to be performed
   in that type.  For INIT_EXPR and MODIFY_EXPR, RESULT_TYPE must be
   NULL_TREE.  For ARRAY_REF, RESULT_TYPE may be NULL_TREE, in which
   case the type to be used will be derived from the operands.

   This function is very much unlike the ones for C and C++ since we
   have already done any type conversion and matching required.  All we
   have to do here is validate the work done by SEM and handle subtypes.  */

tree
build_binary_op (enum tree_code op_code, tree result_type,
                 tree left_operand, tree right_operand)
{
  tree left_type = TREE_TYPE (left_operand);
  tree right_type = TREE_TYPE (right_operand);
  tree left_base_type = get_base_type (left_type);
  tree right_base_type = get_base_type (right_type);
  tree operation_type = result_type;
  tree best_type = NULL_TREE;
  tree modulus, result;
  bool has_side_effects = false;

  if (operation_type
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  if (operation_type
      && TREE_CODE (operation_type) == INTEGER_TYPE
      && TYPE_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  modulus = (operation_type
             && TREE_CODE (operation_type) == INTEGER_TYPE
             && TYPE_MODULAR_P (operation_type)
             ? TYPE_MODULUS (operation_type) : NULL_TREE);

  switch (op_code)
    {
    case INIT_EXPR:
    case MODIFY_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (result_type == NULL_TREE);
#endif
      /* If there were integral or pointer conversions on the LHS, remove
         them; we'll be putting them back below if needed.  Likewise for
         conversions between array and record types, except for justified
         modular types.  But don't do this if the right operand is not
         BLKmode (for packed arrays) unless we are not changing the mode.  */
      while ((CONVERT_EXPR_P (left_operand)
              || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
             && (((INTEGRAL_TYPE_P (left_type)
                   || POINTER_TYPE_P (left_type))
                  && (INTEGRAL_TYPE_P (TREE_TYPE
                                       (TREE_OPERAND (left_operand, 0)))
                      || POINTER_TYPE_P (TREE_TYPE
                                         (TREE_OPERAND (left_operand, 0)))))
                 || (((TREE_CODE (left_type) == RECORD_TYPE
                       && !TYPE_JUSTIFIED_MODULAR_P (left_type))
                      || TREE_CODE (left_type) == ARRAY_TYPE)
                     && ((TREE_CODE (TREE_TYPE
                                     (TREE_OPERAND (left_operand, 0)))
                          == RECORD_TYPE)
                         || (TREE_CODE (TREE_TYPE
                                        (TREE_OPERAND (left_operand, 0)))
                             == ARRAY_TYPE))
                     && (TYPE_MODE (right_type) == BLKmode
                         || (TYPE_MODE (left_type)
                             == TYPE_MODE (TREE_TYPE
                                           (TREE_OPERAND
                                            (left_operand, 0))))))))
        {
          left_operand = TREE_OPERAND (left_operand, 0);
          left_type = TREE_TYPE (left_operand);
        }

      /* If a class-wide type may be involved, force use of the RHS type.  */
      if ((TREE_CODE (right_type) == RECORD_TYPE
           || TREE_CODE (right_type) == UNION_TYPE)
          && TYPE_ALIGN_OK (right_type))
        operation_type = right_type;

      /* If we are copying between padded objects with compatible types, use
         the padded view of the objects; this is very likely more efficient.
         Likewise for a padded object that is assigned a constructor, if we
         can convert the constructor to the inner type, to avoid putting a
         VIEW_CONVERT_EXPR on the LHS.  But don't do so if we wouldn't have
         actually copied anything.  */
      else if (TYPE_IS_PADDING_P (left_type)
               && TREE_CONSTANT (TYPE_SIZE (left_type))
               && ((TREE_CODE (right_operand) == COMPONENT_REF
                    && TYPE_MAIN_VARIANT (left_type)
                       == TYPE_MAIN_VARIANT
                          (TREE_TYPE (TREE_OPERAND (right_operand, 0))))
                   || (TREE_CODE (right_operand) == CONSTRUCTOR
                       && !CONTAINS_PLACEHOLDER_P
                           (DECL_SIZE (TYPE_FIELDS (left_type)))))
               && !integer_zerop (TYPE_SIZE (right_type)))
        {
          /* We make an exception for a BLKmode type padding a non-BLKmode
             inner type and do the conversion of the LHS right away, since
             unchecked_convert wouldn't do it properly.  */
          if (TYPE_MODE (left_type) == BLKmode
              && TYPE_MODE (right_type) != BLKmode
              && TREE_CODE (right_operand) != CONSTRUCTOR)
            {
              operation_type = right_type;
              left_operand = convert (operation_type, left_operand);
              left_type = operation_type;
            }
          else
            operation_type = left_type;
        }

      /* If we have a call to a function that returns an unconstrained type
         with default discriminant on the RHS, use the RHS type (which is
         padded) as we cannot compute the size of the actual assignment.  */
      else if (TREE_CODE (right_operand) == CALL_EXPR
               && TYPE_IS_PADDING_P (right_type)
               && CONTAINS_PLACEHOLDER_P
                  (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS (right_type)))))
        operation_type = right_type;

      /* Find the best type to use for copying between aggregate types.  */
      else if (((TREE_CODE (left_type) == ARRAY_TYPE
                 && TREE_CODE (right_type) == ARRAY_TYPE)
                || (TREE_CODE (left_type) == RECORD_TYPE
                    && TREE_CODE (right_type) == RECORD_TYPE))
               && (best_type = find_common_type (left_type, right_type)))
        operation_type = best_type;

      /* Otherwise use the LHS type.  */
      else
        operation_type = left_type;

      /* Ensure everything on the LHS is valid.  If we have a field reference,
         strip anything that get_inner_reference can handle.  Then remove any
         conversions between types having the same code and mode.  And mark
         VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE.  When done, we must have
         either an INDIRECT_REF, a NULL_EXPR or a DECL node.  */
      result = left_operand;
      while (true)
        {
          tree restype = TREE_TYPE (result);

          if (TREE_CODE (result) == COMPONENT_REF
              || TREE_CODE (result) == ARRAY_REF
              || TREE_CODE (result) == ARRAY_RANGE_REF)
            while (handled_component_p (result))
              result = TREE_OPERAND (result, 0);
          else if (TREE_CODE (result) == REALPART_EXPR
                   || TREE_CODE (result) == IMAGPART_EXPR
                   || (CONVERT_EXPR_P (result)
                       && (((TREE_CODE (restype)
                             == TREE_CODE (TREE_TYPE
                                           (TREE_OPERAND (result, 0))))
                            && (TYPE_MODE (TREE_TYPE
                                           (TREE_OPERAND (result, 0)))
                                == TYPE_MODE (restype)))
                           || TYPE_ALIGN_OK (restype))))
            result = TREE_OPERAND (result, 0);
          else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
            {
              TREE_ADDRESSABLE (result) = 1;
              result = TREE_OPERAND (result, 0);
            }
          else
            break;
        }

      gcc_assert (TREE_CODE (result) == INDIRECT_REF
                  || TREE_CODE (result) == NULL_EXPR
                  || DECL_P (result));

      /* Convert the right operand to the operation type unless it is
         either already of the correct type or if the type involves a
         placeholder, since the RHS may not have the same record type.  */
      if (operation_type != right_type
          && !CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type)))
        {
          right_operand = convert (operation_type, right_operand);
          right_type = operation_type;
        }

      /* If the left operand is not of the same type as the operation
         type, wrap it up in a VIEW_CONVERT_EXPR.  */
      if (left_type != operation_type)
        left_operand = unchecked_convert (operation_type, left_operand, false);

      has_side_effects = true;
      modulus = NULL_TREE;
      break;

    case ARRAY_REF:
      if (!operation_type)
        operation_type = TREE_TYPE (left_type);

      /* ... fall through ... */

    case ARRAY_RANGE_REF:
      /* First look through conversion between type variants.  Note that
         this changes neither the operation type nor the type domain.  */
      if (TREE_CODE (left_operand) == VIEW_CONVERT_EXPR
          && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (left_operand, 0)))
             == TYPE_MAIN_VARIANT (left_type))
        {
          left_operand = TREE_OPERAND (left_operand, 0);
          left_type = TREE_TYPE (left_operand);
        }

      /* For a range, make sure the element type is consistent.  */
      if (op_code == ARRAY_RANGE_REF
          && TREE_TYPE (operation_type) != TREE_TYPE (left_type))
        operation_type = build_array_type (TREE_TYPE (left_type),
                                           TYPE_DOMAIN (operation_type));

      /* Then convert the right operand to its base type.  This will prevent
         unneeded sign conversions when sizetype is wider than integer.  */
      right_operand = convert (right_base_type, right_operand);
      right_operand = convert_to_index_type (right_operand);
      modulus = NULL_TREE;
      break;

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
#endif
      operation_type = left_base_type;
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);
      break;

    case GE_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
#endif
      /* If either operand is a NULL_EXPR, just return a new one.  */
      if (TREE_CODE (left_operand) == NULL_EXPR)
        return build2 (op_code, result_type,
                       build1 (NULL_EXPR, integer_type_node,
                               TREE_OPERAND (left_operand, 0)),
                       integer_zero_node);

      else if (TREE_CODE (right_operand) == NULL_EXPR)
        return build2 (op_code, result_type,
                       build1 (NULL_EXPR, integer_type_node,
                               TREE_OPERAND (right_operand, 0)),
                       integer_zero_node);

      /* If either object is a justified modular type, get the
         fields from within.  */
      if (TREE_CODE (left_type) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (left_type))
        {
          left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
                                  left_operand);
          left_type = TREE_TYPE (left_operand);
          left_base_type = get_base_type (left_type);
        }

      if (TREE_CODE (right_type) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (right_type))
        {
          right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
                                   right_operand);
          right_type = TREE_TYPE (right_operand);
          right_base_type = get_base_type (right_type);
        }

      /* If both objects are arrays, compare them specially.  */
      if ((TREE_CODE (left_type) == ARRAY_TYPE
           || (TREE_CODE (left_type) == INTEGER_TYPE
               && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
          && (TREE_CODE (right_type) == ARRAY_TYPE
              || (TREE_CODE (right_type) == INTEGER_TYPE
                  && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
        {
          result = compare_arrays (input_location,
                                   result_type, left_operand, right_operand);
          if (op_code == NE_EXPR)
            result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
          else
            gcc_assert (op_code == EQ_EXPR);

          return result;
        }

      /* Otherwise, the base types must be the same, unless they are both fat
         pointer types or record types.  In the latter case, use the best type
         and convert both operands to that type.  */
      if (left_base_type != right_base_type)
        {
          if (TYPE_IS_FAT_POINTER_P (left_base_type)
              && TYPE_IS_FAT_POINTER_P (right_base_type))
            {
              gcc_assert (TYPE_MAIN_VARIANT (left_base_type)
                          == TYPE_MAIN_VARIANT (right_base_type));
              best_type = left_base_type;
            }

          else if (TREE_CODE (left_base_type) == RECORD_TYPE
                   && TREE_CODE (right_base_type) == RECORD_TYPE)
            {
              /* The only way this is permitted is if both types have the same
                 name.  In that case, one of them must not be self-referential.
                 Use it as the best type.  Even better with a fixed size.  */
              gcc_assert (TYPE_NAME (left_base_type)
                          && TYPE_NAME (left_base_type)
                             == TYPE_NAME (right_base_type));

              if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
                best_type = left_base_type;
              else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
                best_type = right_base_type;
              else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
                best_type = left_base_type;
              else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
                best_type = right_base_type;
              else
                gcc_unreachable ();
            }

          else
            gcc_unreachable ();

          left_operand = convert (best_type, left_operand);
          right_operand = convert (best_type, right_operand);
        }
      else
        {
          left_operand = convert (left_base_type, left_operand);
          right_operand = convert (right_base_type, right_operand);
        }

      /* If both objects are fat pointers, compare them specially.  */
      if (TYPE_IS_FAT_POINTER_P (left_base_type))
        {
          result
            = compare_fat_pointers (input_location,
                                    result_type, left_operand, right_operand);
          if (op_code == NE_EXPR)
            result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
          else
            gcc_assert (op_code == EQ_EXPR);

          return result;
        }

      modulus = NULL_TREE;
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* The RHS of a shift can be any type.  Also, ignore any modulus
         (we used to abort, but this is needed for unchecked conversion
         to modular types).  Otherwise, processing is the same as normal.  */
      gcc_assert (operation_type == left_base_type);
      modulus = NULL_TREE;
      left_operand = convert (operation_type, left_operand);
      break;

    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      /* For binary modulus, if the inputs are in range, so are the
         outputs.  */
      if (modulus && integer_pow2p (modulus))
        modulus = NULL_TREE;
      goto common;

    case COMPLEX_EXPR:
      gcc_assert (TREE_TYPE (result_type) == left_base_type
                  && TREE_TYPE (result_type) == right_base_type);
      left_operand = convert (left_base_type, left_operand);
      right_operand = convert (right_base_type, right_operand);
      break;

    case TRUNC_DIV_EXPR:  case TRUNC_MOD_EXPR:
    case CEIL_DIV_EXPR:   case CEIL_MOD_EXPR:
    case FLOOR_DIV_EXPR:  case FLOOR_MOD_EXPR:
    case ROUND_DIV_EXPR:  case ROUND_MOD_EXPR:
      /* These always produce results lower than either operand.  */
      modulus = NULL_TREE;
      goto common;

    case POINTER_PLUS_EXPR:
      gcc_assert (operation_type == left_base_type
                  && sizetype == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (sizetype, right_operand);
      break;

    case PLUS_NOMOD_EXPR:
    case MINUS_NOMOD_EXPR:
      if (op_code == PLUS_NOMOD_EXPR)
        op_code = PLUS_EXPR;
      else
        op_code = MINUS_EXPR;
      modulus = NULL_TREE;

      /* ... fall through ... */

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* Avoid doing arithmetic in ENUMERAL_TYPE or BOOLEAN_TYPE like the
         other compilers.  Contrary to C, Ada doesn't allow arithmetic in
         these types but can generate addition/subtraction for Succ/Pred.  */
      if (operation_type
          && (TREE_CODE (operation_type) == ENUMERAL_TYPE
              || TREE_CODE (operation_type) == BOOLEAN_TYPE))
        operation_type = left_base_type = right_base_type
          = gnat_type_for_mode (TYPE_MODE (operation_type),
                                TYPE_UNSIGNED (operation_type));

      /* ... fall through ... */

    default:
    common:
      /* The result type should be the same as the base types of both
         operands (and they should be the same).  Convert everything to
         the result type.  */

      gcc_assert (operation_type == left_base_type
                  && left_base_type == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);
    }

  if (modulus && !integer_pow2p (modulus))
    {
      result = nonbinary_modular_operation (op_code, operation_type,
                                            left_operand, right_operand);
      modulus = NULL_TREE;
    }
  /* If either operand is a NULL_EXPR, just return a new one.  */
  else if (TREE_CODE (left_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
  else if (TREE_CODE (right_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
  else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
    result = fold (build4 (op_code, operation_type, left_operand,
                           right_operand, NULL_TREE, NULL_TREE));
  else if (op_code == INIT_EXPR || op_code == MODIFY_EXPR)
    result = build2 (op_code, void_type_node, left_operand, right_operand);
  else
    result
      = fold_build2 (op_code, operation_type, left_operand, right_operand);

  if (TREE_CONSTANT (result))
    ;
  else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
    {
      TREE_THIS_NOTRAP (result) = 1;
      if (TYPE_VOLATILE (operation_type))
        TREE_THIS_VOLATILE (result) = 1;
    }
  else
    TREE_CONSTANT (result)
      |= (TREE_CONSTANT (left_operand) && TREE_CONSTANT (right_operand));

  TREE_SIDE_EFFECTS (result) |= has_side_effects;

  /* If we are working with modular types, perform the MOD operation
     if something above hasn't eliminated the need for it.  */
  if (modulus)
    result = fold_build2 (FLOOR_MOD_EXPR, operation_type, result,
                          convert (operation_type, modulus));

  if (result_type && result_type != operation_type)
    result = convert (result_type, result);

  return result;
}
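
/* Illustrative sketch (not part of the original source): typical uses of
   build_binary_op within gigi, as also seen later in this file:

     // Assignment: RESULT_TYPE must be NULL_TREE for MODIFY_EXPR.
     tree assign = build_binary_op (MODIFY_EXPR, NULL_TREE, dest, src);

     // Arithmetic: operands are converted to the common base type first
     // (gnu_type, gnu_lhs and gnu_rhs are hypothetical names).
     tree sum = build_binary_op (PLUS_EXPR, gnu_type, gnu_lhs, gnu_rhs);  */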

/* Similar, but for unary operations.  */

tree
build_unary_op (enum tree_code op_code, tree result_type, tree operand)
{
  tree type = TREE_TYPE (operand);
  tree base_type = get_base_type (type);
  tree operation_type = result_type;
  tree result;

  if (operation_type
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  if (operation_type
      && TREE_CODE (operation_type) == INTEGER_TYPE
      && TYPE_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  switch (op_code)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      if (!operation_type)
        result_type = operation_type = TREE_TYPE (type);
      else
        gcc_assert (result_type == TREE_TYPE (type));

      result = fold_build1 (op_code, operation_type, operand);
      break;

    case TRUTH_NOT_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
#endif
      result = invert_truthvalue_loc (EXPR_LOCATION (operand), operand);
      /* When not optimizing, fold the result as invert_truthvalue_loc
         doesn't fold the result of comparisons.  This is intended to undo
         the trick used for boolean rvalues in gnat_to_gnu.  */
      if (!optimize)
        result = fold (result);
      break;

    case ATTR_ADDR_EXPR:
    case ADDR_EXPR:
      switch (TREE_CODE (operand))
        {
        case INDIRECT_REF:
        case UNCONSTRAINED_ARRAY_REF:
          result = TREE_OPERAND (operand, 0);

          /* Make sure the type here is a pointer, not a reference.
             GCC wants pointer types for function addresses.  */
          if (!result_type)
            result_type = build_pointer_type (type);

          /* If the underlying object can alias everything, propagate the
             property since we are effectively retrieving the object.  */
          if (POINTER_TYPE_P (TREE_TYPE (result))
              && TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (result)))
            {
              if (TREE_CODE (result_type) == POINTER_TYPE
                  && !TYPE_REF_CAN_ALIAS_ALL (result_type))
                result_type
                  = build_pointer_type_for_mode (TREE_TYPE (result_type),
                                                 TYPE_MODE (result_type),
                                                 true);
              else if (TREE_CODE (result_type) == REFERENCE_TYPE
                       && !TYPE_REF_CAN_ALIAS_ALL (result_type))
                result_type
                  = build_reference_type_for_mode (TREE_TYPE (result_type),
                                                   TYPE_MODE (result_type),
                                                   true);
            }
          break;

        case NULL_EXPR:
          result = operand;
          TREE_TYPE (result) = type = build_pointer_type (type);
          break;

        case COMPOUND_EXPR:
          /* Fold a compound expression if it has unconstrained array type
             since the middle-end cannot handle it.  But we don't do it in
             the general case because it may introduce aliasing issues if the
             first operand is an indirect assignment and the second operand
             the corresponding address, e.g. for an allocator.  */
          if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
            {
              result = build_unary_op (ADDR_EXPR, result_type,
                                       TREE_OPERAND (operand, 1));
              result = build2 (COMPOUND_EXPR, TREE_TYPE (result),
                               TREE_OPERAND (operand, 0), result);
              break;
            }
          goto common;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
        case COMPONENT_REF:
        case BIT_FIELD_REF:
          /* If this is for 'Address, find the address of the prefix and add
             the offset to the field.  Otherwise, do this the normal way.  */
          if (op_code == ATTR_ADDR_EXPR)
            {
              HOST_WIDE_INT bitsize;
              HOST_WIDE_INT bitpos;
              tree offset, inner;
              enum machine_mode mode;
              int unsignedp, volatilep;

              inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
                                           &mode, &unsignedp, &volatilep,
                                           false);

              /* If INNER is a padding type whose field has a self-referential
                 size, convert to that inner type.  We know the offset is zero
                 and we need to have that type visible.  */
              if (TYPE_IS_PADDING_P (TREE_TYPE (inner))
                  && CONTAINS_PLACEHOLDER_P
                     (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS
                                            (TREE_TYPE (inner))))))
                inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
                                 inner);

              /* Compute the offset as a byte offset from INNER.  */
              if (!offset)
                offset = size_zero_node;

              offset = size_binop (PLUS_EXPR, offset,
                                   size_int (bitpos / BITS_PER_UNIT));

              /* Take the address of INNER, convert the offset to void *, and
                 add them.  It will later be converted to the desired result
                 type, if any.  */
              inner = build_unary_op (ADDR_EXPR, NULL_TREE, inner);
              inner = convert (ptr_void_type_node, inner);
              result = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
                                        inner, offset);
              result = convert (build_pointer_type (TREE_TYPE (operand)),
                                result);
              break;
            }
          goto common;

        case CONSTRUCTOR:
          /* If this is just a constructor for a padded record, we can
             just take the address of the single field and convert it to
             a pointer to our type.  */
          if (TYPE_IS_PADDING_P (type))
            {
              result = (*CONSTRUCTOR_ELTS (operand))[0].value;
              result = convert (build_pointer_type (TREE_TYPE (operand)),
                                build_unary_op (ADDR_EXPR, NULL_TREE, result));
              break;
            }
          goto common;

        case NOP_EXPR:
          if (AGGREGATE_TYPE_P (type)
              && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
            return build_unary_op (ADDR_EXPR, result_type,
                                   TREE_OPERAND (operand, 0));

          /* ... fallthru ... */

        case VIEW_CONVERT_EXPR:
          /* If this is just a variant conversion or if the conversion doesn't
             change the mode, get the result type from this type and go down.
             This is needed for conversions of CONST_DECLs, to eventually get
             to the address of their CORRESPONDING_VARs.  */
          if ((TYPE_MAIN_VARIANT (type)
               == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (operand, 0))))
              || (TYPE_MODE (type) != BLKmode
                  && (TYPE_MODE (type)
                      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0))))))
            return build_unary_op (ADDR_EXPR,
                                   (result_type ? result_type
                                    : build_pointer_type (type)),
                                   TREE_OPERAND (operand, 0));
          goto common;

        case CONST_DECL:
          operand = DECL_CONST_CORRESPONDING_VAR (operand);

          /* ... fall through ... */

        default:
        common:

          /* If we are taking the address of a padded record whose field
             contains a template, take the address of the field.  */
          if (TYPE_IS_PADDING_P (type)
              && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
              && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
            {
              type = TREE_TYPE (TYPE_FIELDS (type));
              operand = convert (type, operand);
            }

          gnat_mark_addressable (operand);
          result = build_fold_addr_expr (operand);
        }

      TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
      break;

    case INDIRECT_REF:
      {
        tree t = remove_conversions (operand, false);
        bool can_never_be_null = DECL_P (t) && DECL_CAN_NEVER_BE_NULL_P (t);

        /* If TYPE is a thin pointer, either first retrieve the base if this
           is an expression with an offset built for the initialization of an
           object with an unconstrained nominal subtype, or else convert to
           the fat pointer.  */
        if (TYPE_IS_THIN_POINTER_P (type))
          {
            tree rec_type = TREE_TYPE (type);

            if (TREE_CODE (operand) == POINTER_PLUS_EXPR
                && TREE_OPERAND (operand, 1)
                   == byte_position (DECL_CHAIN (TYPE_FIELDS (rec_type)))
                && TREE_CODE (TREE_OPERAND (operand, 0)) == NOP_EXPR)
              {
                operand = TREE_OPERAND (TREE_OPERAND (operand, 0), 0);
                type = TREE_TYPE (operand);
              }
            else if (TYPE_UNCONSTRAINED_ARRAY (rec_type))
              {
                operand
                  = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (rec_type)),
                             operand);
                type = TREE_TYPE (operand);
              }
          }

        /* If we want to refer to an unconstrained array, use the appropriate
           expression.  But this will never survive down to the back-end.  */
        if (TYPE_IS_FAT_POINTER_P (type))
          {
            result = build1 (UNCONSTRAINED_ARRAY_REF,
                             TYPE_UNCONSTRAINED_ARRAY (type), operand);
            TREE_READONLY (result)
              = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
          }

        /* If we are dereferencing an ADDR_EXPR, return its operand.  */
        else if (TREE_CODE (operand) == ADDR_EXPR)
          result = TREE_OPERAND (operand, 0);

        /* Otherwise, build and fold the indirect reference.  */
        else
          {
            result = build_fold_indirect_ref (operand);
            TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
          }

        if (!TYPE_IS_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)))
          {
            TREE_SIDE_EFFECTS (result) = 1;
            if (TREE_CODE (result) == INDIRECT_REF)
              TREE_THIS_VOLATILE (result)
                = TYPE_VOLATILE (TREE_TYPE (result));
          }

        if ((TREE_CODE (result) == INDIRECT_REF
             || TREE_CODE (result) == UNCONSTRAINED_ARRAY_REF)
            && can_never_be_null)
          TREE_THIS_NOTRAP (result) = 1;

        break;
      }

    case NEGATE_EXPR:
    case BIT_NOT_EXPR:
      {
        tree modulus = ((operation_type
                         && TREE_CODE (operation_type) == INTEGER_TYPE
                         && TYPE_MODULAR_P (operation_type))
                        ? TYPE_MODULUS (operation_type) : NULL_TREE);
        int mod_pow2 = modulus && integer_pow2p (modulus);

        /* If this is a modular type, there are various possibilities
           depending on the operation and whether the modulus is a
           power of two or not.  */

        if (modulus)
          {
            gcc_assert (operation_type == base_type);
            operand = convert (operation_type, operand);

            /* The fastest in the negate case for binary modulus is
               the straightforward code; the TRUNC_MOD_EXPR below
               is an AND operation.  */
            if (op_code == NEGATE_EXPR && mod_pow2)
              result = fold_build2 (TRUNC_MOD_EXPR, operation_type,
                                    fold_build1 (NEGATE_EXPR, operation_type,
                                                 operand),
                                    modulus);

            /* For the nonbinary negate case, return zero for a zero operand,
               else return the modulus minus the operand.  If the modulus
               is a power of two minus one, we can do the subtraction
               as an XOR since it is equivalent and faster on most machines.  */
            else if (op_code == NEGATE_EXPR && !mod_pow2)
              {
                if (integer_pow2p (fold_build2 (PLUS_EXPR, operation_type,
                                                modulus,
                                                convert (operation_type,
                                                         integer_one_node))))
                  result = fold_build2 (BIT_XOR_EXPR, operation_type,
                                        operand, modulus);
                else
                  result = fold_build2 (MINUS_EXPR, operation_type,
                                        modulus, operand);

                result = fold_build3 (COND_EXPR, operation_type,
                                      fold_build2 (NE_EXPR,
                                                   boolean_type_node,
                                                   operand,
                                                   convert
                                                   (operation_type,
                                                    integer_zero_node)),
                                      result, operand);
              }
            else
              {
                /* For the NOT cases, we need a constant equal to
                   the modulus minus one.  For a binary modulus, we
                   XOR against the constant and subtract the operand from
                   that constant for nonbinary modulus.  */

                tree cnst = fold_build2 (MINUS_EXPR, operation_type, modulus,
                                         convert (operation_type,
                                                  integer_one_node));

                if (mod_pow2)
                  result = fold_build2 (BIT_XOR_EXPR, operation_type,
                                        operand, cnst);
                else
                  result = fold_build2 (MINUS_EXPR, operation_type,
                                        cnst, operand);
              }

            break;
          }
      }

      /* ... fall through ... */

    default:
      gcc_assert (operation_type == base_type);
      result = fold_build1 (op_code, operation_type,
                            convert (operation_type, operand));
    }

  if (result_type && TREE_TYPE (result) != result_type)
    result = convert (result_type, result);

  return result;
}

/* Similar, but for COND_EXPR.  */

tree
build_cond_expr (tree result_type, tree condition_operand,
                 tree true_operand, tree false_operand)
{
  bool addr_p = false;
  tree result;

  /* The front-end verified that result, true and false operands have
     same base type.  Convert everything to the result type.  */
  true_operand = convert (result_type, true_operand);
  false_operand = convert (result_type, false_operand);

  /* If the result type is unconstrained, take the address of the operands and
     then dereference the result.  Likewise if the result type is passed by
     reference, because creating a temporary of this type is not allowed.  */
  if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
      || TYPE_IS_BY_REFERENCE_P (result_type)
      || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
    {
      result_type = build_pointer_type (result_type);
      true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
      false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
      addr_p = true;
    }

  result = fold_build3 (COND_EXPR, result_type, condition_operand,
                        true_operand, false_operand);

  /* If we have a common SAVE_EXPR (possibly surrounded by arithmetics)
     in both arms, make sure it gets evaluated by moving it ahead of the
     conditional expression.  This is necessary because it is evaluated
     in only one place at run time and would otherwise be uninitialized
     in one of the arms.  */
  true_operand = skip_simple_arithmetic (true_operand);
  false_operand = skip_simple_arithmetic (false_operand);

  if (true_operand == false_operand && TREE_CODE (true_operand) == SAVE_EXPR)
    result = build2 (COMPOUND_EXPR, result_type, true_operand, result);

  if (addr_p)
    result = build_unary_op (INDIRECT_REF, NULL_TREE, result);

  return result;
}

/* Similar, but for COMPOUND_EXPR.  */

tree
build_compound_expr (tree result_type, tree stmt_operand, tree expr_operand)
{
  bool addr_p = false;
  tree result;

  /* If the result type is unconstrained, take the address of the operand and
     then dereference the result.  Likewise if the result type is passed by
     reference, but this is natively handled in the gimplifier.  */
  if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
      || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
    {
      result_type = build_pointer_type (result_type);
      expr_operand = build_unary_op (ADDR_EXPR, result_type, expr_operand);
      addr_p = true;
    }

  result = fold_build2 (COMPOUND_EXPR, result_type, stmt_operand,
                        expr_operand);

  if (addr_p)
    result = build_unary_op (INDIRECT_REF, NULL_TREE, result);

  return result;
}

/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */

tree
build_call_n_expr (tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  return fn;
}
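
/* Illustrative sketch (not part of the original source): build_call_n_expr
   is used by the raise routines below, e.g.

     build_call_n_expr (fndecl, 2, filename_addr, line_number_cst);

   (filename_addr and line_number_cst are hypothetical names for the two
   argument trees).  Unlike build_call_expr, the result is guaranteed to be
   a CALL_EXPR, never a folded constant.  */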
1642 /* Call a function that raises an exception and pass the line number and file
1643 name, if requested. MSG says which exception function to call.
1645 GNAT_NODE is the gnat node conveying the source location for which the
1646 error should be signaled, or Empty in which case the error is signaled on
1647 the current ref_file_name/input_line.
1649 KIND says which kind of exception this is for
1650 (N_Raise_{Constraint,Storage,Program}_Error). */
1652 tree
1653 build_call_raise (int msg, Node_Id gnat_node, char kind)
1655 tree fndecl = gnat_raise_decls[msg];
1656 tree label = get_exception_label (kind);
1657 tree filename;
1658 int line_number;
1659 const char *str;
1660 int len;
1662 /* If this is to be done as a goto, handle that case. */
1663 if (label)
1665 Entity_Id local_raise = Get_Local_Raise_Call_Entity ();
1666 tree gnu_result = build1 (GOTO_EXPR, void_type_node, label);
1668 /* If Local_Raise is present, generate
1669 Local_Raise (exception'Identity); */
1670 if (Present (local_raise))
1672 tree gnu_local_raise
1673 = gnat_to_gnu_entity (local_raise, NULL_TREE, 0);
1674 tree gnu_exception_entity
1675 = gnat_to_gnu_entity (Get_RT_Exception_Entity (msg), NULL_TREE, 0);
1676 tree gnu_call
1677 = build_call_n_expr (gnu_local_raise, 1,
1678 build_unary_op (ADDR_EXPR, NULL_TREE,
1679 gnu_exception_entity));
1681 gnu_result = build2 (COMPOUND_EXPR, void_type_node,
1682 gnu_call, gnu_result);
1683 }
1684 return gnu_result;
1687 str
1688 = (Debug_Flag_NN || Exception_Locations_Suppressed)
1689 ? ""
1690 : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1691 ? IDENTIFIER_POINTER
1692 (get_identifier (Get_Name_String
1693 (Debug_Source_Name
1694 (Get_Source_File_Index (Sloc (gnat_node))))))
1695 : ref_filename;
1697 len = strlen (str);
1698 filename = build_string (len, str);
1699 line_number
1700 = (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1701 ? Get_Logical_Line_Number (Sloc (gnat_node)) : input_line;
1703 TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
1704 build_index_type (size_int (len)));
1706 return
1707 build_call_n_expr (fndecl, 2,
1708 build1 (ADDR_EXPR,
1709 build_pointer_type (unsigned_char_type_node),
1710 filename),
1711 build_int_cst (NULL_TREE, line_number));
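/* For illustration, assuming MSG selects one of the __gnat_rcheck_* runtime
   routines (the exact routine names here are an assumption), the tree built
   above corresponds to a call of the form

     __gnat_rcheck_xx ("file.adb", 42);

   i.e. the raise routine applied to the file name string and the logical
   line number.  */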
1714 /* Similar to build_call_raise, for an index or range check exception as
1715 determined by MSG, with extra information generated of the form
1716 "INDEX out of range FIRST..LAST". */
1718 tree
1719 build_call_raise_range (int msg, Node_Id gnat_node,
1720 tree index, tree first, tree last)
1722 tree fndecl = gnat_raise_decls_ext[msg];
1723 tree filename;
1724 int line_number, column_number;
1725 const char *str;
1726 int len;
1728 str
1729 = (Debug_Flag_NN || Exception_Locations_Suppressed)
1730 ? ""
1731 : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1732 ? IDENTIFIER_POINTER
1733 (get_identifier (Get_Name_String
1734 (Debug_Source_Name
1735 (Get_Source_File_Index (Sloc (gnat_node))))))
1736 : ref_filename;
1738 len = strlen (str);
1739 filename = build_string (len, str);
1740 if (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1742 line_number = Get_Logical_Line_Number (Sloc (gnat_node));
1743 column_number = Get_Column_Number (Sloc (gnat_node));
1745 else
1747 line_number = input_line;
1748 column_number = 0;
1751 TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
1752 build_index_type (size_int (len)));
1754 return
1755 build_call_n_expr (fndecl, 6,
1756 build1 (ADDR_EXPR,
1757 build_pointer_type (unsigned_char_type_node),
1758 filename),
1759 build_int_cst (NULL_TREE, line_number),
1760 build_int_cst (NULL_TREE, column_number),
1761 convert (integer_type_node, index),
1762 convert (integer_type_node, first),
1763 convert (integer_type_node, last));
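/* For illustration, with hypothetical argument values, the extended raise
   routine receives six arguments:

     __gnat_rcheck_xx_ext ("file.adb", line, column, index, first, last);

   which lets the runtime format the message "INDEX out of range
   FIRST..LAST".  */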
1766 /* Similar to build_call_raise, with extra information about the column
1767 where the check failed. */
1769 tree
1770 build_call_raise_column (int msg, Node_Id gnat_node)
1772 tree fndecl = gnat_raise_decls_ext[msg];
1773 tree filename;
1774 int line_number, column_number;
1775 const char *str;
1776 int len;
1778 str
1779 = (Debug_Flag_NN || Exception_Locations_Suppressed)
1780 ? ""
1781 : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1782 ? IDENTIFIER_POINTER
1783 (get_identifier (Get_Name_String
1784 (Debug_Source_Name
1785 (Get_Source_File_Index (Sloc (gnat_node))))))
1786 : ref_filename;
1788 len = strlen (str);
1789 filename = build_string (len, str);
1790 if (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1792 line_number = Get_Logical_Line_Number (Sloc (gnat_node));
1793 column_number = Get_Column_Number (Sloc (gnat_node));
1795 else
1797 line_number = input_line;
1798 column_number = 0;
1801 TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
1802 build_index_type (size_int (len)));
1804 return
1805 build_call_n_expr (fndecl, 3,
1806 build1 (ADDR_EXPR,
1807 build_pointer_type (unsigned_char_type_node),
1808 filename),
1809 build_int_cst (NULL_TREE, line_number),
1810 build_int_cst (NULL_TREE, column_number));
1813 /* qsort comparator for the bit positions of two constructor elements
1814 for record components. */
1816 static int
1817 compare_elmt_bitpos (const PTR rt1, const PTR rt2)
1819 const constructor_elt * const elmt1 = (const constructor_elt * const) rt1;
1820 const constructor_elt * const elmt2 = (const constructor_elt * const) rt2;
1821 const_tree const field1 = elmt1->index;
1822 const_tree const field2 = elmt2->index;
1823 const int ret
1824 = tree_int_cst_compare (bit_position (field1), bit_position (field2));
1826 return ret ? ret : (int) (DECL_UID (field1) - DECL_UID (field2));
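/* For illustration: given fields at bit positions 0 and 64, the comparison
   above yields a negative value, so qsort puts the field at position 0
   first; two zero-sized fields sharing a position fall back to DECL_UID
   order, which keeps the ordering total and the sort deterministic.  */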
1829 /* Return a CONSTRUCTOR of TYPE whose elements are V. */
1831 tree
1832 gnat_build_constructor (tree type, vec<constructor_elt, va_gc> *v)
1834 bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
1835 bool side_effects = false;
1836 tree result, obj, val;
1837 unsigned int n_elmts;
1839 /* Scan the elements to see if they are all constant or if any has side
1840 effects, to let us set global flags on the resulting constructor. Count
1841 the elements along the way for possible sorting purposes below. */
1842 FOR_EACH_CONSTRUCTOR_ELT (v, n_elmts, obj, val)
1844 /* The predicate must be in keeping with output_constructor. */
1845 if ((!TREE_CONSTANT (val) && !TREE_STATIC (val))
1846 || (TREE_CODE (type) == RECORD_TYPE
1847 && CONSTRUCTOR_BITFIELD_P (obj)
1848 && !initializer_constant_valid_for_bitfield_p (val))
1849 || !initializer_constant_valid_p (val, TREE_TYPE (val)))
1850 allconstant = false;
1852 if (TREE_SIDE_EFFECTS (val))
1853 side_effects = true;
1856 /* For record types with constant components only, sort field list
1857 by increasing bit position. This is necessary to ensure the
1858 constructor can be output as static data. */
1859 if (allconstant && TREE_CODE (type) == RECORD_TYPE && n_elmts > 1)
1860 v->qsort (compare_elmt_bitpos);
1862 result = build_constructor (type, v);
1863 TREE_CONSTANT (result) = TREE_STATIC (result) = allconstant;
1864 TREE_SIDE_EFFECTS (result) = side_effects;
1865 TREE_READONLY (result) = TYPE_READONLY (type) || allconstant;
1866 return result;
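/* A minimal usage sketch (FIELD1/FIELD2 and the values are hypothetical),
   mirroring what build_allocator does below for fat-pointer storage:

     vec<constructor_elt, va_gc> *v;
     vec_alloc (v, 2);
     CONSTRUCTOR_APPEND_ELT (v, field1, value1);
     CONSTRUCTOR_APPEND_ELT (v, field2, value2);
     init = gnat_build_constructor (record_type, v);

   If every value is constant, the elements are sorted by bit position and
   the result is flagged TREE_CONSTANT/TREE_STATIC.  */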
1869 /* Return a COMPONENT_REF to access a field that is given by COMPONENT,
1870 an IDENTIFIER_NODE giving the name of the field, or FIELD, a FIELD_DECL,
1871 for the field. Don't fold the result if NO_FOLD_P is true.
1873 We also handle the fact that we might have been passed a pointer to the
1874 actual record and know how to look for fields in variant parts. */
1876 static tree
1877 build_simple_component_ref (tree record_variable, tree component,
1878 tree field, bool no_fold_p)
1880 tree record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_variable));
1881 tree ref, inner_variable;
1883 gcc_assert (RECORD_OR_UNION_TYPE_P (record_type)
1884 && COMPLETE_TYPE_P (record_type)
1885 && (component == NULL_TREE) != (field == NULL_TREE));
1887 /* If no field was specified, look for a field with the specified name in
1888 the current record only. */
1889 if (!field)
1890 for (field = TYPE_FIELDS (record_type);
1891 field;
1892 field = DECL_CHAIN (field))
1893 if (DECL_NAME (field) == component)
1894 break;
1896 if (!field)
1897 return NULL_TREE;
1899 /* If this field is not in the specified record, see if we can find a field
1900 in the specified record whose original field is the same as this one. */
1901 if (DECL_CONTEXT (field) != record_type)
1903 tree new_field;
1905 /* First loop through normal components. */
1906 for (new_field = TYPE_FIELDS (record_type);
1907 new_field;
1908 new_field = DECL_CHAIN (new_field))
1909 if (SAME_FIELD_P (field, new_field))
1910 break;
1912 /* Next, see if we're looking for an inherited component in an extension.
1913 If so, look through the extension directly, but not if the type contains
1914 a placeholder, as it might be needed for a later substitution. */
1915 if (!new_field
1916 && TREE_CODE (record_variable) == VIEW_CONVERT_EXPR
1917 && TYPE_ALIGN_OK (record_type)
1918 && !type_contains_placeholder_p (record_type)
1919 && TREE_CODE (TREE_TYPE (TREE_OPERAND (record_variable, 0)))
1920 == RECORD_TYPE
1921 && TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (record_variable, 0))))
1923 ref = build_simple_component_ref (TREE_OPERAND (record_variable, 0),
1924 NULL_TREE, field, no_fold_p);
1925 if (ref)
1926 return ref;
1929 /* Next, loop through DECL_INTERNAL_P components if we haven't found the
1930 component in the first search. Doing this search in two steps is
1931 required to avoid hidden homonymous fields in the _Parent field. */
1932 if (!new_field)
1933 for (new_field = TYPE_FIELDS (record_type);
1934 new_field;
1935 new_field = DECL_CHAIN (new_field))
1936 if (DECL_INTERNAL_P (new_field))
1938 tree field_ref
1939 = build_simple_component_ref (record_variable,
1940 NULL_TREE, new_field, no_fold_p);
1941 ref = build_simple_component_ref (field_ref, NULL_TREE, field,
1942 no_fold_p);
1943 if (ref)
1944 return ref;
1947 field = new_field;
1950 if (!field)
1951 return NULL_TREE;
1953 /* If the field's offset has overflowed, do not try to access it, as doing
1954 so may trigger sanity checks deeper in the back-end. Note that we don't
1955 need to warn since this will be done on trying to declare the object. */
1956 if (TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST
1957 && TREE_OVERFLOW (DECL_FIELD_OFFSET (field)))
1958 return NULL_TREE;
1960 /* Look through conversions between type variants. This is transparent as
1961 far as the field is concerned. */
1962 if (TREE_CODE (record_variable) == VIEW_CONVERT_EXPR
1963 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (record_variable, 0)))
1964 == record_type)
1965 inner_variable = TREE_OPERAND (record_variable, 0);
1966 else
1967 inner_variable = record_variable;
1969 ref = build3 (COMPONENT_REF, TREE_TYPE (field), inner_variable, field,
1970 NULL_TREE);
1972 if (TREE_READONLY (record_variable)
1973 || TREE_READONLY (field)
1974 || TYPE_READONLY (record_type))
1975 TREE_READONLY (ref) = 1;
1977 if (TREE_THIS_VOLATILE (record_variable)
1978 || TREE_THIS_VOLATILE (field)
1979 || TYPE_VOLATILE (record_type))
1980 TREE_THIS_VOLATILE (ref) = 1;
1982 if (no_fold_p)
1983 return ref;
1985 /* The generic folder may punt in this case because the inner array type
1986 can be self-referential, but folding is in fact not problematic. */
1987 if (TREE_CODE (record_variable) == CONSTRUCTOR
1988 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (record_variable)))
1990 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (record_variable);
1991 unsigned HOST_WIDE_INT idx;
1992 tree index, value;
1993 FOR_EACH_CONSTRUCTOR_ELT (elts, idx, index, value)
1994 if (index == field)
1995 return value;
1996 return ref;
1999 return fold (ref);
2002 /* Like build_simple_component_ref, except that we give an error if the
2003 reference could not be found. */
2005 tree
2006 build_component_ref (tree record_variable, tree component,
2007 tree field, bool no_fold_p)
2009 tree ref = build_simple_component_ref (record_variable, component, field,
2010 no_fold_p);
2012 if (ref)
2013 return ref;
2015 /* If FIELD was specified, assume this is an invalid user field so raise
2016 Constraint_Error. Otherwise, we have no type to return so abort. */
2017 gcc_assert (field);
2018 return build1 (NULL_EXPR, TREE_TYPE (field),
2019 build_call_raise (CE_Discriminant_Check_Failed, Empty,
2020 N_Raise_Constraint_Error));
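/* For illustration: fetching the first field of a record object, as done
   for the aligning field in maybe_wrap_malloc below:

     ref = build_component_ref (record, NULL_TREE, TYPE_FIELDS (type), false);

   Passing an IDENTIFIER_NODE as COMPONENT instead looks the field up by
   name in the record type.  */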
2023 /* Helper for build_call_alloc_dealloc, with arguments to be interpreted
2024 identically. Process the case where a GNAT_PROC to call is provided. */
2026 static inline tree
2027 build_call_alloc_dealloc_proc (tree gnu_obj, tree gnu_size, tree gnu_type,
2028 Entity_Id gnat_proc, Entity_Id gnat_pool)
2030 tree gnu_proc = gnat_to_gnu (gnat_proc);
2031 tree gnu_call;
2033 /* A storage pool's underlying type is a record type (for both predefined
2034 storage pools and GNAT simple storage pools). The secondary stack uses
2035 the same mechanism, but its pool object (SS_Pool) is an integer. */
2036 if (Is_Record_Type (Underlying_Type (Etype (gnat_pool))))
2038 /* The size is the third parameter; the alignment parameter that
2039 follows it has the same type. */
2040 Entity_Id gnat_size_type
2041 = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
2042 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2044 tree gnu_pool = gnat_to_gnu (gnat_pool);
2045 tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
2046 tree gnu_align = size_int (TYPE_ALIGN (gnu_type) / BITS_PER_UNIT);
2048 gnu_size = convert (gnu_size_type, gnu_size);
2049 gnu_align = convert (gnu_size_type, gnu_align);
2051 /* The first arg is always the address of the storage pool; next
2052 comes the address of the object, for a deallocator, then the
2053 size and alignment. */
2054 if (gnu_obj)
2055 gnu_call = build_call_n_expr (gnu_proc, 4, gnu_pool_addr, gnu_obj,
2056 gnu_size, gnu_align);
2057 else
2058 gnu_call = build_call_n_expr (gnu_proc, 3, gnu_pool_addr,
2059 gnu_size, gnu_align);
2062 /* Secondary stack case. */
2063 else
2065 /* The size is the second parameter. */
2066 Entity_Id gnat_size_type
2067 = Etype (Next_Formal (First_Formal (gnat_proc)));
2068 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2070 gnu_size = convert (gnu_size_type, gnu_size);
2072 /* The first arg is the address of the object, for a deallocator,
2073 then the size. */
2074 if (gnu_obj)
2075 gnu_call = build_call_n_expr (gnu_proc, 2, gnu_obj, gnu_size);
2076 else
2077 gnu_call = build_call_n_expr (gnu_proc, 1, gnu_size);
2080 return gnu_call;
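/* For illustration, the call shapes built above are, schematically:

     storage pool:       proc (&pool, size, align)        (allocation)
                         proc (&pool, obj, size, align)   (deallocation)
     secondary stack:    proc (size)                      (allocation)
                         proc (obj, size)                 (deallocation)

   where &pool is the address of the storage pool object.  */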
2083 /* Helper for build_call_alloc_dealloc, to build and return an allocator for
2084 DATA_SIZE bytes aimed at containing a DATA_TYPE object, using the default
2085 __gnat_malloc allocator. Honor DATA_TYPE alignments greater than what the
2086 latter offers. */
2088 static inline tree
2089 maybe_wrap_malloc (tree data_size, tree data_type, Node_Id gnat_node)
2091 /* When the DATA_TYPE alignment is stricter than what malloc offers
2092 (super-aligned case), we allocate an "aligning" wrapper type and return
2093 the address of its single data field with the malloc's return value
2094 stored just in front. */
2096 unsigned int data_align = TYPE_ALIGN (data_type);
2097 unsigned int system_allocator_alignment
2098 = get_target_system_allocator_alignment () * BITS_PER_UNIT;
2100 tree aligning_type
2101 = ((data_align > system_allocator_alignment)
2102 ? make_aligning_type (data_type, data_align, data_size,
2103 system_allocator_alignment,
2104 POINTER_SIZE / BITS_PER_UNIT,
2105 gnat_node)
2106 : NULL_TREE);
2108 tree size_to_malloc
2109 = aligning_type ? TYPE_SIZE_UNIT (aligning_type) : data_size;
2111 tree malloc_ptr;
2113 /* On VMS, if pointers are 64-bit and the allocator size is 32-bit or
2114 Convention C, allocate 32-bit memory. */
2115 if (TARGET_ABI_OPEN_VMS
2116 && POINTER_SIZE == 64
2117 && Nkind (gnat_node) == N_Allocator
2118 && (UI_To_Int (Esize (Etype (gnat_node))) == 32
2119 || Convention (Etype (gnat_node)) == Convention_C))
2120 malloc_ptr = build_call_n_expr (malloc32_decl, 1, size_to_malloc);
2121 else
2122 malloc_ptr = build_call_n_expr (malloc_decl, 1, size_to_malloc);
2124 if (aligning_type)
2126 /* Latch malloc's return value and get a pointer to the aligning field
2127 first. */
2128 tree storage_ptr = gnat_protect_expr (malloc_ptr);
2130 tree aligning_record_addr
2131 = convert (build_pointer_type (aligning_type), storage_ptr);
2133 tree aligning_record
2134 = build_unary_op (INDIRECT_REF, NULL_TREE, aligning_record_addr);
2136 tree aligning_field
2137 = build_component_ref (aligning_record, NULL_TREE,
2138 TYPE_FIELDS (aligning_type), false);
2140 tree aligning_field_addr
2141 = build_unary_op (ADDR_EXPR, NULL_TREE, aligning_field);
2143 /* Then arrange to store the allocator's return value ahead
2144 and return. */
2145 tree storage_ptr_slot_addr
2146 = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
2147 convert (ptr_void_type_node, aligning_field_addr),
2148 size_int (-(HOST_WIDE_INT) POINTER_SIZE
2149 / BITS_PER_UNIT));
2151 tree storage_ptr_slot
2152 = build_unary_op (INDIRECT_REF, NULL_TREE,
2153 convert (build_pointer_type (ptr_void_type_node),
2154 storage_ptr_slot_addr));
2156 return
2157 build2 (COMPOUND_EXPR, TREE_TYPE (aligning_field_addr),
2158 build_binary_op (INIT_EXPR, NULL_TREE,
2159 storage_ptr_slot, storage_ptr),
2160 aligning_field_addr);
2162 else
2163 return malloc_ptr;
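/* A sketch of the super-aligned layout produced above:

     storage_ptr (raw malloc result)      returned value
     |                                    |
     v                                    v
     +--------------+--------------------+-------------------+
     | padding      | saved storage_ptr  | DATA_TYPE object  |
     +--------------+--------------------+-------------------+
                                          aligned on DATA_ALIGN

   The raw malloc result is saved in the pointer-sized slot immediately
   before the returned address so that maybe_wrap_free can retrieve it.  */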
2166 /* Helper for build_call_alloc_dealloc, to release a DATA_TYPE object
2167 designated by DATA_PTR using the __gnat_free entry point. */
2169 static inline tree
2170 maybe_wrap_free (tree data_ptr, tree data_type)
2172 /* In the regular alignment case, we pass the data pointer straight to free.
2173 In the superaligned case, we need to retrieve the initial allocator
2174 return value, stored in front of the data block at allocation time. */
2176 unsigned int data_align = TYPE_ALIGN (data_type);
2177 unsigned int system_allocator_alignment
2178 = get_target_system_allocator_alignment () * BITS_PER_UNIT;
2180 tree free_ptr;
2182 if (data_align > system_allocator_alignment)
2184 /* DATA_FRONT_PTR (void *)
2185 = (void *)DATA_PTR - sizeof (void *) */
2186 tree data_front_ptr
2187 = build_binary_op
2188 (POINTER_PLUS_EXPR, ptr_void_type_node,
2189 convert (ptr_void_type_node, data_ptr),
2190 size_int (-(HOST_WIDE_INT) POINTER_SIZE / BITS_PER_UNIT));
2192 /* FREE_PTR (void *) = *(void **)DATA_FRONT_PTR */
2193 free_ptr
2194 = build_unary_op
2195 (INDIRECT_REF, NULL_TREE,
2196 convert (build_pointer_type (ptr_void_type_node), data_front_ptr));
2198 else
2199 free_ptr = data_ptr;
2201 return build_call_n_expr (free_decl, 1, free_ptr);
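/* For illustration, the super-aligned branch above is equivalent to

     free (*(void **) ((char *) data_ptr - sizeof (void *)));

   i.e. free receives the raw allocator result saved by maybe_wrap_malloc,
   not the aligned data address.  */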
2204 /* Build a GCC tree to call an allocation or deallocation function.
2205 If GNU_OBJ is nonzero, it is an object to deallocate. Otherwise,
2206 generate an allocator.
2208 GNU_SIZE is the number of bytes to allocate and GNU_TYPE is the contained
2209 object type, used to determine the to-be-honored address alignment.
2210 GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the storage
2211 pool to use. If not present, malloc and free are used. GNAT_NODE is used
2212 to provide an error location for restriction violation messages. */
2214 tree
2215 build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, tree gnu_type,
2216 Entity_Id gnat_proc, Entity_Id gnat_pool,
2217 Node_Id gnat_node)
2219 gnu_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (gnu_size, gnu_obj);
2221 /* Explicit proc to call? This one is assumed to deal with the type
2222 alignment constraints. */
2223 if (Present (gnat_proc))
2224 return build_call_alloc_dealloc_proc (gnu_obj, gnu_size, gnu_type,
2225 gnat_proc, gnat_pool);
2227 /* Otherwise, object to "free" or "malloc" with possible special processing
2228 for alignments stricter than what the default allocator honors. */
2229 else if (gnu_obj)
2230 return maybe_wrap_free (gnu_obj, gnu_type);
2231 else
2233 /* Assert that we no longer can be called with this special pool. */
2234 gcc_assert (gnat_pool != -1);
2236 /* Check that we aren't violating the associated restriction. */
2237 if (!(Nkind (gnat_node) == N_Allocator && Comes_From_Source (gnat_node)))
2238 Check_No_Implicit_Heap_Alloc (gnat_node);
2240 return maybe_wrap_malloc (gnu_size, gnu_type, gnat_node);
2244 /* Build a GCC tree that corresponds to allocating an object of TYPE whose
2245 initial value is INIT, if INIT is nonzero. Convert the expression to
2246 RESULT_TYPE, which must be some pointer type, and return the result.
2248 GNAT_PROC and GNAT_POOL optionally give the procedure to call and
2249 the storage pool to use. GNAT_NODE is used to provide an error
2250 location for restriction violation messages. If IGNORE_INIT_TYPE is
2251 true, ignore the type of INIT for the purpose of determining the size;
2252 this will cause the maximum size to be allocated if TYPE is of
2253 self-referential size. */
2255 tree
2256 build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
2257 Entity_Id gnat_pool, Node_Id gnat_node, bool ignore_init_type)
2259 tree size, storage, storage_deref, storage_init;
2261 /* If the initializer is present and is a NULL_EXPR, just return a new one. */
2262 if (init && TREE_CODE (init) == NULL_EXPR)
2263 return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));
2265 /* If the initializer is present and is a COND_EXPR, deal with each branch. */
2266 else if (init && TREE_CODE (init) == COND_EXPR)
2267 return build3 (COND_EXPR, result_type, TREE_OPERAND (init, 0),
2268 build_allocator (type, TREE_OPERAND (init, 1), result_type,
2269 gnat_proc, gnat_pool, gnat_node,
2270 ignore_init_type),
2271 build_allocator (type, TREE_OPERAND (init, 2), result_type,
2272 gnat_proc, gnat_pool, gnat_node,
2273 ignore_init_type));
2275 /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
2276 sizes of the object and its template. Allocate the whole thing and
2277 fill in the parts that are known. */
2278 else if (TYPE_IS_FAT_OR_THIN_POINTER_P (result_type))
2280 tree storage_type
2281 = build_unc_object_type_from_ptr (result_type, type,
2282 get_identifier ("ALLOC"), false);
2283 tree template_type = TREE_TYPE (TYPE_FIELDS (storage_type));
2284 tree storage_ptr_type = build_pointer_type (storage_type);
2286 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
2287 init);
2289 /* If the size overflows, pass -1 so Storage_Error will be raised. */
2290 if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
2291 size = size_int (-1);
2293 storage = build_call_alloc_dealloc (NULL_TREE, size, storage_type,
2294 gnat_proc, gnat_pool, gnat_node);
2295 storage = convert (storage_ptr_type, gnat_protect_expr (storage));
2296 storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2297 TREE_THIS_NOTRAP (storage_deref) = 1;
2299 /* If there is an initializing expression, then make a constructor for
2300 the entire object including the bounds and copy it into the object.
2301 If there is no initializing expression, just set the bounds. */
2302 if (init)
2304 vec<constructor_elt, va_gc> *v;
2305 vec_alloc (v, 2);
2307 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (storage_type),
2308 build_template (template_type, type, init));
2309 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (storage_type)),
2310 init);
2311 storage_init
2312 = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref,
2313 gnat_build_constructor (storage_type, v));
2315 else
2316 storage_init
2317 = build_binary_op (INIT_EXPR, NULL_TREE,
2318 build_component_ref (storage_deref, NULL_TREE,
2319 TYPE_FIELDS (storage_type),
2320 false),
2321 build_template (template_type, type, NULL_TREE));
2323 return build2 (COMPOUND_EXPR, result_type,
2324 storage_init, convert (result_type, storage));
2327 size = TYPE_SIZE_UNIT (type);
2329 /* If we have an initializing expression, see if its size is simpler
2330 than the size from the type. */
2331 if (!ignore_init_type && init && TYPE_SIZE_UNIT (TREE_TYPE (init))
2332 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
2333 || CONTAINS_PLACEHOLDER_P (size)))
2334 size = TYPE_SIZE_UNIT (TREE_TYPE (init));
2336 /* If the size is still self-referential, reference the initializing
2337 expression, if it is present. If not, this must have been a
2338 call to allocate a library-level object, in which case we use
2339 the maximum size. */
2340 if (CONTAINS_PLACEHOLDER_P (size))
2342 if (!ignore_init_type && init)
2343 size = substitute_placeholder_in_expr (size, init);
2344 else
2345 size = max_size (size, true);
2348 /* If the size overflows, pass -1 so Storage_Error will be raised. */
2349 if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
2350 size = size_int (-1);
2352 storage = convert (result_type,
2353 build_call_alloc_dealloc (NULL_TREE, size, type,
2354 gnat_proc, gnat_pool,
2355 gnat_node));
2357 /* If we have an initial value, protect the new address, assign the value
2358 and return the address with a COMPOUND_EXPR. */
2359 if (init)
2361 storage = gnat_protect_expr (storage);
2362 storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2363 TREE_THIS_NOTRAP (storage_deref) = 1;
2364 storage_init
2365 = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref, init);
2366 return build2 (COMPOUND_EXPR, result_type, storage_init, storage);
2369 return storage;
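/* For illustration, in the default case with an initializer the result has
   the overall shape

     (*TMP = INIT, TMP)    where TMP = (RESULT_TYPE) allocated storage

   i.e. a COMPOUND_EXPR whose value is the protected storage pointer.  */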
2372 /* Indicate that we need to take the address of T and that it therefore
2373 should not be allocated in a register. Returns true if successful. */
2375 bool
2376 gnat_mark_addressable (tree t)
2378 while (true)
2379 switch (TREE_CODE (t))
2381 case ADDR_EXPR:
2382 case COMPONENT_REF:
2383 case ARRAY_REF:
2384 case ARRAY_RANGE_REF:
2385 case REALPART_EXPR:
2386 case IMAGPART_EXPR:
2387 case VIEW_CONVERT_EXPR:
2388 case NON_LVALUE_EXPR:
2389 CASE_CONVERT:
2390 t = TREE_OPERAND (t, 0);
2391 break;
2393 case COMPOUND_EXPR:
2394 t = TREE_OPERAND (t, 1);
2395 break;
2397 case CONSTRUCTOR:
2398 TREE_ADDRESSABLE (t) = 1;
2399 return true;
2401 case VAR_DECL:
2402 case PARM_DECL:
2403 case RESULT_DECL:
2404 TREE_ADDRESSABLE (t) = 1;
2405 return true;
2407 case FUNCTION_DECL:
2408 TREE_ADDRESSABLE (t) = 1;
2409 return true;
2411 case CONST_DECL:
2412 return DECL_CONST_CORRESPONDING_VAR (t)
2413 && gnat_mark_addressable (DECL_CONST_CORRESPONDING_VAR (t));
2415 default:
2416 return true;
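/* For illustration (X, I and F are hypothetical): given a reference of the
   form

     COMPONENT_REF (ARRAY_REF (VAR_DECL x, i), f)

   the loop above peels the COMPONENT_REF and the ARRAY_REF, then sets
   TREE_ADDRESSABLE on x so that x is not allocated in a register.  */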
2420 /* Save EXP for later use or reuse. This is equivalent to save_expr in tree.c
2421 but we know how to handle our own nodes. */
2423 tree
2424 gnat_save_expr (tree exp)
2426 tree type = TREE_TYPE (exp);
2427 enum tree_code code = TREE_CODE (exp);
2429 if (TREE_CONSTANT (exp) || code == SAVE_EXPR || code == NULL_EXPR)
2430 return exp;
2432 if (code == UNCONSTRAINED_ARRAY_REF)
2434 tree t = build1 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)));
2435 TREE_READONLY (t) = TYPE_READONLY (type);
2436 return t;
2439 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2440 This may be more efficient, but will also allow us to more easily find
2441 the match for the PLACEHOLDER_EXPR. */
2442 if (code == COMPONENT_REF
2443 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2444 return build3 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)),
2445 TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));
2447 return save_expr (exp);
2450 /* Protect EXP for immediate reuse. This is a variant of gnat_save_expr that
2451 is optimized under the assumption that EXP's value doesn't change before
2452 its subsequent reuse(s) except through its potential reevaluation. */
2454 tree
2455 gnat_protect_expr (tree exp)
2457 tree type = TREE_TYPE (exp);
2458 enum tree_code code = TREE_CODE (exp);
2460 if (TREE_CONSTANT (exp) || code == SAVE_EXPR || code == NULL_EXPR)
2461 return exp;
2463 /* If EXP has no side effects, we theoretically don't need to do anything.
2464 However, we may be recursively passed more and more complex expressions
2465 involving checks which will be reused multiple times and eventually be
2466 unshared for gimplification; in order to avoid a complexity explosion
2467 at that point, we protect any expressions more complex than a simple
2468 arithmetic expression. */
2469 if (!TREE_SIDE_EFFECTS (exp))
2471 tree inner = skip_simple_arithmetic (exp);
2472 if (!EXPR_P (inner) || REFERENCE_CLASS_P (inner))
2473 return exp;
2476 /* If this is a conversion, protect what's inside the conversion. */
2477 if (code == NON_LVALUE_EXPR
2478 || CONVERT_EXPR_CODE_P (code)
2479 || code == VIEW_CONVERT_EXPR)
2480 return build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2482 /* If we're indirectly referencing something, we only need to protect the
2483 address since the data itself can't change in these situations. */
2484 if (code == INDIRECT_REF || code == UNCONSTRAINED_ARRAY_REF)
2486 tree t = build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2487 TREE_READONLY (t) = TYPE_READONLY (type);
2488 return t;
2491 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2492 This may be more efficient, but will also allow us to more easily find
2493 the match for the PLACEHOLDER_EXPR. */
2494 if (code == COMPONENT_REF
2495 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2496 return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
2497 TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));
2499 /* If this is a fat pointer or something that can be placed in a register,
2500 just make a SAVE_EXPR. Likewise for a CALL_EXPR as large objects are
2501 returned via invisible reference in most ABIs so the temporary will
2502 directly be filled by the callee. */
2503 if (TYPE_IS_FAT_POINTER_P (type)
2504 || TYPE_MODE (type) != BLKmode
2505 || code == CALL_EXPR)
2506 return save_expr (exp);
2508 /* Otherwise, take a reference: protect the address and dereference it. */
2509 return
2510 build_unary_op (INDIRECT_REF, type,
2511 save_expr (build_unary_op (ADDR_EXPR,
2512 build_reference_type (type),
2513 exp)));
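/* For illustration (P is hypothetical): protecting an indirect reference
   only saves the address, since the designated data cannot change here:

     *p    becomes    *SAVE_EXPR <p>

   while a BLKmode object that is not a fat pointer is protected by taking
   its address, saving that, and dereferencing the result.  */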
2516 /* This is equivalent to stabilize_reference_1 in tree.c but we take an extra
2517 argument to force evaluation of everything. */
2519 static tree
2520 gnat_stabilize_reference_1 (tree e, bool force)
2522 enum tree_code code = TREE_CODE (e);
2523 tree type = TREE_TYPE (e);
2524 tree result;
2526 /* We cannot ignore const expressions because one might be a reference
2527 to a const array whose index contains side-effects. But we can
2528 ignore things that are actual constants or that have already been
2529 handled by this function. */
2530 if (TREE_CONSTANT (e) || code == SAVE_EXPR)
2531 return e;
2533 switch (TREE_CODE_CLASS (code))
2535 case tcc_exceptional:
2536 case tcc_declaration:
2537 case tcc_comparison:
2538 case tcc_expression:
2539 case tcc_reference:
2540 case tcc_vl_exp:
2541 /* If this is a COMPONENT_REF of a fat pointer, save the entire
2542 fat pointer. This may be more efficient, but will also allow
2543 us to more easily find the match for the PLACEHOLDER_EXPR. */
2544 if (code == COMPONENT_REF
2545 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (e, 0))))
2546 result
2547 = build3 (code, type,
2548 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force),
2549 TREE_OPERAND (e, 1), TREE_OPERAND (e, 2));
2550 /* If the expression has side-effects, then encase it in a SAVE_EXPR
2551 so that it will only be evaluated once. */
2552 /* The tcc_reference and tcc_comparison classes could be handled as
2553 below, but it is generally faster to only evaluate them once. */
2554 else if (TREE_SIDE_EFFECTS (e) || force)
2555 return save_expr (e);
2556 else
2557 return e;
2558 break;
2560 case tcc_binary:
2561 /* Recursively stabilize each operand. */
2562 result
2563 = build2 (code, type,
2564 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force),
2565 gnat_stabilize_reference_1 (TREE_OPERAND (e, 1), force));
2566 break;
2568 case tcc_unary:
2569 /* Recursively stabilize each operand. */
2570 result
2571 = build1 (code, type,
2572 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force));
2573 break;
2575 default:
2576 gcc_unreachable ();
2579 /* See similar handling in gnat_stabilize_reference. */
2580 TREE_READONLY (result) = TREE_READONLY (e);
2581 TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (e);
2582 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
2584 if (code == INDIRECT_REF
2585 || code == UNCONSTRAINED_ARRAY_REF
2586 || code == ARRAY_REF
2587 || code == ARRAY_RANGE_REF)
2588 TREE_THIS_NOTRAP (result) = TREE_THIS_NOTRAP (e);
2590 return result;
2593 /* This is equivalent to stabilize_reference in tree.c but we know how to
2594 handle our own nodes and we take extra arguments. FORCE says whether to
2595 force evaluation of everything. We set SUCCESS to true unless we walk
2596 through something we don't know how to stabilize. */
2598 tree
2599 gnat_stabilize_reference (tree ref, bool force, bool *success)
2601 tree type = TREE_TYPE (ref);
2602 enum tree_code code = TREE_CODE (ref);
2603 tree result;
2605 /* Assume we'll succeed unless proven otherwise. */
2606 if (success)
2607 *success = true;
2609 switch (code)
2611 case CONST_DECL:
2612 case VAR_DECL:
2613 case PARM_DECL:
2614 case RESULT_DECL:
2615 /* No action is needed in this case. */
2616 return ref;
2618 case ADDR_EXPR:
2619 CASE_CONVERT:
2620 case FLOAT_EXPR:
2621 case FIX_TRUNC_EXPR:
2622 case VIEW_CONVERT_EXPR:
2623 result
2624 = build1 (code, type,
2625 gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2626 success));
2627 break;
2629 case INDIRECT_REF:
2630 case UNCONSTRAINED_ARRAY_REF:
2631 result = build1 (code, type,
2632 gnat_stabilize_reference_1 (TREE_OPERAND (ref, 0),
2633 force));
2634 break;
2636 case COMPONENT_REF:
2637 result = build3 (COMPONENT_REF, type,
2638 gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2639 success),
2640 TREE_OPERAND (ref, 1), NULL_TREE);
2641 break;
2643 case BIT_FIELD_REF:
2644 result = build3 (BIT_FIELD_REF, type,
2645 gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2646 success),
2647 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
2648 break;
2650 case ARRAY_REF:
2651 case ARRAY_RANGE_REF:
2652 result = build4 (code, type,
2653 gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2654 success),
2655 gnat_stabilize_reference_1 (TREE_OPERAND (ref, 1),
2656 force),
2657 NULL_TREE, NULL_TREE);
2658 break;
2660 case CALL_EXPR:
2661 result = gnat_stabilize_reference_1 (ref, force);
2662 break;
2664 case COMPOUND_EXPR:
2665 result = build2 (COMPOUND_EXPR, type,
2666 gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2667 success),
2668 gnat_stabilize_reference (TREE_OPERAND (ref, 1), force,
2669 success));
2670 break;
2672 case CONSTRUCTOR:
2673 /* Constructors with 1 element are used extensively to formally
2674 convert objects to special wrapping types. */
2675 if (TREE_CODE (type) == RECORD_TYPE
2676 && vec_safe_length (CONSTRUCTOR_ELTS (ref)) == 1)
2678 tree index = (*CONSTRUCTOR_ELTS (ref))[0].index;
2679 tree value = (*CONSTRUCTOR_ELTS (ref))[0].value;
2680 result
2681 = build_constructor_single (type, index,
2682 gnat_stabilize_reference_1 (value,
2683 force));
2685 else
2687 if (success)
2688 *success = false;
2689 return ref;
2691 break;
2693 case ERROR_MARK:
2694 ref = error_mark_node;
2696 /* ... fall through to failure ... */
2698 /* If arg isn't a kind of lvalue we recognize, make no change.
2699 Caller should recognize the error for an invalid lvalue. */
2700 default:
2701 if (success)
2702 *success = false;
2703 return ref;
2706 /* TREE_THIS_VOLATILE and TREE_SIDE_EFFECTS set on the initial expression
2707 may not be sustained across some paths, such as the way via build1 for
2708 INDIRECT_REF. We reset those flags here in the general case, which is
2709 consistent with the GCC version of this routine.
2711 Special care should be taken regarding TREE_SIDE_EFFECTS, because some
2712 paths introduce side-effects where there was none initially (e.g. if a
2713 SAVE_EXPR is built) and we also want to keep track of that. */
2714 TREE_READONLY (result) = TREE_READONLY (ref);
2715 TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (ref);
2716 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
2718 if (code == INDIRECT_REF
2719 || code == UNCONSTRAINED_ARRAY_REF
2720 || code == ARRAY_REF
2721 || code == ARRAY_RANGE_REF)
2722 TREE_THIS_NOTRAP (result) = TREE_THIS_NOTRAP (ref);
2724 return result;
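/* For illustration (A, F and I are hypothetical): stabilizing

     ARRAY_REF (a, f (i))

   recurses on the base a and wraps the index f (i) in a SAVE_EXPR, so a
   later reuse of the stabilized reference does not reevaluate f.  */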
2727 /* If EXPR is an expression that is invariant in the current function, in the
2728 sense that it can be evaluated anywhere in the function and any number of
2729 times, return EXPR or an equivalent expression. Otherwise return NULL. */
2731 tree
2732 gnat_invariant_expr (tree expr)
2734 tree type = TREE_TYPE (expr), t;
2736 expr = remove_conversions (expr, false);
2738 while ((TREE_CODE (expr) == CONST_DECL
2739 || (TREE_CODE (expr) == VAR_DECL && TREE_READONLY (expr)))
2740 && decl_function_context (expr) == current_function_decl
2741 && DECL_INITIAL (expr))
2742 expr = remove_conversions (DECL_INITIAL (expr), false);
2744 if (TREE_CONSTANT (expr))
2745 return fold_convert (type, expr);
2747 t = expr;
2749 while (true)
2751 switch (TREE_CODE (t))
2753 case COMPONENT_REF:
2754 if (TREE_OPERAND (t, 2) != NULL_TREE)
2755 return NULL_TREE;
2756 break;
2758 case ARRAY_REF:
2759 case ARRAY_RANGE_REF:
2760 if (!TREE_CONSTANT (TREE_OPERAND (t, 1))
2761 || TREE_OPERAND (t, 2) != NULL_TREE
2762 || TREE_OPERAND (t, 3) != NULL_TREE)
2763 return NULL_TREE;
2764 break;
2766 case BIT_FIELD_REF:
2767 case VIEW_CONVERT_EXPR:
2768 case REALPART_EXPR:
2769 case IMAGPART_EXPR:
2770 break;
2772 case INDIRECT_REF:
2773 if (!TREE_READONLY (t)
2774 || TREE_SIDE_EFFECTS (t)
2775 || !TREE_THIS_NOTRAP (t))
2776 return NULL_TREE;
2777 break;
2779 default:
2780 goto object;
2783 t = TREE_OPERAND (t, 0);
2786 object:
2787 if (TREE_SIDE_EFFECTS (t))
2788 return NULL_TREE;
2790 if (TREE_CODE (t) == CONST_DECL
2791 && (DECL_EXTERNAL (t)
2792 || decl_function_context (t) != current_function_decl))
2793 return fold_convert (type, expr);
2795 if (!TREE_READONLY (t))
2796 return NULL_TREE;
2798 if (TREE_CODE (t) == PARM_DECL)
2799 return fold_convert (type, expr);
2801 if (TREE_CODE (t) == VAR_DECL
2802 && (DECL_EXTERNAL (t)
2803 || decl_function_context (t) != current_function_decl))
2804 return fold_convert (type, expr);
2806 return NULL_TREE;
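/* For illustration: a COMPONENT_REF of a read-only PARM_DECL of the current
   function is returned as invariant, whereas any reference based on a
   writable local variable yields NULL_TREE.  */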