gcc/ada/utils2.c (official-gcc.git)
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S 2 *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2004, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 2, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License distributed with GNAT; see file COPYING. If not, write *
19 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
20 * MA 02111-1307, USA. *
21 * *
22 * GNAT was originally developed by the GNAT team at New York University. *
23 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 * *
25 ****************************************************************************/
27 #include "config.h"
28 #include "system.h"
29 #include "coretypes.h"
30 #include "tm.h"
31 #include "tree.h"
32 #include "flags.h"
33 #include "output.h"
34 #include "ada.h"
35 #include "types.h"
36 #include "atree.h"
37 #include "stringt.h"
38 #include "uintp.h"
39 #include "fe.h"
40 #include "elists.h"
41 #include "nlists.h"
42 #include "sinfo.h"
43 #include "einfo.h"
44 #include "ada-tree.h"
45 #include "gigi.h"
47 static tree find_common_type (tree, tree);
48 static int contains_save_expr_p (tree);
49 static tree contains_null_expr (tree);
50 static tree compare_arrays (tree, tree, tree);
51 static tree nonbinary_modular_operation (enum tree_code, tree, tree, tree);
52 static tree build_simple_component_ref (tree, tree, tree, int);
54 /* Prepare expr to be an argument of a TRUTH_NOT_EXPR or other logical
55 operation.
57 This preparation consists of taking the ordinary representation of
58 an expression expr and producing a valid tree boolean expression
59 describing whether expr is nonzero. We could simply always do
61 build_binary_op (NE_EXPR, expr, integer_zero_node, 1),
63 but we optimize comparisons, &&, ||, and !.
65 The resulting type should always be the same as the input type.
66 This function is simpler than the corresponding C version since
67 the only possible operands will be things of Boolean type. */
69 tree
70 gnat_truthvalue_conversion (tree expr)
72 tree type = TREE_TYPE (expr);
74 switch (TREE_CODE (expr))
76 case EQ_EXPR: case NE_EXPR: case LE_EXPR: case GE_EXPR:
77 case LT_EXPR: case GT_EXPR:
78 case TRUTH_ANDIF_EXPR:
79 case TRUTH_ORIF_EXPR:
80 case TRUTH_AND_EXPR:
81 case TRUTH_OR_EXPR:
82 case TRUTH_XOR_EXPR:
83 case ERROR_MARK:
84 return expr;
86 case COND_EXPR:
87 /* Distribute the conversion into the arms of a COND_EXPR. */
88 return fold
89 (build (COND_EXPR, type, TREE_OPERAND (expr, 0),
90 gnat_truthvalue_conversion (TREE_OPERAND (expr, 1)),
91 gnat_truthvalue_conversion (TREE_OPERAND (expr, 2))));
93 default:
94 return build_binary_op (NE_EXPR, type, expr,
95 convert (type, integer_zero_node));
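/* For example, given a plain Boolean-typed object B, the default case above
   builds the equivalent of "B /= 0" in B's own type, whereas an expression
   such as "X and then Y" (TRUTH_ANDIF_EXPR) is already a valid truth value
   and is returned unchanged.  */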
99 /* Return the base type of TYPE. */
101 tree
102 get_base_type (tree type)
104 if (TREE_CODE (type) == RECORD_TYPE
105 && TYPE_LEFT_JUSTIFIED_MODULAR_P (type))
106 type = TREE_TYPE (TYPE_FIELDS (type));
108 while (TREE_TYPE (type) != 0
109 && (TREE_CODE (type) == INTEGER_TYPE
110 || TREE_CODE (type) == REAL_TYPE))
111 type = TREE_TYPE (type);
113 return type;
116 /* Likewise, but only return types known to the Ada source. */
117 tree
118 get_ada_base_type (tree type)
120 while (TREE_TYPE (type) != 0
121 && (TREE_CODE (type) == INTEGER_TYPE
122 || TREE_CODE (type) == REAL_TYPE)
123 && ! TYPE_EXTRA_SUBTYPE_P (type))
124 type = TREE_TYPE (type);
126 return type;
129 /* EXP is a GCC tree representing an address. See if we can find how
130 strictly the object at that address is aligned. Return that alignment
131 in bits. If we don't know anything about the alignment, return 0. */
133 unsigned int
134 known_alignment (tree exp)
136 unsigned int this_alignment;
137 unsigned int lhs, rhs;
138 unsigned int type_alignment;
140 /* For pointer expressions, we know that the designated object is always at
141 least as strictly aligned as the designated subtype, so we account for
142 both type and expression information in this case.
 144 Beware that we can still get a dummy designated subtype here (e.g. Taft
 145 Amendment types), in which case the alignment information is meaningless
 146 and should be ignored.
148 We always compute a type_alignment value and return the MAX of it
149 compared with what we get from the expression tree. Just set the
150 type_alignment value to 0 when the type information is to be ignored. */
151 type_alignment
152 = ((POINTER_TYPE_P (TREE_TYPE (exp))
153 && ! TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp))))
154 ? TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))) : 0);
156 switch (TREE_CODE (exp))
158 case CONVERT_EXPR:
159 case NOP_EXPR:
160 case NON_LVALUE_EXPR:
161 /* Conversions between pointers and integers don't change the alignment
162 of the underlying object. */
163 this_alignment = known_alignment (TREE_OPERAND (exp, 0));
164 break;
166 case PLUS_EXPR:
167 case MINUS_EXPR:
 168 /* If two addresses are added, the alignment of the result is the
 169 minimum of the two alignments. */
170 lhs = known_alignment (TREE_OPERAND (exp, 0));
171 rhs = known_alignment (TREE_OPERAND (exp, 1));
172 this_alignment = MIN (lhs, rhs);
173 break;
175 case INTEGER_CST:
 176 /* The first part of this represents the lowest bit in the constant,
 177 but it is in bytes, not bits. */
178 this_alignment
179 = MIN (BITS_PER_UNIT
180 * (TREE_INT_CST_LOW (exp) & - TREE_INT_CST_LOW (exp)),
181 BIGGEST_ALIGNMENT);
182 break;
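/* For example, for the address constant 0x1004 (4100), 4100 & -4100 = 4,
   so the object is known to be at least 4 * BITS_PER_UNIT = 32-bit aligned
   on a byte-addressable target, capped by BIGGEST_ALIGNMENT as above.  */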
184 case MULT_EXPR:
185 /* If we know the alignment of just one side, use it. Otherwise,
186 use the product of the alignments. */
187 lhs = known_alignment (TREE_OPERAND (exp, 0));
188 rhs = known_alignment (TREE_OPERAND (exp, 1));
190 if (lhs == 0 || rhs == 0)
191 this_alignment = MIN (BIGGEST_ALIGNMENT, MAX (lhs, rhs));
192 else
193 this_alignment = MIN (BIGGEST_ALIGNMENT, lhs * rhs);
194 break;
196 case ADDR_EXPR:
197 this_alignment = expr_align (TREE_OPERAND (exp, 0));
198 break;
200 default:
201 this_alignment = 0;
202 break;
205 return MAX (type_alignment, this_alignment);
208 /* We have a comparison or assignment operation on two types, T1 and T2,
209 which are both either array types or both record types.
210 Return the type that both operands should be converted to, if any.
211 Otherwise return zero. */
213 static tree
214 find_common_type (tree t1, tree t2)
216 /* If either type is non-BLKmode, use it. Note that we know that we will
217 not have any alignment problems since if we did the non-BLKmode
218 type could not have been used. */
219 if (TYPE_MODE (t1) != BLKmode)
220 return t1;
221 else if (TYPE_MODE (t2) != BLKmode)
222 return t2;
224 /* Otherwise, return the type that has a constant size. */
225 if (TREE_CONSTANT (TYPE_SIZE (t1)))
226 return t1;
227 else if (TREE_CONSTANT (TYPE_SIZE (t2)))
228 return t2;
230 /* In this case, both types have variable size. It's probably
231 best to leave the "type mismatch" because changing it could
 232 cause a bad self-referential reference. */
233 return 0;
236 /* See if EXP contains a SAVE_EXPR in a position where we would
237 normally put it.
239 ??? This is a real kludge, but is probably the best approach short
240 of some very general solution. */
242 static int
243 contains_save_expr_p (tree exp)
245 switch (TREE_CODE (exp))
247 case SAVE_EXPR:
248 return 1;
250 case ADDR_EXPR: case INDIRECT_REF:
251 case COMPONENT_REF:
252 case NOP_EXPR: case CONVERT_EXPR: case VIEW_CONVERT_EXPR:
253 return contains_save_expr_p (TREE_OPERAND (exp, 0));
255 case CONSTRUCTOR:
256 return (CONSTRUCTOR_ELTS (exp) != 0
257 && contains_save_expr_p (CONSTRUCTOR_ELTS (exp)));
259 case TREE_LIST:
260 return (contains_save_expr_p (TREE_VALUE (exp))
261 || (TREE_CHAIN (exp) != 0
262 && contains_save_expr_p (TREE_CHAIN (exp))));
264 default:
265 return 0;
269 /* See if EXP contains a NULL_EXPR in an expression we use for sizes. Return
270 it if so. This is used to detect types whose sizes involve computations
271 that are known to raise Constraint_Error. */
273 static tree
274 contains_null_expr (tree exp)
276 tree tem;
278 if (TREE_CODE (exp) == NULL_EXPR)
279 return exp;
281 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
283 case '1':
284 return contains_null_expr (TREE_OPERAND (exp, 0));
286 case '<': case '2':
287 tem = contains_null_expr (TREE_OPERAND (exp, 0));
288 if (tem != 0)
289 return tem;
291 return contains_null_expr (TREE_OPERAND (exp, 1));
293 case 'e':
294 switch (TREE_CODE (exp))
296 case SAVE_EXPR:
297 return contains_null_expr (TREE_OPERAND (exp, 0));
299 case COND_EXPR:
300 tem = contains_null_expr (TREE_OPERAND (exp, 0));
301 if (tem != 0)
302 return tem;
304 tem = contains_null_expr (TREE_OPERAND (exp, 1));
305 if (tem != 0)
306 return tem;
308 return contains_null_expr (TREE_OPERAND (exp, 2));
310 default:
311 return 0;
314 default:
315 return 0;
319 /* Return an expression tree representing an equality comparison of
320 A1 and A2, two objects of ARRAY_TYPE. The returned expression should
 321 be of type RESULT_TYPE.
323 Two arrays are equal in one of two ways: (1) if both have zero length
324 in some dimension (not necessarily the same dimension) or (2) if the
325 lengths in each dimension are equal and the data is equal. We perform the
326 length tests in as efficient a manner as possible. */
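/* For example, an array with bounds 1 .. 0 compares equal to one with
   bounds 5 .. 2: both have zero length in that dimension, so the data
   comparison is skipped entirely (case (1) above).  */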
328 static tree
329 compare_arrays (tree result_type, tree a1, tree a2)
331 tree t1 = TREE_TYPE (a1);
332 tree t2 = TREE_TYPE (a2);
333 tree result = convert (result_type, integer_one_node);
334 tree a1_is_null = convert (result_type, integer_zero_node);
335 tree a2_is_null = convert (result_type, integer_zero_node);
336 int length_zero_p = 0;
338 /* Process each dimension separately and compare the lengths. If any
 339 dimension has a size known to be zero, set LENGTH_ZERO_P to 1 to
340 suppress the comparison of the data. */
341 while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
343 tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
344 tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
345 tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
346 tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
347 tree bt = get_base_type (TREE_TYPE (lb1));
348 tree length1 = fold (build (MINUS_EXPR, bt, ub1, lb1));
349 tree length2 = fold (build (MINUS_EXPR, bt, ub2, lb2));
350 tree nbt;
351 tree tem;
352 tree comparison, this_a1_is_null, this_a2_is_null;
354 /* If the length of the first array is a constant, swap our operands
355 unless the length of the second array is the constant zero.
356 Note that we have set the `length' values to the length - 1. */
357 if (TREE_CODE (length1) == INTEGER_CST
358 && ! integer_zerop (fold (build (PLUS_EXPR, bt, length2,
359 convert (bt, integer_one_node)))))
361 tem = a1, a1 = a2, a2 = tem;
362 tem = t1, t1 = t2, t2 = tem;
363 tem = lb1, lb1 = lb2, lb2 = tem;
364 tem = ub1, ub1 = ub2, ub2 = tem;
365 tem = length1, length1 = length2, length2 = tem;
366 tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
369 /* If the length of this dimension in the second array is the constant
370 zero, we can just go inside the original bounds for the first
371 array and see if last < first. */
372 if (integer_zerop (fold (build (PLUS_EXPR, bt, length2,
373 convert (bt, integer_one_node)))))
375 tree ub = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
376 tree lb = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
378 comparison = build_binary_op (LT_EXPR, result_type, ub, lb);
379 comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
380 length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
382 length_zero_p = 1;
383 this_a1_is_null = comparison;
384 this_a2_is_null = convert (result_type, integer_one_node);
 387 /* If the length is some other constant value, we know that
 388 this dimension in the first array cannot be superflat, so we
389 can just use its length from the actual stored bounds. */
390 else if (TREE_CODE (length2) == INTEGER_CST)
392 ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
393 lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
394 ub2 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
395 lb2 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
396 nbt = get_base_type (TREE_TYPE (ub1));
398 comparison
399 = build_binary_op (EQ_EXPR, result_type,
400 build_binary_op (MINUS_EXPR, nbt, ub1, lb1),
401 build_binary_op (MINUS_EXPR, nbt, ub2, lb2));
403 /* Note that we know that UB2 and LB2 are constant and hence
404 cannot contain a PLACEHOLDER_EXPR. */
406 comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
407 length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
409 this_a1_is_null = build_binary_op (LT_EXPR, result_type, ub1, lb1);
410 this_a2_is_null = convert (result_type, integer_zero_node);
413 /* Otherwise compare the computed lengths. */
414 else
416 length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
417 length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);
419 comparison
420 = build_binary_op (EQ_EXPR, result_type, length1, length2);
422 this_a1_is_null
423 = build_binary_op (LT_EXPR, result_type, length1,
424 convert (bt, integer_zero_node));
425 this_a2_is_null
426 = build_binary_op (LT_EXPR, result_type, length2,
427 convert (bt, integer_zero_node));
430 result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
431 result, comparison);
433 a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
434 this_a1_is_null, a1_is_null);
435 a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
436 this_a2_is_null, a2_is_null);
438 t1 = TREE_TYPE (t1);
439 t2 = TREE_TYPE (t2);
442 /* Unless the size of some bound is known to be zero, compare the
443 data in the array. */
444 if (! length_zero_p)
446 tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));
448 if (type != 0)
449 a1 = convert (type, a1), a2 = convert (type, a2);
451 result = build_binary_op (TRUTH_ANDIF_EXPR, result_type, result,
452 fold (build (EQ_EXPR, result_type, a1, a2)));
456 /* The result is also true if both sizes are zero. */
457 result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
458 build_binary_op (TRUTH_ANDIF_EXPR, result_type,
459 a1_is_null, a2_is_null),
460 result);
462 /* If either operand contains SAVE_EXPRs, they have to be evaluated before
463 starting the comparison above since the place it would be otherwise
464 evaluated would be wrong. */
466 if (contains_save_expr_p (a1))
467 result = build (COMPOUND_EXPR, result_type, a1, result);
469 if (contains_save_expr_p (a2))
470 result = build (COMPOUND_EXPR, result_type, a2, result);
472 return result;
475 /* Compute the result of applying OP_CODE to LHS and RHS, where both are of
476 type TYPE. We know that TYPE is a modular type with a nonbinary
477 modulus. */
479 static tree
480 nonbinary_modular_operation (enum tree_code op_code,
481 tree type,
482 tree lhs,
483 tree rhs)
485 tree modulus = TYPE_MODULUS (type);
486 unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
487 unsigned int precision;
488 int unsignedp = 1;
489 tree op_type = type;
490 tree result;
492 /* If this is an addition of a constant, convert it to a subtraction
493 of a constant since we can do that faster. */
494 if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
495 rhs = fold (build (MINUS_EXPR, type, modulus, rhs)), op_code = MINUS_EXPR;
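/* For example, with a modulus of 7, adding the constant 5 is rewritten
   as subtracting 2, since (x + 5) mod 7 = (x - 2) mod 7.  */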
497 /* For the logical operations, we only need PRECISION bits. For
 498 addition and subtraction, we need one more and for multiplication we
499 need twice as many. But we never want to make a size smaller than
500 our size. */
501 if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
502 needed_precision += 1;
503 else if (op_code == MULT_EXPR)
504 needed_precision *= 2;
506 precision = MAX (needed_precision, TYPE_PRECISION (op_type));
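/* For example, with a modulus of 7, tree_floor_log2 (7) + 1 = 3 bits are
   needed to hold the values 0 .. 6; addition or subtraction can reach 4
   bits (up to 12, or down to -6), and multiplication can need 6 bits
   (up to 36), which is what the adjustments above provide for.  */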
508 /* Unsigned will do for everything but subtraction. */
509 if (op_code == MINUS_EXPR)
510 unsignedp = 0;
512 /* If our type is the wrong signedness or isn't wide enough, make a new
513 type and convert both our operands to it. */
514 if (TYPE_PRECISION (op_type) < precision
515 || TYPE_UNSIGNED (op_type) != unsignedp)
517 /* Copy the node so we ensure it can be modified to make it modular. */
518 op_type = copy_node (gnat_type_for_size (precision, unsignedp));
519 modulus = convert (op_type, modulus);
520 SET_TYPE_MODULUS (op_type, modulus);
521 TYPE_MODULAR_P (op_type) = 1;
522 lhs = convert (op_type, lhs);
523 rhs = convert (op_type, rhs);
526 /* Do the operation, then we'll fix it up. */
527 result = fold (build (op_code, op_type, lhs, rhs));
529 /* For multiplication, we have no choice but to do a full modulus
530 operation. However, we want to do this in the narrowest
531 possible size. */
532 if (op_code == MULT_EXPR)
534 tree div_type = copy_node (gnat_type_for_size (needed_precision, 1));
535 modulus = convert (div_type, modulus);
536 SET_TYPE_MODULUS (div_type, modulus);
537 TYPE_MODULAR_P (div_type) = 1;
538 result = convert (op_type,
539 fold (build (TRUNC_MOD_EXPR, div_type,
540 convert (div_type, result), modulus)));
543 /* For subtraction, add the modulus back if we are negative. */
544 else if (op_code == MINUS_EXPR)
546 result = save_expr (result);
547 result = fold (build (COND_EXPR, op_type,
548 build (LT_EXPR, integer_type_node, result,
549 convert (op_type, integer_zero_node)),
550 fold (build (PLUS_EXPR, op_type,
551 result, modulus)),
552 result));
555 /* For the other operations, subtract the modulus if we are >= it. */
556 else
558 result = save_expr (result);
559 result = fold (build (COND_EXPR, op_type,
560 build (GE_EXPR, integer_type_node,
561 result, modulus),
562 fold (build (MINUS_EXPR, op_type,
563 result, modulus)),
564 result));
567 return convert (type, result);
570 /* Make a binary operation of kind OP_CODE. RESULT_TYPE is the type
571 desired for the result. Usually the operation is to be performed
572 in that type. For MODIFY_EXPR and ARRAY_REF, RESULT_TYPE may be 0
573 in which case the type to be used will be derived from the operands.
575 This function is very much unlike the ones for C and C++ since we
576 have already done any type conversion and matching required. All we
577 have to do here is validate the work done by SEM and handle subtypes. */
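/* A typical call from the rest of gigi looks like

     gnu_result = build_binary_op (PLUS_EXPR, gnu_result_type,
                                   gnu_lhs, gnu_rhs);

   with both operands already converted by the caller as described above
   (the names here are only illustrative).  */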
579 tree
580 build_binary_op (enum tree_code op_code,
581 tree result_type,
582 tree left_operand,
583 tree right_operand)
585 tree left_type = TREE_TYPE (left_operand);
586 tree right_type = TREE_TYPE (right_operand);
587 tree left_base_type = get_base_type (left_type);
588 tree right_base_type = get_base_type (right_type);
589 tree operation_type = result_type;
590 tree best_type = 0;
591 tree modulus;
592 tree result;
593 int has_side_effects = 0;
595 if (operation_type != 0
596 && TREE_CODE (operation_type) == RECORD_TYPE
597 && TYPE_LEFT_JUSTIFIED_MODULAR_P (operation_type))
598 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
600 if (operation_type != 0
601 && ! AGGREGATE_TYPE_P (operation_type)
602 && TYPE_EXTRA_SUBTYPE_P (operation_type))
603 operation_type = get_base_type (operation_type);
605 modulus = (operation_type != 0 && TREE_CODE (operation_type) == INTEGER_TYPE
606 && TYPE_MODULAR_P (operation_type)
607 ? TYPE_MODULUS (operation_type) : 0);
609 switch (op_code)
611 case MODIFY_EXPR:
612 /* If there were any integral or pointer conversions on LHS, remove
613 them; we'll be putting them back below if needed. Likewise for
614 conversions between array and record types. But don't do this if
615 the right operand is not BLKmode (for packed arrays)
616 unless we are not changing the mode. */
617 while ((TREE_CODE (left_operand) == CONVERT_EXPR
618 || TREE_CODE (left_operand) == NOP_EXPR
619 || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
620 && (((INTEGRAL_TYPE_P (left_type)
621 || POINTER_TYPE_P (left_type))
622 && (INTEGRAL_TYPE_P (TREE_TYPE
623 (TREE_OPERAND (left_operand, 0)))
624 || POINTER_TYPE_P (TREE_TYPE
625 (TREE_OPERAND (left_operand, 0)))))
626 || (((TREE_CODE (left_type) == RECORD_TYPE
627 /* Don't remove conversions to left-justified modular
628 types. */
629 && ! TYPE_LEFT_JUSTIFIED_MODULAR_P (left_type))
630 || TREE_CODE (left_type) == ARRAY_TYPE)
631 && ((TREE_CODE (TREE_TYPE
632 (TREE_OPERAND (left_operand, 0)))
633 == RECORD_TYPE)
634 || (TREE_CODE (TREE_TYPE
635 (TREE_OPERAND (left_operand, 0)))
636 == ARRAY_TYPE))
637 && (TYPE_MODE (right_type) == BLKmode
638 || (TYPE_MODE (left_type)
639 == TYPE_MODE (TREE_TYPE
640 (TREE_OPERAND
641 (left_operand, 0))))))))
643 left_operand = TREE_OPERAND (left_operand, 0);
644 left_type = TREE_TYPE (left_operand);
647 if (operation_type == 0)
648 operation_type = left_type;
650 /* If the RHS has a conversion between record and array types and
651 an inner type is no worse, use it. Note we cannot do this for
652 modular types or types with TYPE_ALIGN_OK, since the latter
653 might indicate a conversion between a root type and a class-wide
654 type, which we must not remove. */
655 while (TREE_CODE (right_operand) == VIEW_CONVERT_EXPR
656 && ((TREE_CODE (right_type) == RECORD_TYPE
657 && ! TYPE_LEFT_JUSTIFIED_MODULAR_P (right_type)
658 && ! TYPE_ALIGN_OK (right_type)
659 && ! TYPE_IS_FAT_POINTER_P (right_type))
660 || TREE_CODE (right_type) == ARRAY_TYPE)
661 && (((TREE_CODE (TREE_TYPE (TREE_OPERAND (right_operand, 0)))
662 == RECORD_TYPE)
663 && ! (TYPE_LEFT_JUSTIFIED_MODULAR_P
664 (TREE_TYPE (TREE_OPERAND (right_operand, 0))))
665 && ! (TYPE_ALIGN_OK
666 (TREE_TYPE (TREE_OPERAND (right_operand, 0))))
667 && ! (TYPE_IS_FAT_POINTER_P
668 (TREE_TYPE (TREE_OPERAND (right_operand, 0)))))
669 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (right_operand, 0)))
670 == ARRAY_TYPE))
671 && (0 == (best_type
672 == find_common_type (right_type,
673 TREE_TYPE (TREE_OPERAND
674 (right_operand, 0))))
675 || right_type != best_type))
677 right_operand = TREE_OPERAND (right_operand, 0);
678 right_type = TREE_TYPE (right_operand);
681 /* If we are copying one array or record to another, find the best type
682 to use. */
683 if (((TREE_CODE (left_type) == ARRAY_TYPE
684 && TREE_CODE (right_type) == ARRAY_TYPE)
685 || (TREE_CODE (left_type) == RECORD_TYPE
686 && TREE_CODE (right_type) == RECORD_TYPE))
687 && (best_type = find_common_type (left_type, right_type)) != 0)
688 operation_type = best_type;
690 /* If a class-wide type may be involved, force use of the RHS type. */
691 if (TREE_CODE (right_type) == RECORD_TYPE && TYPE_ALIGN_OK (right_type))
692 operation_type = right_type;
694 /* Ensure everything on the LHS is valid. If we have a field reference,
695 strip anything that get_inner_reference can handle. Then remove any
 696 conversions between types having the same code and mode. Mark
697 VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE. When done, we must have
698 either an INDIRECT_REF or a decl. */
699 result = left_operand;
700 while (1)
702 tree restype = TREE_TYPE (result);
704 if (TREE_CODE (result) == COMPONENT_REF
705 || TREE_CODE (result) == ARRAY_REF
706 || TREE_CODE (result) == ARRAY_RANGE_REF)
707 while (handled_component_p (result))
708 result = TREE_OPERAND (result, 0);
709 else if (TREE_CODE (result) == REALPART_EXPR
710 || TREE_CODE (result) == IMAGPART_EXPR
711 || ((TREE_CODE (result) == NOP_EXPR
712 || TREE_CODE (result) == CONVERT_EXPR)
713 && (((TREE_CODE (restype)
714 == TREE_CODE (TREE_TYPE
715 (TREE_OPERAND (result, 0))))
716 && (TYPE_MODE (TREE_TYPE
717 (TREE_OPERAND (result, 0)))
718 == TYPE_MODE (restype)))
719 || TYPE_ALIGN_OK (restype))))
720 result = TREE_OPERAND (result, 0);
721 else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
723 TREE_ADDRESSABLE (result) = 1;
724 result = TREE_OPERAND (result, 0);
726 else
727 break;
730 if (TREE_CODE (result) != INDIRECT_REF && TREE_CODE (result) != NULL_EXPR
731 && ! DECL_P (result))
732 gigi_abort (516);
734 /* Convert the right operand to the operation type unless
735 it is either already of the correct type or if the type
736 involves a placeholder, since the RHS may not have the same
737 record type. */
738 if (operation_type != right_type
739 && (! CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type))))
741 /* For a variable-size type, with both BLKmode, convert using
742 CONVERT_EXPR instead of an unchecked conversion since we don't
743 need to make a temporary (and can't anyway). */
744 if (TREE_CODE (TYPE_SIZE (operation_type)) != INTEGER_CST
745 && TYPE_MODE (TREE_TYPE (right_operand)) == BLKmode
746 && TREE_CODE (right_operand) != UNCONSTRAINED_ARRAY_REF)
747 right_operand = build1 (CONVERT_EXPR, operation_type,
748 right_operand);
749 else
750 right_operand = convert (operation_type, right_operand);
752 right_type = operation_type;
755 /* If the modes differ, make up a bogus type and convert the RHS to
756 it. This can happen with packed types. */
757 if (TYPE_MODE (left_type) != TYPE_MODE (right_type))
759 tree new_type = copy_node (left_type);
761 TYPE_SIZE (new_type) = TYPE_SIZE (right_type);
762 TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (right_type);
763 TYPE_MAIN_VARIANT (new_type) = new_type;
764 right_operand = convert (new_type, right_operand);
767 has_side_effects = 1;
768 modulus = 0;
769 break;
771 case ARRAY_REF:
772 if (operation_type == 0)
773 operation_type = TREE_TYPE (left_type);
775 /* ... fall through ... */
777 case ARRAY_RANGE_REF:
779 /* First convert the right operand to its base type. This will
 780 prevent unneeded signedness conversions when sizetype is wider than
781 integer. */
782 right_operand = convert (right_base_type, right_operand);
783 right_operand = convert (TYPE_DOMAIN (left_type), right_operand);
785 if (! TREE_CONSTANT (right_operand)
786 || ! TREE_CONSTANT (TYPE_MIN_VALUE (right_type)))
787 gnat_mark_addressable (left_operand);
789 modulus = 0;
790 break;
792 case GE_EXPR:
793 case LE_EXPR:
794 case GT_EXPR:
795 case LT_EXPR:
796 if (POINTER_TYPE_P (left_type))
797 gigi_abort (501);
799 /* ... fall through ... */
801 case EQ_EXPR:
802 case NE_EXPR:
803 /* If either operand is a NULL_EXPR, just return a new one. */
804 if (TREE_CODE (left_operand) == NULL_EXPR)
805 return build (op_code, result_type,
806 build1 (NULL_EXPR, integer_type_node,
807 TREE_OPERAND (left_operand, 0)),
808 integer_zero_node);
810 else if (TREE_CODE (right_operand) == NULL_EXPR)
811 return build (op_code, result_type,
812 build1 (NULL_EXPR, integer_type_node,
813 TREE_OPERAND (right_operand, 0)),
814 integer_zero_node);
 816 /* If either object is a left-justified modular type, get the
817 fields from within. */
818 if (TREE_CODE (left_type) == RECORD_TYPE
819 && TYPE_LEFT_JUSTIFIED_MODULAR_P (left_type))
821 left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
822 left_operand);
823 left_type = TREE_TYPE (left_operand);
824 left_base_type = get_base_type (left_type);
827 if (TREE_CODE (right_type) == RECORD_TYPE
828 && TYPE_LEFT_JUSTIFIED_MODULAR_P (right_type))
830 right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
831 right_operand);
832 right_type = TREE_TYPE (right_operand);
833 right_base_type = get_base_type (right_type);
836 /* If both objects are arrays, compare them specially. */
837 if ((TREE_CODE (left_type) == ARRAY_TYPE
838 || (TREE_CODE (left_type) == INTEGER_TYPE
839 && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
840 && (TREE_CODE (right_type) == ARRAY_TYPE
841 || (TREE_CODE (right_type) == INTEGER_TYPE
842 && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
844 result = compare_arrays (result_type, left_operand, right_operand);
846 if (op_code == EQ_EXPR)
848 else if (op_code == NE_EXPR)
849 result = invert_truthvalue (result);
850 else
851 gigi_abort (502);
853 return result;
856 /* Otherwise, the base types must be the same unless the objects are
857 records. If we have records, use the best type and convert both
858 operands to that type. */
859 if (left_base_type != right_base_type)
861 if (TREE_CODE (left_base_type) == RECORD_TYPE
862 && TREE_CODE (right_base_type) == RECORD_TYPE)
864 /* The only way these are permitted to be the same is if both
865 types have the same name. In that case, one of them must
866 not be self-referential. Use that one as the best type.
867 Even better is if one is of fixed size. */
868 best_type = 0;
870 if (TYPE_NAME (left_base_type) == 0
871 || TYPE_NAME (left_base_type) != TYPE_NAME (right_base_type))
872 gigi_abort (503);
874 if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
875 best_type = left_base_type;
876 else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
877 best_type = right_base_type;
878 else if (! CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
879 best_type = left_base_type;
880 else if (! CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
881 best_type = right_base_type;
882 else
883 gigi_abort (504);
885 left_operand = convert (best_type, left_operand);
886 right_operand = convert (best_type, right_operand);
888 else
889 gigi_abort (505);
892 /* If we are comparing a fat pointer against zero, we need to
893 just compare the data pointer. */
894 else if (TYPE_FAT_POINTER_P (left_base_type)
895 && TREE_CODE (right_operand) == CONSTRUCTOR
896 && integer_zerop (TREE_VALUE (CONSTRUCTOR_ELTS (right_operand))))
898 right_operand = build_component_ref (left_operand, NULL_TREE,
899 TYPE_FIELDS (left_base_type),
901 left_operand = convert (TREE_TYPE (right_operand),
902 integer_zero_node);
904 else
906 left_operand = convert (left_base_type, left_operand);
907 right_operand = convert (right_base_type, right_operand);
910 modulus = 0;
911 break;
913 case PREINCREMENT_EXPR:
914 case PREDECREMENT_EXPR:
915 case POSTINCREMENT_EXPR:
916 case POSTDECREMENT_EXPR:
917 /* In these, the result type and the left operand type should be the
918 same. Do the operation in the base type of those and convert the
919 right operand (which is an integer) to that type.
921 Note that these operations are only used in loop control where
922 we guarantee that no overflow can occur. So nothing special need
923 be done for modular types. */
925 if (left_type != result_type)
926 gigi_abort (506);
928 operation_type = get_base_type (result_type);
929 left_operand = convert (operation_type, left_operand);
930 right_operand = convert (operation_type, right_operand);
931 has_side_effects = 1;
932 modulus = 0;
933 break;
935 case LSHIFT_EXPR:
936 case RSHIFT_EXPR:
937 case LROTATE_EXPR:
938 case RROTATE_EXPR:
939 /* The RHS of a shift can be any type. Also, ignore any modulus
940 (we used to abort, but this is needed for unchecked conversion
941 to modular types). Otherwise, processing is the same as normal. */
942 if (operation_type != left_base_type)
943 gigi_abort (514);
945 modulus = 0;
946 left_operand = convert (operation_type, left_operand);
947 break;
949 case TRUTH_ANDIF_EXPR:
950 case TRUTH_ORIF_EXPR:
951 case TRUTH_AND_EXPR:
952 case TRUTH_OR_EXPR:
953 case TRUTH_XOR_EXPR:
954 left_operand = gnat_truthvalue_conversion (left_operand);
955 right_operand = gnat_truthvalue_conversion (right_operand);
956 goto common;
958 case BIT_AND_EXPR:
959 case BIT_IOR_EXPR:
960 case BIT_XOR_EXPR:
961 /* For binary modulus, if the inputs are in range, so are the
962 outputs. */
963 if (modulus != 0 && integer_pow2p (modulus))
964 modulus = 0;
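/* For example, with a binary modulus of 16 both inputs are below 16, so
   an AND, OR, or XOR result is also below 16 and no reduction is needed;
   clearing MODULUS skips the final FLOOR_MOD_EXPR at the bottom.  */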
966 goto common;
968 case COMPLEX_EXPR:
969 if (TREE_TYPE (result_type) != left_base_type
970 || TREE_TYPE (result_type) != right_base_type)
971 gigi_abort (515);
973 left_operand = convert (left_base_type, left_operand);
974 right_operand = convert (right_base_type, right_operand);
975 break;
977 case TRUNC_DIV_EXPR: case TRUNC_MOD_EXPR:
978 case CEIL_DIV_EXPR: case CEIL_MOD_EXPR:
979 case FLOOR_DIV_EXPR: case FLOOR_MOD_EXPR:
980 case ROUND_DIV_EXPR: case ROUND_MOD_EXPR:
981 /* These always produce results lower than either operand. */
982 modulus = 0;
983 goto common;
985 default:
986 common:
 987 /* The result type should be the same as the base types of
 988 both operands (and they should be the same). Convert
989 everything to the result type. */
991 if (operation_type != left_base_type
992 || left_base_type != right_base_type)
993 gigi_abort (507);
995 left_operand = convert (operation_type, left_operand);
996 right_operand = convert (operation_type, right_operand);
999 if (modulus != 0 && ! integer_pow2p (modulus))
1001 result = nonbinary_modular_operation (op_code, operation_type,
1002 left_operand, right_operand);
1003 modulus = 0;
1005 /* If either operand is a NULL_EXPR, just return a new one. */
1006 else if (TREE_CODE (left_operand) == NULL_EXPR)
1007 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
1008 else if (TREE_CODE (right_operand) == NULL_EXPR)
1009 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
1010 else
1011 result = fold (build (op_code, operation_type,
1012 left_operand, right_operand));
1014 TREE_SIDE_EFFECTS (result) |= has_side_effects;
1015 TREE_CONSTANT (result)
1016 |= (TREE_CONSTANT (left_operand) & TREE_CONSTANT (right_operand)
1017 && op_code != ARRAY_REF && op_code != ARRAY_RANGE_REF);
1019 if ((op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
1020 && TYPE_VOLATILE (operation_type))
1021 TREE_THIS_VOLATILE (result) = 1;
1023 /* If we are working with modular types, perform the MOD operation
1024 if something above hasn't eliminated the need for it. */
1025 if (modulus != 0)
1026 result = fold (build (FLOOR_MOD_EXPR, operation_type, result,
1027 convert (operation_type, modulus)));
1029 if (result_type != 0 && result_type != operation_type)
1030 result = convert (result_type, result);
1032 return result;
1035 /* Similar, but for unary operations. */
1037 tree
1038 build_unary_op (enum tree_code op_code, tree result_type, tree operand)
1040 tree type = TREE_TYPE (operand);
1041 tree base_type = get_base_type (type);
1042 tree operation_type = result_type;
1043 tree result;
1044 int side_effects = 0;
1046 if (operation_type != 0
1047 && TREE_CODE (operation_type) == RECORD_TYPE
1048 && TYPE_LEFT_JUSTIFIED_MODULAR_P (operation_type))
1049 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
1051 if (operation_type != 0
1052 && ! AGGREGATE_TYPE_P (operation_type)
1053 && TYPE_EXTRA_SUBTYPE_P (operation_type))
1054 operation_type = get_base_type (operation_type);
1056 switch (op_code)
1058 case REALPART_EXPR:
1059 case IMAGPART_EXPR:
1060 if (operation_type == 0)
1061 result_type = operation_type = TREE_TYPE (type);
1062 else if (result_type != TREE_TYPE (type))
1063 gigi_abort (513);
1065 result = fold (build1 (op_code, operation_type, operand));
1066 break;
1068 case TRUTH_NOT_EXPR:
1069 if (result_type != base_type)
1070 gigi_abort (508);
1072 result = invert_truthvalue (gnat_truthvalue_conversion (operand));
1073 break;
1075 case ATTR_ADDR_EXPR:
1076 case ADDR_EXPR:
1077 switch (TREE_CODE (operand))
1079 case INDIRECT_REF:
1080 case UNCONSTRAINED_ARRAY_REF:
1081 result = TREE_OPERAND (operand, 0);
1083 /* Make sure the type here is a pointer, not a reference.
1084 GCC wants pointer types for function addresses. */
1085 if (result_type == 0)
1086 result_type = build_pointer_type (type);
1087 break;
1089 case NULL_EXPR:
1090 result = operand;
1091 TREE_TYPE (result) = type = build_pointer_type (type);
1092 break;
1094 case ARRAY_REF:
1095 case ARRAY_RANGE_REF:
1096 case COMPONENT_REF:
1097 case BIT_FIELD_REF:
1098 /* If this is for 'Address, find the address of the prefix and
1099 add the offset to the field. Otherwise, do this the normal
1100 way. */
1101 if (op_code == ATTR_ADDR_EXPR)
1103 HOST_WIDE_INT bitsize;
1104 HOST_WIDE_INT bitpos;
1105 tree offset, inner;
1106 enum machine_mode mode;
1107 int unsignedp, volatilep;
1109 inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
1110 &mode, &unsignedp, &volatilep);
1112 /* If INNER is a padding type whose field has a self-referential
1113 size, convert to that inner type. We know the offset is zero
1114 and we need to have that type visible. */
1115 if (TREE_CODE (TREE_TYPE (inner)) == RECORD_TYPE
1116 && TYPE_IS_PADDING_P (TREE_TYPE (inner))
1117 && (CONTAINS_PLACEHOLDER_P
1118 (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS
1119 (TREE_TYPE (inner)))))))
1120 inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
1121 inner);
1123 /* Compute the offset as a byte offset from INNER. */
1124 if (offset == 0)
1125 offset = size_zero_node;
1127 if (bitpos % BITS_PER_UNIT != 0)
1128 post_error
1129 ("taking address of object not aligned on storage unit?",
1130 error_gnat_node);
1132 offset = size_binop (PLUS_EXPR, offset,
1133 size_int (bitpos / BITS_PER_UNIT));
1135 /* Take the address of INNER, convert the offset to void *, and
 1136 add them. It will later be converted to the desired result
1137 type, if any. */
1138 inner = build_unary_op (ADDR_EXPR, NULL_TREE, inner);
1139 inner = convert (ptr_void_type_node, inner);
1140 offset = convert (ptr_void_type_node, offset);
1141 result = build_binary_op (PLUS_EXPR, ptr_void_type_node,
1142 inner, offset);
1143 result = convert (build_pointer_type (TREE_TYPE (operand)),
1144 result);
1145 break;
1147 goto common;
1149 case CONSTRUCTOR:
1150 /* If this is just a constructor for a padded record, we can
1151 just take the address of the single field and convert it to
1152 a pointer to our type. */
1153 if (TREE_CODE (type) == RECORD_TYPE && TYPE_IS_PADDING_P (type))
1155 result
1156 = build_unary_op (ADDR_EXPR, NULL_TREE,
1157 TREE_VALUE (CONSTRUCTOR_ELTS (operand)));
1158 result = convert (build_pointer_type (TREE_TYPE (operand)),
1159 result);
1160 break;
1163 goto common;
1165 case NOP_EXPR:
1166 if (AGGREGATE_TYPE_P (type)
1167 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
1168 return build_unary_op (ADDR_EXPR, result_type,
1169 TREE_OPERAND (operand, 0));
1171 /* If this NOP_EXPR doesn't change the mode, get the result type
1172 from this type and go down. We need to do this in case
1173 this is a conversion of a CONST_DECL. */
1174 if (TYPE_MODE (type) != BLKmode
1175 && (TYPE_MODE (type)
1176 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0)))))
1177 return build_unary_op (ADDR_EXPR,
1178 (result_type == 0
1179 ? build_pointer_type (type)
1180 : result_type),
1181 TREE_OPERAND (operand, 0));
1182 goto common;
1184 case CONST_DECL:
1185 operand = DECL_CONST_CORRESPONDING_VAR (operand);
1187 /* ... fall through ... */
1189 default:
1190 common:
 1192 /* If we are taking the address of a padded record whose field
 1193 contains a template, take the address of the template. */
1194 if (TREE_CODE (type) == RECORD_TYPE
1195 && TYPE_IS_PADDING_P (type)
1196 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
1197 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
1199 type = TREE_TYPE (TYPE_FIELDS (type));
1200 operand = convert (type, operand);
1203 if (type != error_mark_node)
1204 operation_type = build_pointer_type (type);
1206 gnat_mark_addressable (operand);
1207 result = fold (build1 (ADDR_EXPR, operation_type, operand));
1210 TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
1211 break;
1213 case INDIRECT_REF:
1214 /* If we want to refer to an entire unconstrained array,
1215 make up an expression to do so. This will never survive to
1216 the backend. If TYPE is a thin pointer, first convert the
1217 operand to a fat pointer. */
1218 if (TYPE_THIN_POINTER_P (type)
1219 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != 0)
1221 operand
1222 = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))),
1223 operand);
1224 type = TREE_TYPE (operand);
1227 if (TYPE_FAT_POINTER_P (type))
1229 result = build1 (UNCONSTRAINED_ARRAY_REF,
1230 TYPE_UNCONSTRAINED_ARRAY (type), operand);
1231 TREE_READONLY (result) = TREE_STATIC (result)
1232 = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
1234 else if (TREE_CODE (operand) == ADDR_EXPR)
1235 result = TREE_OPERAND (operand, 0);
1237 else
1239 result = fold (build1 (op_code, TREE_TYPE (type), operand));
1240 TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
1243 side_effects
1244 = (! TYPE_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)));
1245 break;
1247 case NEGATE_EXPR:
1248 case BIT_NOT_EXPR:
1250 tree modulus = ((operation_type != 0
1251 && TREE_CODE (operation_type) == INTEGER_TYPE
1252 && TYPE_MODULAR_P (operation_type))
1253 ? TYPE_MODULUS (operation_type) : 0);
1254 int mod_pow2 = modulus != 0 && integer_pow2p (modulus);
1256 /* If this is a modular type, there are various possibilities
1257 depending on the operation and whether the modulus is a
1258 power of two or not. */
1260 if (modulus != 0)
1262 if (operation_type != base_type)
1263 gigi_abort (509);
1265 operand = convert (operation_type, operand);
1267 /* The fastest in the negate case for binary modulus is
1268 the straightforward code; the TRUNC_MOD_EXPR below
1269 is an AND operation. */
1270 if (op_code == NEGATE_EXPR && mod_pow2)
1271 result = fold (build (TRUNC_MOD_EXPR, operation_type,
1272 fold (build1 (NEGATE_EXPR, operation_type,
1273 operand)),
1274 modulus));
1276 /* For nonbinary negate case, return zero for zero operand,
1277 else return the modulus minus the operand. If the modulus
1278 is a power of two minus one, we can do the subtraction
1279 as an XOR since it is equivalent and faster on most machines. */
1280 else if (op_code == NEGATE_EXPR && ! mod_pow2)
1282 if (integer_pow2p (fold (build (PLUS_EXPR, operation_type,
1283 modulus,
1284 convert (operation_type,
1285 integer_one_node)))))
1286 result = fold (build (BIT_XOR_EXPR, operation_type,
1287 operand, modulus));
1288 else
1289 result = fold (build (MINUS_EXPR, operation_type,
1290 modulus, operand));
1292 result = fold (build (COND_EXPR, operation_type,
1293 fold (build (NE_EXPR, integer_type_node,
1294 operand,
1295 convert (operation_type,
1296 integer_zero_node))),
1297 result, operand));
1299 else
1301 /* For the NOT cases, we need a constant equal to
1302 the modulus minus one. For a binary modulus, we
1303 XOR against the constant and subtract the operand from
1304 that constant for nonbinary modulus. */
1306 tree cnst = fold (build (MINUS_EXPR, operation_type, modulus,
1307 convert (operation_type,
1308 integer_one_node)));
1310 if (mod_pow2)
1311 result = fold (build (BIT_XOR_EXPR, operation_type,
1312 operand, cnst));
1313 else
1314 result = fold (build (MINUS_EXPR, operation_type,
1315 cnst, operand));
1318 break;
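/* For example, with the nonbinary modulus 7 (so 7 + 1 is a power of two),
   "-X" is computed as X xor 7 for X /= 0, since 7 - X and 7 xor X agree
   for 0 <= X <= 6, and "not X" is computed as 6 - X; with the binary
   modulus 8, "not X" is simply X xor 7.  */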
1322 /* ... fall through ... */
1324 default:
1325 if (operation_type != base_type)
1326 gigi_abort (509);
1328 result = fold (build1 (op_code, operation_type, convert (operation_type,
1329 operand)));
1332 if (side_effects)
1334 TREE_SIDE_EFFECTS (result) = 1;
1335 if (TREE_CODE (result) == INDIRECT_REF)
1336 TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
1339 if (result_type != 0 && TREE_TYPE (result) != result_type)
1340 result = convert (result_type, result);
1342 return result;
1345 /* Similar, but for COND_EXPR. */
1347 tree
1348 build_cond_expr (tree result_type,
1349 tree condition_operand,
1350 tree true_operand,
1351 tree false_operand)
1353 tree result;
1354 int addr_p = 0;
1356 /* Front-end verifies that result, true and false operands have same base
1357 type. Convert everything to the result type. */
1359 true_operand = convert (result_type, true_operand);
1360 false_operand = convert (result_type, false_operand);
1362 /* If the result type is unconstrained, take the address of
1363 the operands and then dereference our result. */
1365 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1366 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1368 addr_p = 1;
1369 result_type = build_pointer_type (result_type);
1370 true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
1371 false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
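/* The INDIRECT_REF applied at the end of this function then turns the
   result back into an object, so the net effect for an unconstrained or
   self-referential type is essentially *(COND ? &TRUE_OP : &FALSE_OP).  */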
1374 result = fold (build (COND_EXPR, result_type, condition_operand,
1375 true_operand, false_operand));
1377 /* If either operand is a SAVE_EXPR (possibly surrounded by
 1378 arithmetic), make sure it gets done. */
1379 true_operand = skip_simple_arithmetic (true_operand);
1380 false_operand = skip_simple_arithmetic (false_operand);
1382 if (TREE_CODE (true_operand) == SAVE_EXPR)
1383 result = build (COMPOUND_EXPR, result_type, true_operand, result);
1385 if (TREE_CODE (false_operand) == SAVE_EXPR)
1386 result = build (COMPOUND_EXPR, result_type, false_operand, result);
1388 /* ??? Seems the code above is wrong, as it may move ahead of the COND
1389 SAVE_EXPRs with side effects and not shared by both arms. */
1391 if (addr_p)
1392 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1394 return result;
1398 /* Build a CALL_EXPR to call FUNDECL with one argument, ARG. Return
1399 the CALL_EXPR. */
1401 tree
1402 build_call_1_expr (tree fundecl, tree arg)
1404 tree call = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fundecl)),
1405 build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
1406 chainon (NULL_TREE, build_tree_list (NULL_TREE, arg)),
1407 NULL_TREE);
1409 TREE_SIDE_EFFECTS (call) = 1;
1411 return call;
1414 /* Build a CALL_EXPR to call FUNDECL with two arguments, ARG1 & ARG2. Return
1415 the CALL_EXPR. */
1417 tree
1418 build_call_2_expr (tree fundecl, tree arg1, tree arg2)
1420 tree call = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fundecl)),
1421 build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
1422 chainon (chainon (NULL_TREE,
1423 build_tree_list (NULL_TREE, arg1)),
1424 build_tree_list (NULL_TREE, arg2)),
1425 NULL_TREE);
1427 TREE_SIDE_EFFECTS (call) = 1;
1429 return call;
1432 /* Likewise to call FUNDECL with no arguments. */
1434 tree
1435 build_call_0_expr (tree fundecl)
1437 tree call = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fundecl)),
1438 build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
1439 NULL_TREE, NULL_TREE);
1441 TREE_SIDE_EFFECTS (call) = 1;
1443 return call;
1446 /* Call a function that raises an exception and pass the line number and file
1447 name, if requested. MSG says which exception function to call. */
1449 tree
1450 build_call_raise (int msg)
1452 tree fndecl = gnat_raise_decls[msg];
1453 const char *str = discard_file_names ? "" : ref_filename;
1454 int len = strlen (str) + 1;
1455 tree filename = build_string (len, str);
1457 TREE_TYPE (filename)
1458 = build_array_type (char_type_node,
1459 build_index_type (build_int_2 (len, 0)));
1461 return
1462 build_call_2_expr (fndecl,
1463 build1 (ADDR_EXPR, build_pointer_type (char_type_node),
1464 filename),
1465 build_int_2 (input_line, 0));
1468 /* Return a CONSTRUCTOR of TYPE whose list is LIST. */
1470 tree
1471 gnat_build_constructor (tree type, tree list)
1473 tree elmt;
1474 int allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
1475 int side_effects = 0;
1476 tree result;
1478 for (elmt = list; elmt; elmt = TREE_CHAIN (elmt))
1480 if (! TREE_CONSTANT (TREE_VALUE (elmt))
1481 || (TREE_CODE (type) == RECORD_TYPE
1482 && DECL_BIT_FIELD (TREE_PURPOSE (elmt))
1483 && TREE_CODE (TREE_VALUE (elmt)) != INTEGER_CST)
1484 || ! initializer_constant_valid_p (TREE_VALUE (elmt),
1485 TREE_TYPE (TREE_VALUE (elmt))))
1486 allconstant = 0;
1488 if (TREE_SIDE_EFFECTS (TREE_VALUE (elmt)))
1489 side_effects = 1;
 1491 /* Propagate a NULL_EXPR from the size of the type. We won't ever
 1492 be executing the code we generate here in that case, but handle it
 1493 specially to avoid the compiler blowing up. */
1494 if (TREE_CODE (type) == RECORD_TYPE
1495 && (0 != (result
1496 = contains_null_expr (DECL_SIZE (TREE_PURPOSE (elmt))))))
1497 return build1 (NULL_EXPR, type, TREE_OPERAND (result, 0));
1500 /* If TYPE is a RECORD_TYPE and the fields are not in the
1501 same order as their bit position, don't treat this as constant
1502 since varasm.c can't handle it. */
1503 if (allconstant && TREE_CODE (type) == RECORD_TYPE)
1505 tree last_pos = bitsize_zero_node;
1506 tree field;
1508 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1510 tree this_pos = bit_position (field);
1512 if (TREE_CODE (this_pos) != INTEGER_CST
1513 || tree_int_cst_lt (this_pos, last_pos))
1515 allconstant = 0;
1516 break;
1519 last_pos = this_pos;
1523 result = build_constructor (type, list);
1524 TREE_CONSTANT (result) = allconstant;
1525 TREE_STATIC (result) = allconstant;
1526 TREE_SIDE_EFFECTS (result) = side_effects;
1527 TREE_READONLY (result) = TYPE_READONLY (type);
1529 return result;
1532 /* Return a COMPONENT_REF to access a field that is given by COMPONENT,
1533 an IDENTIFIER_NODE giving the name of the field, or FIELD, a FIELD_DECL,
1534 for the field. Don't fold the result if NO_FOLD_P is nonzero.
1536 We also handle the fact that we might have been passed a pointer to the
1537 actual record and know how to look for fields in variant parts. */
1539 static tree
1540 build_simple_component_ref (tree record_variable,
1541 tree component,
1542 tree field,
1543 int no_fold_p)
1545 tree record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_variable));
1546 tree ref;
1548 if ((TREE_CODE (record_type) != RECORD_TYPE
1549 && TREE_CODE (record_type) != UNION_TYPE
1550 && TREE_CODE (record_type) != QUAL_UNION_TYPE)
1551 || TYPE_SIZE (record_type) == 0)
1552 gigi_abort (510);
1554 /* Either COMPONENT or FIELD must be specified, but not both. */
1555 if ((component != 0) == (field != 0))
1556 gigi_abort (511);
1558 /* If no field was specified, look for a field with the specified name
1559 in the current record only. */
1560 if (field == 0)
1561 for (field = TYPE_FIELDS (record_type); field;
1562 field = TREE_CHAIN (field))
1563 if (DECL_NAME (field) == component)
1564 break;
1566 if (field == 0)
1567 return 0;
1569 /* If this field is not in the specified record, see if we can find
1570 something in the record whose original field is the same as this one. */
1571 if (DECL_CONTEXT (field) != record_type)
1572 /* Check if there is a field with name COMPONENT in the record. */
1574 tree new_field;
1576 /* First loop thru normal components. */
1578 for (new_field = TYPE_FIELDS (record_type); new_field != 0;
1579 new_field = TREE_CHAIN (new_field))
1580 if (DECL_ORIGINAL_FIELD (new_field) == field
1581 || new_field == DECL_ORIGINAL_FIELD (field)
1582 || (DECL_ORIGINAL_FIELD (field) != 0
1583 && (DECL_ORIGINAL_FIELD (field)
1584 == DECL_ORIGINAL_FIELD (new_field))))
1585 break;
1587 /* Next, loop thru DECL_INTERNAL_P components if we haven't found
1588 the component in the first search. Doing this search in 2 steps
 1589 is required to avoid hidden homonymous fields in the
1590 _Parent field. */
1592 if (new_field == 0)
1593 for (new_field = TYPE_FIELDS (record_type); new_field != 0;
1594 new_field = TREE_CHAIN (new_field))
1595 if (DECL_INTERNAL_P (new_field))
1597 tree field_ref
1598 = build_simple_component_ref (record_variable,
1599 NULL_TREE, new_field, no_fold_p);
1600 ref = build_simple_component_ref (field_ref, NULL_TREE, field,
1601 no_fold_p);
1603 if (ref != 0)
1604 return ref;
1607 field = new_field;
1610 if (field == 0)
1611 return 0;
1613 /* It would be nice to call "fold" here, but that can lose a type
1614 we need to tag a PLACEHOLDER_EXPR with, so we can't do it. */
1615 ref = build (COMPONENT_REF, TREE_TYPE (field), record_variable, field);
1617 if (TREE_READONLY (record_variable) || TREE_READONLY (field))
1618 TREE_READONLY (ref) = 1;
1619 if (TREE_THIS_VOLATILE (record_variable) || TREE_THIS_VOLATILE (field)
1620 || TYPE_VOLATILE (record_type))
1621 TREE_THIS_VOLATILE (ref) = 1;
1623 return no_fold_p ? ref : fold (ref);
1626 /* Like build_simple_component_ref, except that we give an error if the
1627 reference could not be found. */
1629 tree
1630 build_component_ref (tree record_variable,
1631 tree component,
1632 tree field,
1633 int no_fold_p)
1635 tree ref = build_simple_component_ref (record_variable, component, field,
1636 no_fold_p);
1638 if (ref != 0)
1639 return ref;
1641 /* If FIELD was specified, assume this is an invalid user field so
1642 raise constraint error. Otherwise, we can't find the type to return, so
1643 abort. */
1645 else if (field != 0)
1646 return build1 (NULL_EXPR, TREE_TYPE (field),
1647 build_call_raise (CE_Discriminant_Check_Failed));
1648 else
1649 gigi_abort (512);
1652 /* Build a GCC tree to call an allocation or deallocation function.
1653 If GNU_OBJ is nonzero, it is an object to deallocate. Otherwise,
1654 generate an allocator.
1656 GNU_SIZE is the size of the object in bytes and ALIGN is the alignment in
1657 bits. GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the
 1658 storage pool to use. If not present, malloc and free will be used except
1659 if GNAT_PROC is the "fake" value of -1, in which case we allocate the
1660 object dynamically on the stack frame. */
1662 tree
1663 build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, unsigned align,
1664 Entity_Id gnat_proc, Entity_Id gnat_pool,
1665 Node_Id gnat_node)
1667 tree gnu_align = size_int (align / BITS_PER_UNIT);
1669 gnu_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (gnu_size, gnu_obj);
1671 if (Present (gnat_proc))
1673 /* The storage pools are obviously always tagged types, but the
 1674 secondary stack uses the same mechanism and is not tagged. */
1675 if (Is_Tagged_Type (Etype (gnat_pool)))
 1677 /* The size is the third parameter; the alignment is of the
 1678 same type. */
1679 Entity_Id gnat_size_type
1680 = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
1681 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
1682 tree gnu_proc = gnat_to_gnu (gnat_proc);
1683 tree gnu_proc_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_proc);
1684 tree gnu_pool = gnat_to_gnu (gnat_pool);
1685 tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
1686 tree gnu_args = NULL_TREE;
1687 tree gnu_call;
1689 /* The first arg is always the address of the storage pool; next
1690 comes the address of the object, for a deallocator, then the
1691 size and alignment. */
1692 gnu_args
1693 = chainon (gnu_args, build_tree_list (NULL_TREE, gnu_pool_addr));
1695 if (gnu_obj)
1696 gnu_args
1697 = chainon (gnu_args, build_tree_list (NULL_TREE, gnu_obj));
1699 gnu_args
1700 = chainon (gnu_args,
1701 build_tree_list (NULL_TREE,
1702 convert (gnu_size_type, gnu_size)));
1703 gnu_args
1704 = chainon (gnu_args,
1705 build_tree_list (NULL_TREE,
1706 convert (gnu_size_type, gnu_align)));
1708 gnu_call = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (gnu_proc)),
1709 gnu_proc_addr, gnu_args, NULL_TREE);
1710 TREE_SIDE_EFFECTS (gnu_call) = 1;
1711 return gnu_call;
1714 /* Secondary stack case. */
1715 else
1717 /* The size is the second parameter */
1718 Entity_Id gnat_size_type
1719 = Etype (Next_Formal (First_Formal (gnat_proc)));
1720 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
1721 tree gnu_proc = gnat_to_gnu (gnat_proc);
1722 tree gnu_proc_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_proc);
1723 tree gnu_args = NULL_TREE;
1724 tree gnu_call;
1726 /* The first arg is the address of the object, for a
1727 deallocator, then the size */
1728 if (gnu_obj)
1729 gnu_args
1730 = chainon (gnu_args, build_tree_list (NULL_TREE, gnu_obj));
1732 gnu_args
1733 = chainon (gnu_args,
1734 build_tree_list (NULL_TREE,
1735 convert (gnu_size_type, gnu_size)));
1737 gnu_call = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (gnu_proc)),
1738 gnu_proc_addr, gnu_args, NULL_TREE);
1739 TREE_SIDE_EFFECTS (gnu_call) = 1;
1740 return gnu_call;
1744 else if (gnu_obj)
1745 return build_call_1_expr (free_decl, gnu_obj);
1746 else if (gnat_pool == -1)
1748 /* If the size is a constant, we can put it in the fixed portion of
1749 the stack frame to avoid the need to adjust the stack pointer. */
1750 if (TREE_CODE (gnu_size) == INTEGER_CST && ! flag_stack_check)
1752 tree gnu_range
1753 = build_range_type (NULL_TREE, size_one_node, gnu_size);
1754 tree gnu_array_type = build_array_type (char_type_node, gnu_range);
1755 tree gnu_decl =
1756 create_var_decl (get_identifier ("RETVAL"), NULL_TREE,
1757 gnu_array_type, NULL_TREE, 0, 0, 0, 0, 0);
1759 return convert (ptr_void_type_node,
1760 build_unary_op (ADDR_EXPR, NULL_TREE, gnu_decl));
1762 else
1763 return build (ALLOCATE_EXPR, ptr_void_type_node, gnu_size, gnu_align);
1765 else
1767 if (Nkind (gnat_node) != N_Allocator || !Comes_From_Source (gnat_node))
1768 Check_No_Implicit_Heap_Alloc (gnat_node);
1769 return build_call_1_expr (malloc_decl, gnu_size);
1773 /* Build a GCC tree corresponding to the allocation of an object of TYPE,
1774 with initial value INIT if INIT is nonzero. Convert the expression to
1775 RESULT_TYPE, which must be some pointer type, and return the tree.
1776 GNAT_PROC and GNAT_POOL optionally give the procedure to call and
1777 the storage pool to use. */
1779 tree
1780 build_allocator (tree type,
1781 tree init,
1782 tree result_type,
1783 Entity_Id gnat_proc,
1784 Entity_Id gnat_pool,
1785 Node_Id gnat_node)
1787 tree size = TYPE_SIZE_UNIT (type);
1788 tree result;
1790 /* If the initializer, if present, is a NULL_EXPR, just return a new one. */
1791 if (init != 0 && TREE_CODE (init) == NULL_EXPR)
1792 return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));
1794 /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
1795 sizes of the object and its template. Allocate the whole thing and
1796 fill in the parts that are known. */
1797 else if (TYPE_FAT_OR_THIN_POINTER_P (result_type))
1799 tree template_type
1800 = (TYPE_FAT_POINTER_P (result_type)
1801 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (result_type))))
1802 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (result_type))));
1803 tree storage_type
1804 = build_unc_object_type (template_type, type,
1805 get_identifier ("ALLOC"));
1806 tree storage_ptr_type = build_pointer_type (storage_type);
1807 tree storage;
1808 tree template_cons = NULL_TREE;
1810 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
1811 init);
1813 /* If the size overflows, pass -1 so the allocator will raise
1814 Storage_Error.  */
1815 if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
1816 size = ssize_int (-1);
1818 storage = build_call_alloc_dealloc (NULL_TREE, size,
1819 TYPE_ALIGN (storage_type),
1820 gnat_proc, gnat_pool, gnat_node);
1821 storage = convert (storage_ptr_type, protect_multiple_eval (storage));
1823 if (TREE_CODE (type) == RECORD_TYPE && TYPE_IS_PADDING_P (type))
1825 type = TREE_TYPE (TYPE_FIELDS (type));
1827 if (init != 0)
1828 init = convert (type, init);
1831 /* If there is an initializing expression, make a constructor for
1832 the entire object including the bounds and copy it into the
1833 object. If there is no initializing expression, just set the
1834 bounds. */
1835 if (init != 0)
1837 template_cons = tree_cons (TREE_CHAIN (TYPE_FIELDS (storage_type)),
1838 init, NULL_TREE);
1839 template_cons = tree_cons (TYPE_FIELDS (storage_type),
1840 build_template (template_type, type,
1841 init),
1842 template_cons);
1844 return convert
1845 (result_type,
1846 build (COMPOUND_EXPR, storage_ptr_type,
1847 build_binary_op
1848 (MODIFY_EXPR, storage_type,
1849 build_unary_op (INDIRECT_REF, NULL_TREE,
1850 convert (storage_ptr_type, storage)),
1851 gnat_build_constructor (storage_type, template_cons)),
1852 convert (storage_ptr_type, storage)));
1854 else
1855 return build
1856 (COMPOUND_EXPR, result_type,
1857 build_binary_op
1858 (MODIFY_EXPR, template_type,
1859 build_component_ref
1860 (build_unary_op (INDIRECT_REF, NULL_TREE,
1861 convert (storage_ptr_type, storage)),
1862 NULL_TREE, TYPE_FIELDS (storage_type), 0),
1863 build_template (template_type, type, NULL_TREE)),
1864 convert (result_type, convert (storage_ptr_type, storage)));
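      /* Illustrative sketch only (field names are hypothetical): the "ALLOC"
         record built by build_unc_object_type above can be pictured as

           struct ALLOC { template_type bounds; type array; };

         The branch above allocates the whole record and then stores either a
         constructor for both fields (when INIT is present) or just the
         template field, before returning the converted pointer.  */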
1867 /* If we have an initializing expression, see if its size is simpler
1868 than the size from the type. */
1869 if (init != 0 && TYPE_SIZE_UNIT (TREE_TYPE (init)) != 0
1870 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
1871 || CONTAINS_PLACEHOLDER_P (size)))
1872 size = TYPE_SIZE_UNIT (TREE_TYPE (init));
1874 /* If the size is still self-referential, substitute the initializing
1875 expression into it, if one is present. Otherwise this must be a
1876 request to allocate a library-level object, in which case we use
1877 the maximum size. */
1878 if (CONTAINS_PLACEHOLDER_P (size))
1880 if (init == 0)
1881 size = max_size (size, 1);
1882 else
1883 size = substitute_placeholder_in_expr (size, init);
1886 /* If the size overflows, pass -1 so the allocator will raise
1887 Storage_Error.  */
1888 if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
1889 size = ssize_int (-1);
1891 /* If the type's alignment is larger than the largest alignment
1892 we normally support (BIGGEST_ALIGNMENT) and the allocation is
1893 from the default storage pool, make an "aligning type", allocate
1894 it, point to the field we need, and return that. */
1895 if (TYPE_ALIGN (type) > BIGGEST_ALIGNMENT
1896 && No (gnat_proc))
1898 tree new_type = make_aligning_type (type, TYPE_ALIGN (type), size);
1900 result = build_call_alloc_dealloc (NULL_TREE, TYPE_SIZE_UNIT (new_type),
1901 BIGGEST_ALIGNMENT, Empty,
1902 Empty, gnat_node);
1903 result = save_expr (result);
1904 result = convert (build_pointer_type (new_type), result);
1905 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1906 result = build_component_ref (result, NULL_TREE,
1907 TYPE_FIELDS (new_type), 0);
1908 result = convert (result_type,
1909 build_unary_op (ADDR_EXPR, NULL_TREE, result));
1911 else
1912 result = convert (result_type,
1913 build_call_alloc_dealloc (NULL_TREE, size,
1914 TYPE_ALIGN (type),
1915 gnat_proc,
1916 gnat_pool,
1917 gnat_node));
1919 /* If we have an initial value, put the new address into a SAVE_EXPR, assign
1920 the value, and return the address. Do this with a COMPOUND_EXPR. */
1922 if (init)
1924 result = save_expr (result);
1925 result
1926 = build (COMPOUND_EXPR, TREE_TYPE (result),
1927 build_binary_op
1928 (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (result)),
1929 build_unary_op (INDIRECT_REF, TREE_TYPE (TREE_TYPE (result)),
1930 result),
1931 init),
1932 result);
1935 return convert (result_type, result);
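/* A hypothetical caller sketch for build_allocator (GNU_TYPE, GNU_INIT,
   GNU_PTR_TYPE and GNAT_ALLOCATOR are made-up names): translating an Ada
   allocator such as "new T'(Expr)" against the default pool would look
   roughly like

     gnu_result = build_allocator (gnu_type, gnu_init, gnu_ptr_type,
                                   Empty, Empty, gnat_allocator);

   with GNAT_PROC and GNAT_POOL supplied instead of Empty when the access
   type has a user-defined storage pool.  */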
1938 /* Fill in a VMS descriptor for EXPR and return a constructor for it.
1939 GNAT_FORMAL is the formal parameter from whose type we get the descriptor record. */
1941 tree
1942 fill_vms_descriptor (tree expr, Entity_Id gnat_formal)
1944 tree record_type = TREE_TYPE (TREE_TYPE (get_gnu_tree (gnat_formal)));
1945 tree field;
1946 tree const_list = 0;
1948 expr = maybe_unconstrained_array (expr);
1949 gnat_mark_addressable (expr);
1951 for (field = TYPE_FIELDS (record_type); field; field = TREE_CHAIN (field))
1952 const_list
1953 = tree_cons (field,
1954 convert (TREE_TYPE (field),
1955 SUBSTITUTE_PLACEHOLDER_IN_EXPR
1956 (DECL_INITIAL (field), expr)),
1957 const_list);
1959 return gnat_build_constructor (record_type, nreverse (const_list));
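/* Hypothetical usage sketch: a caller passing GNU_ACTUAL to a formal that
   uses a VMS descriptor mechanism could build the descriptor value with

     tree gnu_desc = fill_vms_descriptor (gnu_actual, gnat_formal);

   and then pass the address of that constructor as the actual parameter.  */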
1962 /* Indicate that we need to take the address of EXPR_NODE and that it
1963 therefore should not be allocated in a register. Return true if successful. */
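/* Hypothetical usage sketch (GNU_EXPR and GNU_ADDR are made-up names):

     gnat_mark_addressable (gnu_expr);
     gnu_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_expr);

   which forces any VAR_DECL or PARM_DECL reached through GNU_EXPR into
   memory before its address is taken, as done for the descriptor fields
   in fill_vms_descriptor above.  */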
1965 bool
1966 gnat_mark_addressable (tree expr_node)
1968 while (1)
1969 switch (TREE_CODE (expr_node))
1971 case ADDR_EXPR:
1972 case COMPONENT_REF:
1973 case ARRAY_REF:
1974 case ARRAY_RANGE_REF:
1975 case REALPART_EXPR:
1976 case IMAGPART_EXPR:
1977 case VIEW_CONVERT_EXPR:
1978 case CONVERT_EXPR:
1979 case NON_LVALUE_EXPR:
1980 case GNAT_NOP_EXPR:
1981 case NOP_EXPR:
1982 expr_node = TREE_OPERAND (expr_node, 0);
1983 break;
1985 case CONSTRUCTOR:
1986 TREE_ADDRESSABLE (expr_node) = 1;
1987 return true;
1989 case VAR_DECL:
1990 case PARM_DECL:
1991 case RESULT_DECL:
1992 put_var_into_stack (expr_node, true);
1993 TREE_ADDRESSABLE (expr_node) = 1;
1994 return true;
1996 case FUNCTION_DECL:
1997 TREE_ADDRESSABLE (expr_node) = 1;
1998 return true;
2000 case CONST_DECL:
2001 return (DECL_CONST_CORRESPONDING_VAR (expr_node) != 0
2002 && (gnat_mark_addressable
2003 (DECL_CONST_CORRESPONDING_VAR (expr_node))));
2004 default:
2005 return true;