gcc/ada/utils2.c
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S 2 *
6 * *
7 * C Implementation File *
8 * *
9 * *
10 * Copyright (C) 1992-2003, Free Software Foundation, Inc. *
11 * *
12 * GNAT is free software; you can redistribute it and/or modify it under *
13 * terms of the GNU General Public License as published by the Free Soft- *
14 * ware Foundation; either version 2, or (at your option) any later ver- *
15 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
16 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
17 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
18 * for more details. You should have received a copy of the GNU General *
19 * Public License distributed with GNAT; see file COPYING. If not, write *
20 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
21 * MA 02111-1307, USA. *
22 * *
23 * GNAT was originally developed by the GNAT team at New York University. *
24 * Extensive contributions were provided by Ada Core Technologies Inc. *
25 * *
26 ****************************************************************************/
28 #include "config.h"
29 #include "system.h"
30 #include "coretypes.h"
31 #include "tm.h"
32 #include "tree.h"
33 #include "flags.h"
34 #include "ada.h"
35 #include "types.h"
36 #include "atree.h"
37 #include "stringt.h"
38 #include "uintp.h"
39 #include "fe.h"
40 #include "elists.h"
41 #include "nlists.h"
42 #include "sinfo.h"
43 #include "einfo.h"
44 #include "ada-tree.h"
45 #include "gigi.h"
47 static tree find_common_type PARAMS ((tree, tree));
48 static int contains_save_expr_p PARAMS ((tree));
49 static tree contains_null_expr PARAMS ((tree));
50 static tree compare_arrays PARAMS ((tree, tree, tree));
51 static tree nonbinary_modular_operation PARAMS ((enum tree_code, tree,
52 tree, tree));
53 static tree build_simple_component_ref PARAMS ((tree, tree, tree));
55 /* Prepare expr to be an argument of a TRUTH_NOT_EXPR or other logical
56 operation.
58 This preparation consists of taking the ordinary representation of
59 an expression expr and producing a valid tree boolean expression
60 describing whether expr is nonzero. We could simply always do
62 build_binary_op (NE_EXPR, expr, integer_zero_node, 1),
64 but we optimize comparisons, &&, ||, and !.
66 The resulting type should always be the same as the input type.
67 This function is simpler than the corresponding C version since
68 the only possible operands will be things of Boolean type. */
70 tree
71 gnat_truthvalue_conversion (expr)
72 tree expr;
74 tree type = TREE_TYPE (expr);
76 switch (TREE_CODE (expr))
78 case EQ_EXPR: case NE_EXPR: case LE_EXPR: case GE_EXPR:
79 case LT_EXPR: case GT_EXPR:
80 case TRUTH_ANDIF_EXPR:
81 case TRUTH_ORIF_EXPR:
82 case TRUTH_AND_EXPR:
83 case TRUTH_OR_EXPR:
84 case TRUTH_XOR_EXPR:
85 case ERROR_MARK:
86 return expr;
88 case COND_EXPR:
89 /* Distribute the conversion into the arms of a COND_EXPR. */
90 return fold
91 (build (COND_EXPR, type, TREE_OPERAND (expr, 0),
92 gnat_truthvalue_conversion (TREE_OPERAND (expr, 1)),
93 gnat_truthvalue_conversion (TREE_OPERAND (expr, 2))));
95 case WITH_RECORD_EXPR:
96 return build (WITH_RECORD_EXPR, type,
97 gnat_truthvalue_conversion (TREE_OPERAND (expr, 0)),
98 TREE_OPERAND (expr, 1));
100 default:
101 return build_binary_op (NE_EXPR, type, expr,
102 convert (type, integer_zero_node));
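/* For instance, build_unary_op below handles TRUTH_NOT_EXPR by calling
   invert_truthvalue (gnat_truthvalue_conversion (operand)); the conversion
   guarantees the operand is a valid Boolean tree before it is inverted.  */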
106 /* Return the base type of TYPE. */
108 tree
109 get_base_type (type)
110 tree type;
112 if (TREE_CODE (type) == RECORD_TYPE
113 && TYPE_LEFT_JUSTIFIED_MODULAR_P (type))
114 type = TREE_TYPE (TYPE_FIELDS (type));
116 while (TREE_TYPE (type) != 0
117 && (TREE_CODE (type) == INTEGER_TYPE
118 || TREE_CODE (type) == REAL_TYPE))
119 type = TREE_TYPE (type);
121 return type;
124 /* Likewise, but only return types known to the Ada source. */
125 tree
126 get_ada_base_type (type)
127 tree type;
129 while (TREE_TYPE (type) != 0
130 && (TREE_CODE (type) == INTEGER_TYPE
131 || TREE_CODE (type) == REAL_TYPE)
132 && ! TYPE_EXTRA_SUBTYPE_P (type))
133 type = TREE_TYPE (type);
135 return type;
138 /* EXP is a GCC tree representing an address. See if we can find how
139 strictly the object at that address is aligned. Return that alignment
140 in bits. If we don't know anything about the alignment, return 0.
141 We do not go merely by type information here since the check on
142 N_Validate_Unchecked_Alignment does that. */
144 unsigned int
145 known_alignment (exp)
146 tree exp;
148 unsigned int lhs, rhs;
150 switch (TREE_CODE (exp))
152 case CONVERT_EXPR:
153 case NOP_EXPR:
154 case NON_LVALUE_EXPR:
155 /* Conversions between pointers and integers don't change the alignment
156 of the underlying object. */
157 return known_alignment (TREE_OPERAND (exp, 0));
159 case PLUS_EXPR:
160 case MINUS_EXPR:
161 /* If two addresses are added, the alignment of the result is the
162 minimum of the two alignments. */
163 lhs = known_alignment (TREE_OPERAND (exp, 0));
164 rhs = known_alignment (TREE_OPERAND (exp, 1));
165 return MIN (lhs, rhs);
167 case INTEGER_CST:
168 /* The first part of this represents the lowest bit in the constant,
169 but it is in bytes, not bits. */
170 return MIN (BITS_PER_UNIT
171 * (TREE_INT_CST_LOW (exp) & - TREE_INT_CST_LOW (exp)),
172 BIGGEST_ALIGNMENT);
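      /* As an illustration: for a constant address of 24, 24 & -24 is 8,
         so the value is known to be 8-byte aligned and we return
         8 * BITS_PER_UNIT, capped at BIGGEST_ALIGNMENT.  */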
174 case MULT_EXPR:
175 /* If we know the alignment of just one side, use it. Otherwise,
176 use the product of the alignments. */
177 lhs = known_alignment (TREE_OPERAND (exp, 0));
178 rhs = known_alignment (TREE_OPERAND (exp, 1));
179 if (lhs == 0 || rhs == 0)
180 return MIN (BIGGEST_ALIGNMENT, MAX (lhs, rhs));
182 return MIN (BIGGEST_ALIGNMENT, lhs * rhs);
184 case ADDR_EXPR:
185 return expr_align (TREE_OPERAND (exp, 0));
187 default:
188 return 0;
192 /* We have a comparison or assignment operation on two types, T1 and T2,
193 which are both either array types or both record types.
194 Return the type that both operands should be converted to, if any.
195 Otherwise return zero. */
197 static tree
198 find_common_type (t1, t2)
199 tree t1, t2;
201 /* If either type is non-BLKmode, use it. Note that we know that we will
202 not have any alignment problems since, if we did, the non-BLKmode
203 type could not have been used. */
204 if (TYPE_MODE (t1) != BLKmode)
205 return t1;
206 else if (TYPE_MODE (t2) != BLKmode)
207 return t2;
209 /* Otherwise, return the type that has a constant size. */
210 if (TREE_CONSTANT (TYPE_SIZE (t1)))
211 return t1;
212 else if (TREE_CONSTANT (TYPE_SIZE (t2)))
213 return t2;
215 /* In this case, both types have variable size. It's probably
216 best to leave the "type mismatch" because changing it could
217 cause a bad self-referential reference. */
218 return 0;
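/* Note that find_common_type is used below by compare_arrays when testing
   two arrays for equality and by the MODIFY_EXPR case of build_binary_op
   when copying one array or record into another.  */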
221 /* See if EXP contains a SAVE_EXPR in a position where we would
222 normally put it.
224 ??? This is a real kludge, but is probably the best approach short
225 of some very general solution. */
227 static int
228 contains_save_expr_p (exp)
229 tree exp;
231 switch (TREE_CODE (exp))
233 case SAVE_EXPR:
234 return 1;
236 case ADDR_EXPR: case INDIRECT_REF:
237 case COMPONENT_REF:
238 case NOP_EXPR: case CONVERT_EXPR: case VIEW_CONVERT_EXPR:
239 return contains_save_expr_p (TREE_OPERAND (exp, 0));
241 case CONSTRUCTOR:
242 return (CONSTRUCTOR_ELTS (exp) != 0
243 && contains_save_expr_p (CONSTRUCTOR_ELTS (exp)));
245 case TREE_LIST:
246 return (contains_save_expr_p (TREE_VALUE (exp))
247 || (TREE_CHAIN (exp) != 0
248 && contains_save_expr_p (TREE_CHAIN (exp))));
250 default:
251 return 0;
255 /* See if EXP contains a NULL_EXPR in an expression we use for sizes. Return
256 it if so. This is used to detect types whose sizes involve computations
257 that are known to raise Constraint_Error. */
259 static tree
260 contains_null_expr (exp)
261 tree exp;
263 tree tem;
265 if (TREE_CODE (exp) == NULL_EXPR)
266 return exp;
268 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
270 case '1':
271 return contains_null_expr (TREE_OPERAND (exp, 0));
273 case '<': case '2':
274 tem = contains_null_expr (TREE_OPERAND (exp, 0));
275 if (tem != 0)
276 return tem;
278 return contains_null_expr (TREE_OPERAND (exp, 1));
280 case 'e':
281 switch (TREE_CODE (exp))
283 case SAVE_EXPR:
284 return contains_null_expr (TREE_OPERAND (exp, 0));
286 case COND_EXPR:
287 tem = contains_null_expr (TREE_OPERAND (exp, 0));
288 if (tem != 0)
289 return tem;
291 tem = contains_null_expr (TREE_OPERAND (exp, 1));
292 if (tem != 0)
293 return tem;
295 return contains_null_expr (TREE_OPERAND (exp, 2));
297 default:
298 return 0;
301 default:
302 return 0;
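/* gnat_build_constructor below relies on this routine: if the size of any
   record field contains a NULL_EXPR, the whole constructor is replaced by a
   NULL_EXPR of the record type, since the code being generated would never
   be executed anyway.  */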
306 /* Return an expression tree representing an equality comparison of
307 A1 and A2, two objects of ARRAY_TYPE. The returned expression should
308 be of type RESULT_TYPE.
310 Two arrays are equal in one of two ways: (1) if both have zero length
311 in some dimension (not necessarily the same dimension) or (2) if the
312 lengths in each dimension are equal and the data is equal. We perform the
313 length tests in as efficient a manner as possible. */
315 static tree
316 compare_arrays (result_type, a1, a2)
317 tree a1, a2;
318 tree result_type;
320 tree t1 = TREE_TYPE (a1);
321 tree t2 = TREE_TYPE (a2);
322 tree result = convert (result_type, integer_one_node);
323 tree a1_is_null = convert (result_type, integer_zero_node);
324 tree a2_is_null = convert (result_type, integer_zero_node);
325 int length_zero_p = 0;
327 /* Process each dimension separately and compare the lengths. If any
328 dimension has a size known to be zero, set LENGTH_ZERO_P to 1 to
329 suppress the comparison of the data. */
330 while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
332 tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
333 tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
334 tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
335 tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
336 tree bt = get_base_type (TREE_TYPE (lb1));
337 tree length1 = fold (build (MINUS_EXPR, bt, ub1, lb1));
338 tree length2 = fold (build (MINUS_EXPR, bt, ub2, lb2));
339 tree nbt;
340 tree tem;
341 tree comparison, this_a1_is_null, this_a2_is_null;
343 /* If the length of the first array is a constant, swap our operands
344 unless the length of the second array is the constant zero.
345 Note that we have set the `length' values to the length - 1. */
346 if (TREE_CODE (length1) == INTEGER_CST
347 && ! integer_zerop (fold (build (PLUS_EXPR, bt, length2,
348 convert (bt, integer_one_node)))))
350 tem = a1, a1 = a2, a2 = tem;
351 tem = t1, t1 = t2, t2 = tem;
352 tem = lb1, lb1 = lb2, lb2 = tem;
353 tem = ub1, ub1 = ub2, ub2 = tem;
354 tem = length1, length1 = length2, length2 = tem;
355 tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
358 /* If the length of this dimension in the second array is the constant
359 zero, we can just go inside the original bounds for the first
360 array and see if last < first. */
361 if (integer_zerop (fold (build (PLUS_EXPR, bt, length2,
362 convert (bt, integer_one_node)))))
364 tree ub = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
365 tree lb = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
367 comparison = build_binary_op (LT_EXPR, result_type, ub, lb);
369 if (contains_placeholder_p (comparison))
370 comparison = build (WITH_RECORD_EXPR, result_type,
371 comparison, a1);
372 if (contains_placeholder_p (length1))
373 length1 = build (WITH_RECORD_EXPR, bt, length1, a1);
375 length_zero_p = 1;
377 this_a1_is_null = comparison;
378 this_a2_is_null = convert (result_type, integer_one_node);
381 /* If the length is some other constant value, we know that this
382 dimension in the first array cannot be superflat, so we
383 can just use its length from the actual stored bounds. */
384 else if (TREE_CODE (length2) == INTEGER_CST)
386 ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
387 lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
388 ub2 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
389 lb2 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
390 nbt = get_base_type (TREE_TYPE (ub1));
392 comparison
393 = build_binary_op (EQ_EXPR, result_type,
394 build_binary_op (MINUS_EXPR, nbt, ub1, lb1),
395 build_binary_op (MINUS_EXPR, nbt, ub2, lb2));
397 /* Note that we know that UB2 and LB2 are constant and hence
398 cannot contain a PLACEHOLDER_EXPR. */
400 if (contains_placeholder_p (comparison))
401 comparison = build (WITH_RECORD_EXPR, result_type, comparison, a1);
402 if (contains_placeholder_p (length1))
403 length1 = build (WITH_RECORD_EXPR, bt, length1, a1);
405 this_a1_is_null = build_binary_op (LT_EXPR, result_type, ub1, lb1);
406 this_a2_is_null = convert (result_type, integer_zero_node);
409 /* Otherwise compare the computed lengths. */
410 else
412 if (contains_placeholder_p (length1))
413 length1 = build (WITH_RECORD_EXPR, bt, length1, a1);
414 if (contains_placeholder_p (length2))
415 length2 = build (WITH_RECORD_EXPR, bt, length2, a2);
417 comparison
418 = build_binary_op (EQ_EXPR, result_type, length1, length2);
420 this_a1_is_null
421 = build_binary_op (LT_EXPR, result_type, length1,
422 convert (bt, integer_zero_node));
423 this_a2_is_null
424 = build_binary_op (LT_EXPR, result_type, length2,
425 convert (bt, integer_zero_node));
428 result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
429 result, comparison);
431 a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
432 this_a1_is_null, a1_is_null);
433 a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
434 this_a2_is_null, a2_is_null);
436 t1 = TREE_TYPE (t1);
437 t2 = TREE_TYPE (t2);
440 /* Unless the length in some dimension is known to be zero, compare
441 the data in the array. */
442 if (! length_zero_p)
444 tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));
446 if (type != 0)
447 a1 = convert (type, a1), a2 = convert (type, a2);
450 result = build_binary_op (TRUTH_ANDIF_EXPR, result_type, result,
451 fold (build (EQ_EXPR, result_type, a1, a2)));
455 /* The result is also true if both sizes are zero. */
456 result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
457 build_binary_op (TRUTH_ANDIF_EXPR, result_type,
458 a1_is_null, a2_is_null),
459 result);
461 /* If either operand contains SAVE_EXPRs, they have to be evaluated before
462 starting the comparison above since the place where they would
463 otherwise be evaluated would be wrong. */
465 if (contains_save_expr_p (a1))
466 result = build (COMPOUND_EXPR, result_type, a1, result);
468 if (contains_save_expr_p (a2))
469 result = build (COMPOUND_EXPR, result_type, a2, result);
471 return result;
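/* As a concrete example of the semantics implemented above: in Ada two null
   arrays are equal regardless of their bounds, so the slices S (2 .. 1) and
   S (7 .. 6) of a String S compare equal; otherwise the lengths in every
   dimension and then the data itself must match.  */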
474 /* Compute the result of applying OP_CODE to LHS and RHS, where both are of
475 type TYPE. We know that TYPE is a modular type with a nonbinary
476 modulus. */
478 static tree
479 nonbinary_modular_operation (op_code, type, lhs, rhs)
480 enum tree_code op_code;
481 tree type;
482 tree lhs, rhs;
484 tree modulus = TYPE_MODULUS (type);
485 unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
486 unsigned int precision;
487 int unsignedp = 1;
488 tree op_type = type;
489 tree result;
491 /* If this is an addition of a constant, convert it to a subtraction
492 of a constant since we can do that faster. */
493 if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
494 rhs = fold (build (MINUS_EXPR, type, modulus, rhs)), op_code = MINUS_EXPR;
496 /* For the logical operations, we only need PRECISION bits. For
497 addition and subtraction, we need one more and for multiplication we
498 need twice as many. But we never want to make a size smaller than
499 our size. */
500 if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
501 needed_precision += 1;
502 else if (op_code == MULT_EXPR)
503 needed_precision *= 2;
505 precision = MAX (needed_precision, TYPE_PRECISION (op_type));
507 /* Unsigned will do for everything but subtraction. */
508 if (op_code == MINUS_EXPR)
509 unsignedp = 0;
511 /* If our type is the wrong signedness or isn't wide enough, make a new
512 type and convert both our operands to it. */
513 if (TYPE_PRECISION (op_type) < precision
514 || TREE_UNSIGNED (op_type) != unsignedp)
516 /* Copy the node so we ensure it can be modified to make it modular. */
517 op_type = copy_node (gnat_type_for_size (precision, unsignedp));
518 modulus = convert (op_type, modulus);
519 SET_TYPE_MODULUS (op_type, modulus);
520 TYPE_MODULAR_P (op_type) = 1;
521 lhs = convert (op_type, lhs);
522 rhs = convert (op_type, rhs);
525 /* Do the operation, then we'll fix it up. */
526 result = fold (build (op_code, op_type, lhs, rhs));
528 /* For multiplication, we have no choice but to do a full modulus
529 operation. However, we want to do this in the narrowest
530 possible size. */
531 if (op_code == MULT_EXPR)
533 tree div_type = copy_node (gnat_type_for_size (needed_precision, 1));
534 modulus = convert (div_type, modulus);
535 SET_TYPE_MODULUS (div_type, modulus);
536 TYPE_MODULAR_P (div_type) = 1;
537 result = convert (op_type,
538 fold (build (TRUNC_MOD_EXPR, div_type,
539 convert (div_type, result), modulus)));
542 /* For subtraction, add the modulus back if we are negative. */
543 else if (op_code == MINUS_EXPR)
545 result = save_expr (result);
546 result = fold (build (COND_EXPR, op_type,
547 build (LT_EXPR, integer_type_node, result,
548 convert (op_type, integer_zero_node)),
549 fold (build (PLUS_EXPR, op_type,
550 result, modulus)),
551 result));
554 /* For the other operations, subtract the modulus if we are >= it. */
555 else
557 result = save_expr (result);
558 result = fold (build (COND_EXPR, op_type,
559 build (GE_EXPR, integer_type_node,
560 result, modulus),
561 fold (build (MINUS_EXPR, op_type,
562 result, modulus)),
563 result));
566 return convert (type, result);
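/* Example of the scheme above for a type with modulus 7: adding values that
   evaluate to 5 and 4 gives 9 in the wider computation type, and the final
   COND_EXPR subtracts the modulus since 9 >= 7, yielding 2.  When the addend
   is a literal, the same result is reached through the initial rewrite of
   the addition as a subtraction.  */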
569 /* Make a binary operation of kind OP_CODE. RESULT_TYPE is the type
570 desired for the result. Usually the operation is to be performed
571 in that type. For MODIFY_EXPR and ARRAY_REF, RESULT_TYPE may be 0
572 in which case the type to be used will be derived from the operands.
574 This function is very much unlike the ones for C and C++ since we
575 have already done any type conversion and matching required. All we
576 have to do here is validate the work done by SEM and handle subtypes. */
578 tree
579 build_binary_op (op_code, result_type, left_operand, right_operand)
580 enum tree_code op_code;
581 tree result_type;
582 tree left_operand;
583 tree right_operand;
585 tree left_type = TREE_TYPE (left_operand);
586 tree right_type = TREE_TYPE (right_operand);
587 tree left_base_type = get_base_type (left_type);
588 tree right_base_type = get_base_type (right_type);
589 tree operation_type = result_type;
590 tree best_type = 0;
591 tree modulus;
592 tree result;
593 int has_side_effects = 0;
595 /* If one operand (but not both, unless they have the same object) is a
596 WITH_RECORD_EXPR, do the operation and then surround it with the
597 WITH_RECORD_EXPR. Don't do this for assignment, for an ARRAY_REF, or
598 for an ARRAY_RANGE_REF because we need to keep track of the
599 WITH_RECORD_EXPRs on both operands very carefully. */
600 if (op_code != MODIFY_EXPR && op_code != ARRAY_REF
601 && op_code != ARRAY_RANGE_REF
602 && TREE_CODE (left_operand) == WITH_RECORD_EXPR
603 && (TREE_CODE (right_operand) != WITH_RECORD_EXPR
604 || operand_equal_p (TREE_OPERAND (left_operand, 1),
605 TREE_OPERAND (right_operand, 1), 0)))
607 tree right = right_operand;
609 if (TREE_CODE (right) == WITH_RECORD_EXPR)
610 right = TREE_OPERAND (right, 0);
612 result = build_binary_op (op_code, result_type,
613 TREE_OPERAND (left_operand, 0), right);
614 return build (WITH_RECORD_EXPR, TREE_TYPE (result), result,
615 TREE_OPERAND (left_operand, 1));
617 else if (op_code != MODIFY_EXPR && op_code != ARRAY_REF
618 && op_code != ARRAY_RANGE_REF
619 && TREE_CODE (left_operand) != WITH_RECORD_EXPR
620 && TREE_CODE (right_operand) == WITH_RECORD_EXPR)
622 result = build_binary_op (op_code, result_type, left_operand,
623 TREE_OPERAND (right_operand, 0));
624 return build (WITH_RECORD_EXPR, TREE_TYPE (result), result,
625 TREE_OPERAND (right_operand, 1));
628 if (operation_type != 0
629 && TREE_CODE (operation_type) == RECORD_TYPE
630 && TYPE_LEFT_JUSTIFIED_MODULAR_P (operation_type))
631 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
633 if (operation_type != 0
634 && ! AGGREGATE_TYPE_P (operation_type)
635 && TYPE_EXTRA_SUBTYPE_P (operation_type))
636 operation_type = get_base_type (operation_type);
638 modulus = (operation_type != 0 && TREE_CODE (operation_type) == INTEGER_TYPE
639 && TYPE_MODULAR_P (operation_type)
640 ? TYPE_MODULUS (operation_type) : 0);
642 switch (op_code)
644 case MODIFY_EXPR:
645 /* If there were any integral or pointer conversions on LHS, remove
646 them; we'll be putting them back below if needed. Likewise for
647 conversions between array and record types. But don't do this if
648 the right operand is not BLKmode (for packed arrays)
649 unless we are not changing the mode. */
650 while ((TREE_CODE (left_operand) == CONVERT_EXPR
651 || TREE_CODE (left_operand) == NOP_EXPR
652 || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
653 && (((INTEGRAL_TYPE_P (left_type)
654 || POINTER_TYPE_P (left_type))
655 && (INTEGRAL_TYPE_P (TREE_TYPE
656 (TREE_OPERAND (left_operand, 0)))
657 || POINTER_TYPE_P (TREE_TYPE
658 (TREE_OPERAND (left_operand, 0)))))
659 || (((TREE_CODE (left_type) == RECORD_TYPE
660 /* Don't remove conversions to left-justified modular
661 types. */
662 && ! TYPE_LEFT_JUSTIFIED_MODULAR_P (left_type))
663 || TREE_CODE (left_type) == ARRAY_TYPE)
664 && ((TREE_CODE (TREE_TYPE
665 (TREE_OPERAND (left_operand, 0)))
666 == RECORD_TYPE)
667 || (TREE_CODE (TREE_TYPE
668 (TREE_OPERAND (left_operand, 0)))
669 == ARRAY_TYPE))
670 && (TYPE_MODE (right_type) == BLKmode
671 || (TYPE_MODE (left_type)
672 == TYPE_MODE (TREE_TYPE
673 (TREE_OPERAND
674 (left_operand, 0))))))))
676 left_operand = TREE_OPERAND (left_operand, 0);
677 left_type = TREE_TYPE (left_operand);
680 if (operation_type == 0)
681 operation_type = left_type;
683 /* If the RHS has a conversion between record and array types and
684 an inner type is no worse, use it. Note we cannot do this for
685 modular types or types with TYPE_ALIGN_OK, since the latter
686 might indicate a conversion between a root type and a class-wide
687 type, which we must not remove. */
688 while (TREE_CODE (right_operand) == VIEW_CONVERT_EXPR
689 && ((TREE_CODE (right_type) == RECORD_TYPE
690 && ! TYPE_LEFT_JUSTIFIED_MODULAR_P (right_type)
691 && ! TYPE_ALIGN_OK (right_type)
692 && ! TYPE_IS_FAT_POINTER_P (right_type))
693 || TREE_CODE (right_type) == ARRAY_TYPE)
694 && (((TREE_CODE (TREE_TYPE (TREE_OPERAND (right_operand, 0)))
695 == RECORD_TYPE)
696 && ! (TYPE_LEFT_JUSTIFIED_MODULAR_P
697 (TREE_TYPE (TREE_OPERAND (right_operand, 0))))
698 && ! (TYPE_ALIGN_OK
699 (TREE_TYPE (TREE_OPERAND (right_operand, 0))))
700 && ! (TYPE_IS_FAT_POINTER_P
701 (TREE_TYPE (TREE_OPERAND (right_operand, 0)))))
702 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (right_operand, 0)))
703 == ARRAY_TYPE))
704 && (0 == (best_type
705 == find_common_type (right_type,
706 TREE_TYPE (TREE_OPERAND
707 (right_operand, 0))))
708 || right_type != best_type))
710 right_operand = TREE_OPERAND (right_operand, 0);
711 right_type = TREE_TYPE (right_operand);
714 /* If we are copying one array or record to another, find the best type
715 to use. */
716 if (((TREE_CODE (left_type) == ARRAY_TYPE
717 && TREE_CODE (right_type) == ARRAY_TYPE)
718 || (TREE_CODE (left_type) == RECORD_TYPE
719 && TREE_CODE (right_type) == RECORD_TYPE))
720 && (best_type = find_common_type (left_type, right_type)) != 0)
721 operation_type = best_type;
723 /* If a class-wide type may be involved, force use of the RHS type. */
724 if (TREE_CODE (right_type) == RECORD_TYPE && TYPE_ALIGN_OK (right_type))
725 operation_type = right_type;
727 /* Ensure everything on the LHS is valid. If we have a field reference,
728 strip anything that get_inner_reference can handle. Then remove any
729 conversions between types having the same code and mode. Mark
730 VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE. When done, we must have
731 either an INDIRECT_REF or a decl. */
732 result = left_operand;
733 while (1)
735 tree restype = TREE_TYPE (result);
737 if (TREE_CODE (result) == COMPONENT_REF
738 || TREE_CODE (result) == ARRAY_REF
739 || TREE_CODE (result) == ARRAY_RANGE_REF)
740 while (handled_component_p (result))
741 result = TREE_OPERAND (result, 0);
742 else if (TREE_CODE (result) == REALPART_EXPR
743 || TREE_CODE (result) == IMAGPART_EXPR
744 || TREE_CODE (result) == WITH_RECORD_EXPR
745 || ((TREE_CODE (result) == NOP_EXPR
746 || TREE_CODE (result) == CONVERT_EXPR)
747 && (((TREE_CODE (restype)
748 == TREE_CODE (TREE_TYPE
749 (TREE_OPERAND (result, 0))))
750 && (TYPE_MODE (TREE_TYPE
751 (TREE_OPERAND (result, 0)))
752 == TYPE_MODE (restype)))
753 || TYPE_ALIGN_OK (restype))))
754 result = TREE_OPERAND (result, 0);
755 else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
757 TREE_ADDRESSABLE (result) = 1;
758 result = TREE_OPERAND (result, 0);
760 else
761 break;
764 if (TREE_CODE (result) != INDIRECT_REF && TREE_CODE (result) != NULL_EXPR
765 && ! DECL_P (result))
766 gigi_abort (516);
768 /* Convert the right operand to the operation type unless
769 it is either already of the correct type or if the type
770 involves a placeholder, since the RHS may not have the same
771 record type. */
772 if (operation_type != right_type
773 && (! (TREE_CODE (TYPE_SIZE (operation_type)) != INTEGER_CST
774 && contains_placeholder_p (TYPE_SIZE (operation_type)))))
776 /* For a variable-size type, where both sides are BLKmode, convert using
777 CONVERT_EXPR instead of an unchecked conversion since we don't
778 need to make a temporary (and can't anyway). */
779 if (TREE_CODE (TYPE_SIZE (operation_type)) != INTEGER_CST
780 && TYPE_MODE (TREE_TYPE (right_operand)) == BLKmode
781 && TREE_CODE (right_operand) != UNCONSTRAINED_ARRAY_REF)
782 right_operand = build1 (CONVERT_EXPR, operation_type,
783 right_operand);
784 else
785 right_operand = convert (operation_type, right_operand);
787 right_type = operation_type;
790 /* If the modes differ, make up a bogus type and convert the RHS to
791 it. This can happen with packed types. */
792 if (TYPE_MODE (left_type) != TYPE_MODE (right_type))
794 tree new_type = copy_node (left_type);
796 TYPE_SIZE (new_type) = TYPE_SIZE (right_type);
797 TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (right_type);
798 TYPE_MAIN_VARIANT (new_type) = new_type;
799 right_operand = convert (new_type, right_operand);
802 has_side_effects = 1;
803 modulus = 0;
804 break;
806 case ARRAY_REF:
807 if (operation_type == 0)
808 operation_type = TREE_TYPE (left_type);
810 /* ... fall through ... */
812 case ARRAY_RANGE_REF:
814 /* First convert the right operand to its base type. This will
815 prevent unneeded signedness conversions when sizetype is wider than
816 integer. */
817 right_operand = convert (right_base_type, right_operand);
818 right_operand = convert (TYPE_DOMAIN (left_type), right_operand);
820 if (! TREE_CONSTANT (right_operand)
821 || ! TREE_CONSTANT (TYPE_MIN_VALUE (right_type))
822 || op_code == ARRAY_RANGE_REF)
823 gnat_mark_addressable (left_operand);
825 modulus = 0;
826 break;
828 case GE_EXPR:
829 case LE_EXPR:
830 case GT_EXPR:
831 case LT_EXPR:
832 if (POINTER_TYPE_P (left_type))
833 gigi_abort (501);
835 /* ... fall through ... */
837 case EQ_EXPR:
838 case NE_EXPR:
839 /* If either operand is a NULL_EXPR, just return a new one. */
840 if (TREE_CODE (left_operand) == NULL_EXPR)
841 return build (op_code, result_type,
842 build1 (NULL_EXPR, integer_type_node,
843 TREE_OPERAND (left_operand, 0)),
844 integer_zero_node);
846 else if (TREE_CODE (right_operand) == NULL_EXPR)
847 return build (op_code, result_type,
848 build1 (NULL_EXPR, integer_type_node,
849 TREE_OPERAND (right_operand, 0)),
850 integer_zero_node);
852 /* If either object is a left-justified modular type, get the
853 fields from within. */
854 if (TREE_CODE (left_type) == RECORD_TYPE
855 && TYPE_LEFT_JUSTIFIED_MODULAR_P (left_type))
857 left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
858 left_operand);
859 left_type = TREE_TYPE (left_operand);
860 left_base_type = get_base_type (left_type);
863 if (TREE_CODE (right_type) == RECORD_TYPE
864 && TYPE_LEFT_JUSTIFIED_MODULAR_P (right_type))
866 right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
867 right_operand);
868 right_type = TREE_TYPE (right_operand);
869 right_base_type = get_base_type (right_type);
872 /* If both objects are arrays, compare them specially. */
873 if ((TREE_CODE (left_type) == ARRAY_TYPE
874 || (TREE_CODE (left_type) == INTEGER_TYPE
875 && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
876 && (TREE_CODE (right_type) == ARRAY_TYPE
877 || (TREE_CODE (right_type) == INTEGER_TYPE
878 && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
880 result = compare_arrays (result_type, left_operand, right_operand);
882 if (op_code == EQ_EXPR)
883 ;
884 else if (op_code == NE_EXPR)
885 result = invert_truthvalue (result);
886 else
887 gigi_abort (502);
889 return result;
892 /* Otherwise, the base types must be the same unless the objects are
893 records. If we have records, use the best type and convert both
894 operands to that type. */
895 if (left_base_type != right_base_type)
897 if (TREE_CODE (left_base_type) == RECORD_TYPE
898 && TREE_CODE (right_base_type) == RECORD_TYPE)
900 /* The only way these are permitted to be the same is if both
901 types have the same name. In that case, one of them must
902 not be self-referential. Use that one as the best type.
903 Even better is if one is of fixed size. */
904 best_type = 0;
906 if (TYPE_NAME (left_base_type) == 0
907 || TYPE_NAME (left_base_type) != TYPE_NAME (right_base_type))
908 gigi_abort (503);
910 if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
911 best_type = left_base_type;
912 else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
913 best_type = right_base_type;
914 else if (! contains_placeholder_p (TYPE_SIZE (left_base_type)))
915 best_type = left_base_type;
916 else if (! contains_placeholder_p (TYPE_SIZE (right_base_type)))
917 best_type = right_base_type;
918 else
919 gigi_abort (504);
921 left_operand = convert (best_type, left_operand);
922 right_operand = convert (best_type, right_operand);
924 else
925 gigi_abort (505);
928 /* If we are comparing a fat pointer against zero, we need to
929 just compare the data pointer. */
930 else if (TYPE_FAT_POINTER_P (left_base_type)
931 && TREE_CODE (right_operand) == CONSTRUCTOR
932 && integer_zerop (TREE_VALUE (CONSTRUCTOR_ELTS (right_operand))))
934 right_operand = build_component_ref (left_operand, NULL_TREE,
935 TYPE_FIELDS (left_base_type));
936 left_operand = convert (TREE_TYPE (right_operand),
937 integer_zero_node);
939 else
941 left_operand = convert (left_base_type, left_operand);
942 right_operand = convert (right_base_type, right_operand);
945 modulus = 0;
946 break;
948 case PREINCREMENT_EXPR:
949 case PREDECREMENT_EXPR:
950 case POSTINCREMENT_EXPR:
951 case POSTDECREMENT_EXPR:
952 /* In these, the result type and the left operand type should be the
953 same. Do the operation in the base type of those and convert the
954 right operand (which is an integer) to that type.
956 Note that these operations are only used in loop control where
957 we guarantee that no overflow can occur. So nothing special need
958 be done for modular types. */
960 if (left_type != result_type)
961 gigi_abort (506);
963 operation_type = get_base_type (result_type);
964 left_operand = convert (operation_type, left_operand);
965 right_operand = convert (operation_type, right_operand);
966 has_side_effects = 1;
967 modulus = 0;
968 break;
970 case LSHIFT_EXPR:
971 case RSHIFT_EXPR:
972 case LROTATE_EXPR:
973 case RROTATE_EXPR:
974 /* The RHS of a shift can be any type. Also, ignore any modulus
975 (we used to abort, but this is needed for unchecked conversion
976 to modular types). Otherwise, processing is the same as normal. */
977 if (operation_type != left_base_type)
978 gigi_abort (514);
980 modulus = 0;
981 left_operand = convert (operation_type, left_operand);
982 break;
984 case TRUTH_ANDIF_EXPR:
985 case TRUTH_ORIF_EXPR:
986 case TRUTH_AND_EXPR:
987 case TRUTH_OR_EXPR:
988 case TRUTH_XOR_EXPR:
989 left_operand = gnat_truthvalue_conversion (left_operand);
990 right_operand = gnat_truthvalue_conversion (right_operand);
991 goto common;
993 case BIT_AND_EXPR:
994 case BIT_IOR_EXPR:
995 case BIT_XOR_EXPR:
996 /* For binary modulus, if the inputs are in range, so are the
997 outputs. */
998 if (modulus != 0 && integer_pow2p (modulus))
999 modulus = 0;
1001 goto common;
1003 case COMPLEX_EXPR:
1004 if (TREE_TYPE (result_type) != left_base_type
1005 || TREE_TYPE (result_type) != right_base_type)
1006 gigi_abort (515);
1008 left_operand = convert (left_base_type, left_operand);
1009 right_operand = convert (right_base_type, right_operand);
1010 break;
1012 case TRUNC_DIV_EXPR: case TRUNC_MOD_EXPR:
1013 case CEIL_DIV_EXPR: case CEIL_MOD_EXPR:
1014 case FLOOR_DIV_EXPR: case FLOOR_MOD_EXPR:
1015 case ROUND_DIV_EXPR: case ROUND_MOD_EXPR:
1016 /* These always produce results lower than either operand. */
1017 modulus = 0;
1018 goto common;
1020 default:
1021 common:
1022 /* The result type should be the same as the base types of
1023 both operands (and they should be the same). Convert
1024 everything to the result type. */
1026 if (operation_type != left_base_type
1027 || left_base_type != right_base_type)
1028 gigi_abort (507);
1030 left_operand = convert (operation_type, left_operand);
1031 right_operand = convert (operation_type, right_operand);
1034 if (modulus != 0 && ! integer_pow2p (modulus))
1036 result = nonbinary_modular_operation (op_code, operation_type,
1037 left_operand, right_operand);
1038 modulus = 0;
1040 /* If either operand is a NULL_EXPR, just return a new one. */
1041 else if (TREE_CODE (left_operand) == NULL_EXPR)
1042 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
1043 else if (TREE_CODE (right_operand) == NULL_EXPR)
1044 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
1045 else
1046 result = fold (build (op_code, operation_type,
1047 left_operand, right_operand));
1049 TREE_SIDE_EFFECTS (result) |= has_side_effects;
1050 TREE_CONSTANT (result)
1051 |= (TREE_CONSTANT (left_operand) & TREE_CONSTANT (right_operand)
1052 && op_code != ARRAY_REF && op_code != ARRAY_RANGE_REF);
1054 if ((op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
1055 && TYPE_VOLATILE (operation_type))
1056 TREE_THIS_VOLATILE (result) = 1;
1058 /* If we are working with modular types, perform the MOD operation
1059 if something above hasn't eliminated the need for it. */
1060 if (modulus != 0)
1061 result = fold (build (FLOOR_MOD_EXPR, operation_type, result,
1062 convert (operation_type, modulus)));
1064 if (result_type != 0 && result_type != operation_type)
1065 result = convert (result_type, result);
1067 return result;
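/* A typical use is an assignment written as
   build_binary_op (MODIFY_EXPR, NULL_TREE, gnu_lhs, gnu_rhs), with gnu_lhs
   and gnu_rhs standing for the translated operands; as documented above,
   RESULT_TYPE may be zero for MODIFY_EXPR so the operation type is derived
   from the operands.  Comparisons instead pass an explicit Boolean result
   type, as compare_arrays does with RESULT_TYPE.  */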
1070 /* Similar, but for unary operations. */
1072 tree
1073 build_unary_op (op_code, result_type, operand)
1074 enum tree_code op_code;
1075 tree result_type;
1076 tree operand;
1078 tree type = TREE_TYPE (operand);
1079 tree base_type = get_base_type (type);
1080 tree operation_type = result_type;
1081 tree result;
1082 int side_effects = 0;
1084 /* If we have a WITH_RECORD_EXPR as our operand, do the operation first,
1085 then surround it with the WITH_RECORD_EXPR. This allows GCC to do better
1086 expression folding. */
1087 if (TREE_CODE (operand) == WITH_RECORD_EXPR)
1089 result = build_unary_op (op_code, result_type,
1090 TREE_OPERAND (operand, 0));
1091 return build (WITH_RECORD_EXPR, TREE_TYPE (result), result,
1092 TREE_OPERAND (operand, 1));
1095 if (operation_type != 0
1096 && TREE_CODE (operation_type) == RECORD_TYPE
1097 && TYPE_LEFT_JUSTIFIED_MODULAR_P (operation_type))
1098 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
1100 if (operation_type != 0
1101 && ! AGGREGATE_TYPE_P (operation_type)
1102 && TYPE_EXTRA_SUBTYPE_P (operation_type))
1103 operation_type = get_base_type (operation_type);
1105 switch (op_code)
1107 case REALPART_EXPR:
1108 case IMAGPART_EXPR:
1109 if (operation_type == 0)
1110 result_type = operation_type = TREE_TYPE (type);
1111 else if (result_type != TREE_TYPE (type))
1112 gigi_abort (513);
1114 result = fold (build1 (op_code, operation_type, operand));
1115 break;
1117 case TRUTH_NOT_EXPR:
1118 if (result_type != base_type)
1119 gigi_abort (508);
1121 result = invert_truthvalue (gnat_truthvalue_conversion (operand));
1122 break;
1124 case ATTR_ADDR_EXPR:
1125 case ADDR_EXPR:
1126 switch (TREE_CODE (operand))
1128 case INDIRECT_REF:
1129 case UNCONSTRAINED_ARRAY_REF:
1130 result = TREE_OPERAND (operand, 0);
1132 /* Make sure the type here is a pointer, not a reference.
1133 GCC wants pointer types for function addresses. */
1134 if (result_type == 0)
1135 result_type = build_pointer_type (type);
1136 break;
1138 case NULL_EXPR:
1139 result = operand;
1140 TREE_TYPE (result) = type = build_pointer_type (type);
1141 break;
1143 case ARRAY_REF:
1144 case ARRAY_RANGE_REF:
1145 case COMPONENT_REF:
1146 case BIT_FIELD_REF:
1147 /* If this is for 'Address, find the address of the prefix and
1148 add the offset to the field. Otherwise, do this the normal
1149 way. */
1150 if (op_code == ATTR_ADDR_EXPR)
1152 HOST_WIDE_INT bitsize;
1153 HOST_WIDE_INT bitpos;
1154 tree offset, inner;
1155 enum machine_mode mode;
1156 int unsignedp, volatilep;
1158 inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
1159 &mode, &unsignedp, &volatilep);
1161 /* If INNER is a padding type whose field has a self-referential
1162 size, convert to that inner type. We know the offset is zero
1163 and we need to have that type visible. */
1164 if (TREE_CODE (TREE_TYPE (inner)) == RECORD_TYPE
1165 && TYPE_IS_PADDING_P (TREE_TYPE (inner))
1166 && (contains_placeholder_p
1167 (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS
1168 (TREE_TYPE (inner)))))))
1169 inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
1170 inner);
1172 /* Compute the offset as a byte offset from INNER. */
1173 if (offset == 0)
1174 offset = size_zero_node;
1176 if (bitpos % BITS_PER_UNIT != 0)
1177 post_error
1178 ("taking address of object not aligned on storage unit?",
1179 error_gnat_node);
1181 offset = size_binop (PLUS_EXPR, offset,
1182 size_int (bitpos / BITS_PER_UNIT));
1184 /* Take the address of INNER, convert the offset to void *, and
1185 add them. It will later be converted to the desired result
1186 type, if any. */
1187 inner = build_unary_op (ADDR_EXPR, NULL_TREE, inner);
1188 inner = convert (ptr_void_type_node, inner);
1189 offset = convert (ptr_void_type_node, offset);
1190 result = build_binary_op (PLUS_EXPR, ptr_void_type_node,
1191 inner, offset);
1192 result = convert (build_pointer_type (TREE_TYPE (operand)),
1193 result);
1194 break;
1196 goto common;
1198 case CONSTRUCTOR:
1199 /* If this is just a constructor for a padded record, we can
1200 just take the address of the single field and convert it to
1201 a pointer to our type. */
1202 if (TREE_CODE (type) == RECORD_TYPE && TYPE_IS_PADDING_P (type))
1204 result
1205 = build_unary_op (ADDR_EXPR, NULL_TREE,
1206 TREE_VALUE (CONSTRUCTOR_ELTS (operand)));
1207 result = convert (build_pointer_type (TREE_TYPE (operand)),
1208 result);
1209 break;
1212 goto common;
1214 case NOP_EXPR:
1215 if (AGGREGATE_TYPE_P (type)
1216 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
1217 return build_unary_op (ADDR_EXPR, result_type,
1218 TREE_OPERAND (operand, 0));
1220 /* If this NOP_EXPR doesn't change the mode, get the result type
1221 from this type and go down. We need to do this in case
1222 this is a conversion of a CONST_DECL. */
1223 if (TYPE_MODE (type) != BLKmode
1224 && (TYPE_MODE (type)
1225 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0)))))
1226 return build_unary_op (ADDR_EXPR,
1227 (result_type == 0
1228 ? build_pointer_type (type)
1229 : result_type),
1230 TREE_OPERAND (operand, 0));
1231 goto common;
1233 case CONST_DECL:
1234 operand = DECL_CONST_CORRESPONDING_VAR (operand);
1236 /* ... fall through ... */
1238 default:
1239 common:
1241 if (type != error_mark_node)
1242 operation_type = build_pointer_type (type);
1244 gnat_mark_addressable (operand);
1245 result = fold (build1 (ADDR_EXPR, operation_type, operand));
1248 TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
1249 break;
1251 case INDIRECT_REF:
1252 /* If we want to refer to an entire unconstrained array,
1253 make up an expression to do so. This will never survive to
1254 the backend. If TYPE is a thin pointer, first convert the
1255 operand to a fat pointer. */
1256 if (TYPE_THIN_POINTER_P (type)
1257 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != 0)
1259 operand
1260 = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))),
1261 operand);
1262 type = TREE_TYPE (operand);
1265 if (TYPE_FAT_POINTER_P (type))
1266 result = build1 (UNCONSTRAINED_ARRAY_REF,
1267 TYPE_UNCONSTRAINED_ARRAY (type), operand);
1269 else if (TREE_CODE (operand) == ADDR_EXPR)
1270 result = TREE_OPERAND (operand, 0);
1272 else
1274 result = fold (build1 (op_code, TREE_TYPE (type), operand));
1275 TREE_READONLY (result) = TREE_READONLY (TREE_TYPE (type));
1278 side_effects = (! TYPE_FAT_POINTER_P (type)
1279 && TYPE_VOLATILE (TREE_TYPE (type)));
1280 break;
1282 case NEGATE_EXPR:
1283 case BIT_NOT_EXPR:
1285 tree modulus = ((operation_type != 0
1286 && TREE_CODE (operation_type) == INTEGER_TYPE
1287 && TYPE_MODULAR_P (operation_type))
1288 ? TYPE_MODULUS (operation_type) : 0);
1289 int mod_pow2 = modulus != 0 && integer_pow2p (modulus);
1291 /* If this is a modular type, there are various possibilities
1292 depending on the operation and whether the modulus is a
1293 power of two or not. */
1295 if (modulus != 0)
1297 if (operation_type != base_type)
1298 gigi_abort (509);
1300 operand = convert (operation_type, operand);
1302 /* The fastest in the negate case for binary modulus is
1303 the straightforward code; the TRUNC_MOD_EXPR below
1304 is an AND operation. */
1305 if (op_code == NEGATE_EXPR && mod_pow2)
1306 result = fold (build (TRUNC_MOD_EXPR, operation_type,
1307 fold (build1 (NEGATE_EXPR, operation_type,
1308 operand)),
1309 modulus));
1311 /* For nonbinary negate case, return zero for zero operand,
1312 else return the modulus minus the operand. If the modulus
1313 is a power of two minus one, we can do the subtraction
1314 as an XOR since it is equivalent and faster on most machines. */
1315 else if (op_code == NEGATE_EXPR && ! mod_pow2)
1317 if (integer_pow2p (fold (build (PLUS_EXPR, operation_type,
1318 modulus,
1319 convert (operation_type,
1320 integer_one_node)))))
1321 result = fold (build (BIT_XOR_EXPR, operation_type,
1322 operand, modulus));
1323 else
1324 result = fold (build (MINUS_EXPR, operation_type,
1325 modulus, operand));
1327 result = fold (build (COND_EXPR, operation_type,
1328 fold (build (NE_EXPR, integer_type_node,
1329 operand,
1330 convert (operation_type,
1331 integer_zero_node))),
1332 result, operand));
1334 else
1336 /* For the NOT cases, we need a constant equal to
1337 the modulus minus one. For a binary modulus, we
1338 XOR against the constant and subtract the operand from
1339 that constant for nonbinary modulus. */
1341 tree cnst = fold (build (MINUS_EXPR, operation_type, modulus,
1342 convert (operation_type,
1343 integer_one_node)));
1345 if (mod_pow2)
1346 result = fold (build (BIT_XOR_EXPR, operation_type,
1347 operand, cnst));
1348 else
1349 result = fold (build (MINUS_EXPR, operation_type,
1350 cnst, operand));
1353 break;
1357 /* ... fall through ... */
1359 default:
1360 if (operation_type != base_type)
1361 gigi_abort (509);
1363 result = fold (build1 (op_code, operation_type, convert (operation_type,
1364 operand)));
1367 if (side_effects)
1369 TREE_SIDE_EFFECTS (result) = 1;
1370 if (TREE_CODE (result) == INDIRECT_REF)
1371 TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
1374 if (result_type != 0 && TREE_TYPE (result) != result_type)
1375 result = convert (result_type, result);
1377 return result;
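/* The most common use of build_unary_op in this file is taking an address:
   build_unary_op (ADDR_EXPR, NULL_TREE, operand) yields a pointer whose type
   is built from the operand's type when no result type is supplied, as the
   allocation and call helpers below rely on.  */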
1380 /* Similar, but for COND_EXPR. */
1382 tree
1383 build_cond_expr (result_type, condition_operand, true_operand, false_operand)
1384 tree result_type;
1385 tree condition_operand;
1386 tree true_operand;
1387 tree false_operand;
1389 tree result;
1390 int addr_p = 0;
1392 /* The front end verifies that the result, true and false operands have
1393 the same base type. Convert everything to the result type. */
1395 true_operand = convert (result_type, true_operand);
1396 false_operand = convert (result_type, false_operand);
1398 /* If the result type is unconstrained, take the address of
1399 the operands and then dereference our result. */
1401 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1402 || (TREE_CODE (TYPE_SIZE (result_type)) != INTEGER_CST
1403 && contains_placeholder_p (TYPE_SIZE (result_type))))
1405 addr_p = 1;
1406 result_type = build_pointer_type (result_type);
1407 true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
1408 false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
1411 result = fold (build (COND_EXPR, result_type, condition_operand,
1412 true_operand, false_operand));
1414 /* If either operand is a SAVE_EXPR (possibly surrounded by
1415 arithmetic), make sure it gets done. */
1416 while (TREE_CODE_CLASS (TREE_CODE (true_operand)) == '1'
1417 || (TREE_CODE_CLASS (TREE_CODE (true_operand)) == '2'
1418 && TREE_CONSTANT (TREE_OPERAND (true_operand, 1))))
1419 true_operand = TREE_OPERAND (true_operand, 0);
1421 while (TREE_CODE_CLASS (TREE_CODE (false_operand)) == '1'
1422 || (TREE_CODE_CLASS (TREE_CODE (false_operand)) == '2'
1423 && TREE_CONSTANT (TREE_OPERAND (false_operand, 1))))
1424 false_operand = TREE_OPERAND (false_operand, 0);
1426 if (TREE_CODE (true_operand) == SAVE_EXPR)
1427 result = build (COMPOUND_EXPR, result_type, true_operand, result);
1428 if (TREE_CODE (false_operand) == SAVE_EXPR)
1429 result = build (COMPOUND_EXPR, result_type, false_operand, result);
1431 if (addr_p)
1432 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1434 return result;
1438 /* Build a CALL_EXPR to call FUNDECL with one argument, ARG. Return
1439 the CALL_EXPR. */
1441 tree
1442 build_call_1_expr (fundecl, arg)
1443 tree fundecl;
1444 tree arg;
1446 tree call = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fundecl)),
1447 build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
1448 chainon (NULL_TREE, build_tree_list (NULL_TREE, arg)),
1449 NULL_TREE);
1451 TREE_SIDE_EFFECTS (call) = 1;
1453 return call;
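/* For example, build_call_alloc_dealloc below uses this helper as
   build_call_1_expr (malloc_decl, gnu_size) to allocate and as
   build_call_1_expr (free_decl, gnu_obj) to free when no storage pool is
   involved.  */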
1456 /* Build a CALL_EXPR to call FUNDECL with two arguments, ARG1 & ARG2. Return
1457 the CALL_EXPR. */
1459 tree
1460 build_call_2_expr (fundecl, arg1, arg2)
1461 tree fundecl;
1462 tree arg1, arg2;
1464 tree call = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fundecl)),
1465 build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
1466 chainon (chainon (NULL_TREE,
1467 build_tree_list (NULL_TREE, arg1)),
1468 build_tree_list (NULL_TREE, arg2)),
1469 NULL_TREE);
1471 TREE_SIDE_EFFECTS (call) = 1;
1473 return call;
1476 /* Likewise to call FUNDECL with no arguments. */
1478 tree
1479 build_call_0_expr (fundecl)
1480 tree fundecl;
1482 tree call = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fundecl)),
1483 build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
1484 NULL_TREE, NULL_TREE);
1486 TREE_SIDE_EFFECTS (call) = 1;
1488 return call;
1491 /* Call a function that raises an exception and pass the line number and file
1492 name, if requested. MSG says which exception function to call. */
1494 tree
1495 build_call_raise (msg)
1496 int msg;
1498 tree fndecl = gnat_raise_decls[msg];
1499 const char *str = discard_file_names ? "" : ref_filename;
1500 int len = strlen (str) + 1;
1501 tree filename = build_string (len, str);
1503 TREE_TYPE (filename)
1504 = build_array_type (char_type_node,
1505 build_index_type (build_int_2 (len, 0)));
1507 return
1508 build_call_2_expr (fndecl,
1509 build1 (ADDR_EXPR, build_pointer_type (char_type_node),
1510 filename),
1511 build_int_2 (input_line, 0));
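/* The resulting call carries the source file name (or an empty string when
   file names are discarded) and the current input line, so the runtime can
   report where the check failed; build_component_ref below uses
   build_call_raise (CE_Discriminant_Check_Failed) for a failed discriminant
   check.  */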
1514 /* Return a CONSTRUCTOR of TYPE whose list is LIST. */
1516 tree
1517 gnat_build_constructor (type, list)
1518 tree type;
1519 tree list;
1521 tree elmt;
1522 int allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
1523 int side_effects = 0;
1524 tree result;
1526 for (elmt = list; elmt; elmt = TREE_CHAIN (elmt))
1528 if (! TREE_CONSTANT (TREE_VALUE (elmt))
1529 || (TREE_CODE (type) == RECORD_TYPE
1530 && DECL_BIT_FIELD (TREE_PURPOSE (elmt))
1531 && TREE_CODE (TREE_VALUE (elmt)) != INTEGER_CST))
1532 allconstant = 0;
1534 if (TREE_SIDE_EFFECTS (TREE_VALUE (elmt)))
1535 side_effects = 1;
1537 /* Propagate a NULL_EXPR from the size of the type. We won't ever
1538 be executing the code we generate here in that case, but handle it
1539 specially to avoid the compiler blowing up. */
1540 if (TREE_CODE (type) == RECORD_TYPE
1541 && (0 != (result
1542 = contains_null_expr (DECL_SIZE (TREE_PURPOSE (elmt))))))
1543 return build1 (NULL_EXPR, type, TREE_OPERAND (result, 0));
1546 /* If TYPE is a RECORD_TYPE and the fields are not in the
1547 same order as their bit position, don't treat this as constant
1548 since varasm.c can't handle it. */
1549 if (allconstant && TREE_CODE (type) == RECORD_TYPE)
1551 tree last_pos = bitsize_zero_node;
1552 tree field;
1554 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1556 tree this_pos = bit_position (field);
1558 if (TREE_CODE (this_pos) != INTEGER_CST
1559 || tree_int_cst_lt (this_pos, last_pos))
1561 allconstant = 0;
1562 break;
1565 last_pos = this_pos;
1569 result = build_constructor (type, list);
1570 TREE_CONSTANT (result) = allconstant;
1571 TREE_STATIC (result) = allconstant;
1572 TREE_SIDE_EFFECTS (result) = side_effects;
1573 TREE_READONLY (result) = TREE_READONLY (type);
1575 return result;
1578 /* Return a COMPONENT_REF to access a field that is given by COMPONENT,
1579 an IDENTIFIER_NODE giving the name of the field, or FIELD, a FIELD_DECL,
1580 for the field.
1582 We also handle the fact that we might have been passed a pointer to the
1583 actual record and know how to look for fields in variant parts. */
1585 static tree
1586 build_simple_component_ref (record_variable, component, field)
1587 tree record_variable;
1588 tree component;
1589 tree field;
1591 tree record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_variable));
1592 tree ref;
1594 if ((TREE_CODE (record_type) != RECORD_TYPE
1595 && TREE_CODE (record_type) != UNION_TYPE
1596 && TREE_CODE (record_type) != QUAL_UNION_TYPE)
1597 || TYPE_SIZE (record_type) == 0)
1598 gigi_abort (510);
1600 /* Either COMPONENT or FIELD must be specified, but not both. */
1601 if ((component != 0) == (field != 0))
1602 gigi_abort (511);
1604 /* If no field was specified, look for a field with the specified name
1605 in the current record only. */
1606 if (field == 0)
1607 for (field = TYPE_FIELDS (record_type); field;
1608 field = TREE_CHAIN (field))
1609 if (DECL_NAME (field) == component)
1610 break;
1612 if (field == 0)
1613 return 0;
1615 /* If this field is not in the specified record, see if we can find
1616 something in the record whose original field is the same as this one. */
1617 if (DECL_CONTEXT (field) != record_type)
1618 /* Check if there is a field with name COMPONENT in the record. */
1620 tree new_field;
1622 /* First loop thru normal components. */
1624 for (new_field = TYPE_FIELDS (record_type); new_field != 0;
1625 new_field = TREE_CHAIN (new_field))
1626 if (DECL_ORIGINAL_FIELD (new_field) == field
1627 || new_field == DECL_ORIGINAL_FIELD (field)
1628 || (DECL_ORIGINAL_FIELD (field) != 0
1629 && (DECL_ORIGINAL_FIELD (field)
1630 == DECL_ORIGINAL_FIELD (new_field))))
1631 break;
1633 /* Next, loop thru DECL_INTERNAL_P components if we haven't found
1634 the component in the first search. Doing this search in 2 steps
1635 is required to avoid hidden homonymous fields in the
1636 _Parent field. */
1638 if (new_field == 0)
1639 for (new_field = TYPE_FIELDS (record_type); new_field != 0;
1640 new_field = TREE_CHAIN (new_field))
1641 if (DECL_INTERNAL_P (new_field))
1643 tree field_ref
1644 = build_simple_component_ref (record_variable,
1645 NULL_TREE, new_field);
1646 ref = build_simple_component_ref (field_ref, NULL_TREE, field);
1648 if (ref != 0)
1649 return ref;
1652 field = new_field;
1655 if (field == 0)
1656 return 0;
1658 /* It would be nice to call "fold" here, but that can lose a type
1659 we need to tag a PLACEHOLDER_EXPR with, so we can't do it. */
1660 ref = build (COMPONENT_REF, TREE_TYPE (field), record_variable, field);
1662 if (TREE_READONLY (record_variable) || TREE_READONLY (field))
1663 TREE_READONLY (ref) = 1;
1664 if (TREE_THIS_VOLATILE (record_variable) || TREE_THIS_VOLATILE (field)
1665 || TYPE_VOLATILE (record_type))
1666 TREE_THIS_VOLATILE (ref) = 1;
1668 return ref;
1671 /* Like build_simple_component_ref, except that we give an error if the
1672 reference could not be found. */
1674 tree
1675 build_component_ref (record_variable, component, field)
1676 tree record_variable;
1677 tree component;
1678 tree field;
1680 tree ref = build_simple_component_ref (record_variable, component, field);
1682 if (ref != 0)
1683 return ref;
1685 /* If FIELD was specified, assume this is an invalid user field so
1686 raise constraint error. Otherwise, we can't find the type to return, so
1687 abort. */
1689 else if (field != 0)
1690 return build1 (NULL_EXPR, TREE_TYPE (field),
1691 build_call_raise (CE_Discriminant_Check_Failed));
1692 else
1693 gigi_abort (512);
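/* For instance, the fat-pointer comparison in build_binary_op above fetches
   the data pointer with
   build_component_ref (left_operand, NULL_TREE, TYPE_FIELDS (left_base_type)),
   i.e. by FIELD_DECL rather than by name.  */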
1696 /* Build a GCC tree to call an allocation or deallocation function.
1697 If GNU_OBJ is nonzero, it is an object to deallocate. Otherwise,
1698 generate an allocator.
1700 GNU_SIZE is the size of the object in bytes and ALIGN is the alignment in
1701 bits. GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the
1702 storage pool to use. If not present, malloc and free will be used except
1703 if GNAT_PROC is the "fake" value of -1, in which case we allocate the
1704 object dynamically on the stack frame. */
1706 tree
1707 build_call_alloc_dealloc (gnu_obj, gnu_size, align, gnat_proc, gnat_pool)
1708 tree gnu_obj;
1709 tree gnu_size;
1710 int align;
1711 Entity_Id gnat_proc;
1712 Entity_Id gnat_pool;
1714 tree gnu_align = size_int (align / BITS_PER_UNIT);
1716 if (TREE_CODE (gnu_size) != INTEGER_CST && contains_placeholder_p (gnu_size))
1717 gnu_size = build (WITH_RECORD_EXPR, sizetype, gnu_size,
1718 build_unary_op (INDIRECT_REF, NULL_TREE, gnu_obj));
1720 if (Present (gnat_proc))
1722 /* The storage pools are obviously always tagged types, but the
1723 secondary stack uses the same mechanism and is not tagged. */
1724 if (Is_Tagged_Type (Etype (gnat_pool)))
1726 /* The size is the third parameter; the alignment has the
1727 same type. */
1728 Entity_Id gnat_size_type
1729 = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
1730 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
1731 tree gnu_proc = gnat_to_gnu (gnat_proc);
1732 tree gnu_proc_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_proc);
1733 tree gnu_pool = gnat_to_gnu (gnat_pool);
1734 tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
1735 tree gnu_args = NULL_TREE;
1736 tree gnu_call;
1738 /* The first arg is always the address of the storage pool; next
1739 comes the address of the object, for a deallocator, then the
1740 size and alignment. */
1741 gnu_args
1742 = chainon (gnu_args, build_tree_list (NULL_TREE, gnu_pool_addr));
1744 if (gnu_obj)
1745 gnu_args
1746 = chainon (gnu_args, build_tree_list (NULL_TREE, gnu_obj));
1748 gnu_args
1749 = chainon (gnu_args,
1750 build_tree_list (NULL_TREE,
1751 convert (gnu_size_type, gnu_size)));
1752 gnu_args
1753 = chainon (gnu_args,
1754 build_tree_list (NULL_TREE,
1755 convert (gnu_size_type, gnu_align)));
1757 gnu_call = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (gnu_proc)),
1758 gnu_proc_addr, gnu_args, NULL_TREE);
1759 TREE_SIDE_EFFECTS (gnu_call) = 1;
1760 return gnu_call;
1763 /* Secondary stack case. */
1764 else
1766 /* The size is the second parameter. */
1767 Entity_Id gnat_size_type
1768 = Etype (Next_Formal (First_Formal (gnat_proc)));
1769 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
1770 tree gnu_proc = gnat_to_gnu (gnat_proc);
1771 tree gnu_proc_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_proc);
1772 tree gnu_args = NULL_TREE;
1773 tree gnu_call;
1775 /* The first arg is the address of the object, for a
1776 deallocator, then the size */
1777 if (gnu_obj)
1778 gnu_args
1779 = chainon (gnu_args, build_tree_list (NULL_TREE, gnu_obj));
1781 gnu_args
1782 = chainon (gnu_args,
1783 build_tree_list (NULL_TREE,
1784 convert (gnu_size_type, gnu_size)));
1786 gnu_call = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (gnu_proc)),
1787 gnu_proc_addr, gnu_args, NULL_TREE);
1788 TREE_SIDE_EFFECTS (gnu_call) = 1;
1789 return gnu_call;
1790 }
1791 }
1793 else if (gnu_obj)
1794 return build_call_1_expr (free_decl, gnu_obj);
1795 else if (gnat_pool == -1)
1796 {
1797 /* If the size is a constant, we can put it in the fixed portion of
1798 the stack frame to avoid the need to adjust the stack pointer. */
1799 if (TREE_CODE (gnu_size) == INTEGER_CST && ! flag_stack_check)
1800 {
1801 tree gnu_range
1802 = build_range_type (NULL_TREE, size_one_node, gnu_size);
1803 tree gnu_array_type = build_array_type (char_type_node, gnu_range);
1804 tree gnu_decl =
1805 create_var_decl (get_identifier ("RETVAL"), NULL_TREE,
1806 gnu_array_type, NULL_TREE, 0, 0, 0, 0, 0);
1808 return convert (ptr_void_type_node,
1809 build_unary_op (ADDR_EXPR, NULL_TREE, gnu_decl));
1810 }
1811 else
1812 return build (ALLOCATE_EXPR, ptr_void_type_node, gnu_size, gnu_align);
1813 }
1814 else
1815 return build_call_1_expr (malloc_decl, gnu_size);
1816 }
1818 /* Build a GCC tree to correspond to allocating an object of TYPE whose
1819 initial value is INIT, if INIT is nonzero. Convert the expression to
1820 RESULT_TYPE, which must be some type of pointer. Return the tree.
1821 GNAT_PROC and GNAT_POOL optionally give the procedure to call and
1822 the storage pool to use. */
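/* Illustrative example (assumed, not taken from the callers): for an Ada
   allocator such as

       Ptr := new String'("AB");

   TYPE is the constrained subtype of the initial value, INIT the tree for
   "AB", RESULT_TYPE the (fat or thin) pointer type of Ptr, and GNAT_PROC
   and GNAT_POOL are Empty unless a Storage_Pool applies.  */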
1824 tree
1825 build_allocator (type, init, result_type, gnat_proc, gnat_pool)
1826 tree type;
1827 tree init;
1828 tree result_type;
1829 Entity_Id gnat_proc;
1830 Entity_Id gnat_pool;
1831 {
1832 tree size = TYPE_SIZE_UNIT (type);
1833 tree result;
1835 /* If there is an initializer and it is a NULL_EXPR, just return a new one. */
1836 if (init != 0 && TREE_CODE (init) == NULL_EXPR)
1837 return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));
1839 /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
1840 sizes of the object and its template. Allocate the whole thing and
1841 fill in the parts that are known. */
1842 else if (TYPE_FAT_OR_THIN_POINTER_P (result_type))
1843 {
1844 tree template_type
1845 = (TYPE_FAT_POINTER_P (result_type)
1846 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (result_type))))
1847 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (result_type))));
1848 tree storage_type
1849 = build_unc_object_type (template_type, type,
1850 get_identifier ("ALLOC"));
1851 tree storage_ptr_type = build_pointer_type (storage_type);
1852 tree storage;
1853 tree template_cons = NULL_TREE;
1855 size = TYPE_SIZE_UNIT (storage_type);
1857 if (TREE_CODE (size) != INTEGER_CST
1858 && contains_placeholder_p (size))
1859 size = build (WITH_RECORD_EXPR, sizetype, size, init);
1861 /* If the size overflows, pass -1 so the allocator will raise
1862 Storage_Error. */
1863 if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
1864 size = ssize_int (-1);
1866 storage = build_call_alloc_dealloc (NULL_TREE, size,
1867 TYPE_ALIGN (storage_type),
1868 gnat_proc, gnat_pool);
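/* STORAGE is referenced more than once in the trees built below, so
   protect it from being evaluated more than once (presumably via a
   SAVE_EXPR) before converting it to the storage pointer type.  */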
1869 storage = convert (storage_ptr_type, protect_multiple_eval (storage));
1871 if (TREE_CODE (type) == RECORD_TYPE && TYPE_IS_PADDING_P (type))
1872 {
1873 type = TREE_TYPE (TYPE_FIELDS (type));
1875 if (init != 0)
1876 init = convert (type, init);
1877 }
1879 /* If there is an initializing expression, make a constructor for
1880 the entire object including the bounds and copy it into the
1881 object. If there is no initializing expression, just set the
1882 bounds. */
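/* In the initialized case, the constructor built below has the shape
   { <bounds template>, INIT } (layout per build_unc_object_type), and is
   assigned through the storage pointer in a single MODIFY_EXPR.  */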
1883 if (init != 0)
1884 {
1885 template_cons = tree_cons (TREE_CHAIN (TYPE_FIELDS (storage_type)),
1886 init, NULL_TREE);
1887 template_cons = tree_cons (TYPE_FIELDS (storage_type),
1888 build_template (template_type, type,
1889 init),
1890 template_cons);
1892 return convert
1893 (result_type,
1894 build (COMPOUND_EXPR, storage_ptr_type,
1895 build_binary_op
1896 (MODIFY_EXPR, storage_type,
1897 build_unary_op (INDIRECT_REF, NULL_TREE,
1898 convert (storage_ptr_type, storage)),
1899 gnat_build_constructor (storage_type, template_cons)),
1900 convert (storage_ptr_type, storage)));
1901 }
1902 else
1903 return build
1904 (COMPOUND_EXPR, result_type,
1905 build_binary_op
1906 (MODIFY_EXPR, template_type,
1907 build_component_ref
1908 (build_unary_op (INDIRECT_REF, NULL_TREE,
1909 convert (storage_ptr_type, storage)),
1910 NULL_TREE, TYPE_FIELDS (storage_type)),
1911 build_template (template_type, type, NULL_TREE)),
1912 convert (result_type, convert (storage_ptr_type, storage)));
1913 }
1915 /* If we have an initializing expression, see if its size is simpler
1916 than the size from the type. */
1917 if (init != 0 && TYPE_SIZE_UNIT (TREE_TYPE (init)) != 0
1918 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
1919 || (TREE_CODE (size) != INTEGER_CST
1920 && contains_placeholder_p (size))))
1921 size = TYPE_SIZE_UNIT (TREE_TYPE (init));
1923 /* If the size is still self-referential, reference the initializing
1924 expression, if it is present. If not, this must have been a
1925 call to allocate a library-level object, in which case we use
1926 the maximum size. */
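/* A typical instance of the latter (assumed scenario): a library-level
   object of a record type with defaulted discriminants has no INIT here,
   so max_size is used to get an upper bound that does not depend on the
   object itself.  */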
1927 if (TREE_CODE (size) != INTEGER_CST && contains_placeholder_p (size))
1928 {
1929 if (init == 0)
1930 size = max_size (size, 1);
1931 else
1932 size = build (WITH_RECORD_EXPR, sizetype, size, init);
1933 }
1935 /* If the size overflows, pass -1 so the allocator will raise
1936 Storage_Error. */
1937 if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
1938 size = ssize_int (-1);
1940 /* If this is a type whose alignment is larger than the biggest
1941 alignment we normally support (BIGGEST_ALIGNMENT) and this is in
1942 the default storage pool, make an "aligning type", allocate it,
1943 point to the field we need, and return that. */
1944 if (TYPE_ALIGN (type) > BIGGEST_ALIGNMENT
1945 && No (gnat_proc))
1946 {
1947 tree new_type = make_aligning_type (type, TYPE_ALIGN (type), size);
1949 result = build_call_alloc_dealloc (NULL_TREE, TYPE_SIZE (new_type),
1950 BIGGEST_ALIGNMENT, Empty, Empty);
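/* RESULT is used several times below, so make sure the allocation is
   evaluated only once.  */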
1951 result = save_expr (result);
1952 result = convert (build_pointer_type (new_type), result);
1953 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1954 result = build_component_ref (result, NULL_TREE,
1955 TYPE_FIELDS (new_type));
1956 result = convert (result_type,
1957 build_unary_op (ADDR_EXPR, NULL_TREE, result));
1958 }
1959 else
1960 result = convert (result_type,
1961 build_call_alloc_dealloc (NULL_TREE, size,
1962 TYPE_ALIGN (type),
1963 gnat_proc, gnat_pool));
1965 /* If we have an initial value, put the new address into a SAVE_EXPR, assign
1966 the value, and return the address. Do this with a COMPOUND_EXPR. */
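/* Roughly, and purely as an illustration:
   (tmp = <allocated address>, *tmp = INIT, tmp).  */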
1968 if (init)
1969 {
1970 result = save_expr (result);
1971 result
1972 = build (COMPOUND_EXPR, TREE_TYPE (result),
1973 build_binary_op
1974 (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (result)),
1975 build_unary_op (INDIRECT_REF, TREE_TYPE (TREE_TYPE (result)),
1976 result),
1977 init),
1978 result);
1979 }
1981 return convert (result_type, result);
1982 }
1984 /* Fill in a VMS descriptor for EXPR and return a constructor for it.
1985 GNAT_FORMAL is how we find the descriptor record. */
1987 tree
1988 fill_vms_descriptor (expr, gnat_formal)
1989 tree expr;
1990 Entity_Id gnat_formal;
1991 {
1992 tree record_type = TREE_TYPE (TREE_TYPE (get_gnu_tree (gnat_formal)));
1993 tree field;
1994 tree const_list = 0;
1996 expr = maybe_unconstrained_array (expr);
1997 gnat_mark_addressable (expr);
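/* Each field of the descriptor record carries a DECL_INITIAL giving its
   value as a (possibly self-referential) expression; resolve any
   placeholders against EXPR and collect the values.  The descriptor
   presumably records the address of EXPR, hence the call to
   gnat_mark_addressable above.  */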
1999 for (field = TYPE_FIELDS (record_type); field; field = TREE_CHAIN (field))
2000 {
2001 tree init = DECL_INITIAL (field);
2003 if (TREE_CODE (init) != INTEGER_CST
2004 && contains_placeholder_p (init))
2005 init = build (WITH_RECORD_EXPR, TREE_TYPE (init), init, expr);
2007 const_list = tree_cons (field, convert (TREE_TYPE (field), init),
2008 const_list);
2009 }
2011 return gnat_build_constructor (record_type, nreverse (const_list));
2012 }
2014 /* Indicate that we need to take the address of EXPR_NODE and that it
2015 therefore should not be allocated in a register. Return true if successful. */
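/* For instance (illustrative only): marking a COMPONENT_REF such as A.B
   descends to the underlying VAR_DECL for A, forces it out of a register
   with put_var_into_stack, and sets TREE_ADDRESSABLE on it.  */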
2017 bool
2018 gnat_mark_addressable (expr_node)
2019 tree expr_node;
2020 {
2021 while (1)
2022 switch (TREE_CODE (expr_node))
2023 {
2024 case ADDR_EXPR:
2025 case COMPONENT_REF:
2026 case ARRAY_REF:
2027 case ARRAY_RANGE_REF:
2028 case REALPART_EXPR:
2029 case IMAGPART_EXPR:
2030 case NOP_EXPR:
2031 expr_node = TREE_OPERAND (expr_node, 0);
2032 break;
2034 case CONSTRUCTOR:
2035 TREE_ADDRESSABLE (expr_node) = 1;
2036 return true;
2038 case VAR_DECL:
2039 case PARM_DECL:
2040 case RESULT_DECL:
2041 put_var_into_stack (expr_node, /*rescan=*/true);
2042 TREE_ADDRESSABLE (expr_node) = 1;
2043 return true;
2045 case FUNCTION_DECL:
2046 TREE_ADDRESSABLE (expr_node) = 1;
2047 return true;
2049 case CONST_DECL:
2050 return (DECL_CONST_CORRESPONDING_VAR (expr_node) != 0
2051 && (gnat_mark_addressable
2052 (DECL_CONST_CORRESPONDING_VAR (expr_node))));
2053 default:
2054 return true;