1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2005, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 2, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License distributed with GNAT; see file COPYING. If not, write *
19 * to the Free Software Foundation, 51 Franklin Street, Fifth Floor, *
20 * Boston, MA 02110-1301, USA. *
22 * GNAT was originally developed by the GNAT team at New York University. *
23 * Extensive contributions were provided by Ada Core Technologies Inc. *
25 ****************************************************************************/
29 #include "coretypes.h"
50 static tree
find_common_type (tree
, tree
);
51 static bool contains_save_expr_p (tree
);
52 static tree
contains_null_expr (tree
);
53 static tree
compare_arrays (tree
, tree
, tree
);
54 static tree
nonbinary_modular_operation (enum tree_code
, tree
, tree
, tree
);
55 static tree
build_simple_component_ref (tree
, tree
, tree
, bool);
57 /* Prepare expr to be an argument of a TRUTH_NOT_EXPR or other logical
60 This preparation consists of taking the ordinary representation of
61 an expression expr and producing a valid tree boolean expression
62 describing whether expr is nonzero. We could simply always do
64 build_binary_op (NE_EXPR, expr, integer_zero_node, 1),
66 but we optimize comparisons, &&, ||, and !.
68 The resulting type should always be the same as the input type.
69 This function is simpler than the corresponding C version since
70 the only possible operands will be things of Boolean type. */
73 gnat_truthvalue_conversion (tree expr
)
75 tree type
= TREE_TYPE (expr
);
77 switch (TREE_CODE (expr
))
79 case EQ_EXPR
: case NE_EXPR
: case LE_EXPR
: case GE_EXPR
:
80 case LT_EXPR
: case GT_EXPR
:
81 case TRUTH_ANDIF_EXPR
:
90 return (integer_zerop (expr
) ? convert (type
, integer_zero_node
)
91 : convert (type
, integer_one_node
));
94 return (real_zerop (expr
) ? convert (type
, integer_zero_node
)
95 : convert (type
, integer_one_node
));
98 /* Distribute the conversion into the arms of a COND_EXPR. */
100 (build3 (COND_EXPR
, type
, TREE_OPERAND (expr
, 0),
101 gnat_truthvalue_conversion (TREE_OPERAND (expr
, 1)),
102 gnat_truthvalue_conversion (TREE_OPERAND (expr
, 2))));
105 return build_binary_op (NE_EXPR
, type
, expr
,
106 convert (type
, integer_zero_node
));
110 /* Return the base type of TYPE. */
113 get_base_type (tree type
)
115 if (TREE_CODE (type
) == RECORD_TYPE
116 && TYPE_JUSTIFIED_MODULAR_P (type
))
117 type
= TREE_TYPE (TYPE_FIELDS (type
));
119 while (TREE_TYPE (type
)
120 && (TREE_CODE (type
) == INTEGER_TYPE
121 || TREE_CODE (type
) == REAL_TYPE
))
122 type
= TREE_TYPE (type
);
127 /* Likewise, but only return types known to the Ada source. */
129 get_ada_base_type (tree type
)
131 while (TREE_TYPE (type
)
132 && (TREE_CODE (type
) == INTEGER_TYPE
133 || TREE_CODE (type
) == REAL_TYPE
)
134 && !TYPE_EXTRA_SUBTYPE_P (type
))
135 type
= TREE_TYPE (type
);
140 /* EXP is a GCC tree representing an address. See if we can find how
141 strictly the object at that address is aligned. Return that alignment
142 in bits. If we don't know anything about the alignment, return 0. */
145 known_alignment (tree exp
)
147 unsigned int this_alignment
;
148 unsigned int lhs
, rhs
;
149 unsigned int type_alignment
;
151 /* For pointer expressions, we know that the designated object is always at
152 least as strictly aligned as the designated subtype, so we account for
153 both type and expression information in this case.
155 Beware that we can still get a dummy designated subtype here (e.g. Taft
156 Amendement types), in which the alignment information is meaningless and
159 We always compute a type_alignment value and return the MAX of it
160 compared with what we get from the expression tree. Just set the
161 type_alignment value to 0 when the type information is to be ignored. */
163 = ((POINTER_TYPE_P (TREE_TYPE (exp
))
164 && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp
))))
165 ? TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp
))) : 0);
167 switch (TREE_CODE (exp
))
171 case NON_LVALUE_EXPR
:
172 /* Conversions between pointers and integers don't change the alignment
173 of the underlying object. */
174 this_alignment
= known_alignment (TREE_OPERAND (exp
, 0));
179 /* If two address are added, the alignment of the result is the
180 minimum of the two alignments. */
181 lhs
= known_alignment (TREE_OPERAND (exp
, 0));
182 rhs
= known_alignment (TREE_OPERAND (exp
, 1));
183 this_alignment
= MIN (lhs
, rhs
);
187 /* The first part of this represents the lowest bit in the constant,
188 but is it in bytes, not bits. */
191 * (TREE_INT_CST_LOW (exp
) & - TREE_INT_CST_LOW (exp
)),
196 /* If we know the alignment of just one side, use it. Otherwise,
197 use the product of the alignments. */
198 lhs
= known_alignment (TREE_OPERAND (exp
, 0));
199 rhs
= known_alignment (TREE_OPERAND (exp
, 1));
201 if (lhs
== 0 || rhs
== 0)
202 this_alignment
= MIN (BIGGEST_ALIGNMENT
, MAX (lhs
, rhs
));
204 this_alignment
= MIN (BIGGEST_ALIGNMENT
, lhs
* rhs
);
208 this_alignment
= expr_align (TREE_OPERAND (exp
, 0));
216 return MAX (type_alignment
, this_alignment
);
219 /* We have a comparison or assignment operation on two types, T1 and T2,
220 which are both either array types or both record types.
221 Return the type that both operands should be converted to, if any.
222 Otherwise return zero. */
225 find_common_type (tree t1
, tree t2
)
227 /* If either type is non-BLKmode, use it. Note that we know that we will
228 not have any alignment problems since if we did the non-BLKmode
229 type could not have been used. */
230 if (TYPE_MODE (t1
) != BLKmode
)
232 else if (TYPE_MODE (t2
) != BLKmode
)
235 /* Otherwise, return the type that has a constant size. */
236 if (TREE_CONSTANT (TYPE_SIZE (t1
)))
238 else if (TREE_CONSTANT (TYPE_SIZE (t2
)))
241 /* In this case, both types have variable size. It's probably
242 best to leave the "type mismatch" because changing it could
243 case a bad self-referential reference. */
247 /* See if EXP contains a SAVE_EXPR in a position where we would
250 ??? This is a real kludge, but is probably the best approach short
251 of some very general solution. */
254 contains_save_expr_p (tree exp
)
256 switch (TREE_CODE (exp
))
261 case ADDR_EXPR
: case INDIRECT_REF
:
263 case NOP_EXPR
: case CONVERT_EXPR
: case VIEW_CONVERT_EXPR
:
264 return contains_save_expr_p (TREE_OPERAND (exp
, 0));
269 unsigned HOST_WIDE_INT ix
;
271 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp
), ix
, value
)
272 if (contains_save_expr_p (value
))
282 /* See if EXP contains a NULL_EXPR in an expression we use for sizes. Return
283 it if so. This is used to detect types whose sizes involve computations
284 that are known to raise Constraint_Error. */
287 contains_null_expr (tree exp
)
291 if (TREE_CODE (exp
) == NULL_EXPR
)
294 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
297 return contains_null_expr (TREE_OPERAND (exp
, 0));
301 tem
= contains_null_expr (TREE_OPERAND (exp
, 0));
305 return contains_null_expr (TREE_OPERAND (exp
, 1));
308 switch (TREE_CODE (exp
))
311 return contains_null_expr (TREE_OPERAND (exp
, 0));
314 tem
= contains_null_expr (TREE_OPERAND (exp
, 0));
318 tem
= contains_null_expr (TREE_OPERAND (exp
, 1));
322 return contains_null_expr (TREE_OPERAND (exp
, 2));
333 /* Return an expression tree representing an equality comparison of
334 A1 and A2, two objects of ARRAY_TYPE. The returned expression should
335 be of type RESULT_TYPE
337 Two arrays are equal in one of two ways: (1) if both have zero length
338 in some dimension (not necessarily the same dimension) or (2) if the
339 lengths in each dimension are equal and the data is equal. We perform the
340 length tests in as efficient a manner as possible. */
343 compare_arrays (tree result_type
, tree a1
, tree a2
)
345 tree t1
= TREE_TYPE (a1
);
346 tree t2
= TREE_TYPE (a2
);
347 tree result
= convert (result_type
, integer_one_node
);
348 tree a1_is_null
= convert (result_type
, integer_zero_node
);
349 tree a2_is_null
= convert (result_type
, integer_zero_node
);
350 bool length_zero_p
= false;
352 /* Process each dimension separately and compare the lengths. If any
353 dimension has a size known to be zero, set SIZE_ZERO_P to 1 to
354 suppress the comparison of the data. */
355 while (TREE_CODE (t1
) == ARRAY_TYPE
&& TREE_CODE (t2
) == ARRAY_TYPE
)
357 tree lb1
= TYPE_MIN_VALUE (TYPE_DOMAIN (t1
));
358 tree ub1
= TYPE_MAX_VALUE (TYPE_DOMAIN (t1
));
359 tree lb2
= TYPE_MIN_VALUE (TYPE_DOMAIN (t2
));
360 tree ub2
= TYPE_MAX_VALUE (TYPE_DOMAIN (t2
));
361 tree bt
= get_base_type (TREE_TYPE (lb1
));
362 tree length1
= fold (build2 (MINUS_EXPR
, bt
, ub1
, lb1
));
363 tree length2
= fold (build2 (MINUS_EXPR
, bt
, ub2
, lb2
));
366 tree comparison
, this_a1_is_null
, this_a2_is_null
;
368 /* If the length of the first array is a constant, swap our operands
369 unless the length of the second array is the constant zero.
370 Note that we have set the `length' values to the length - 1. */
371 if (TREE_CODE (length1
) == INTEGER_CST
372 && !integer_zerop (fold (build2 (PLUS_EXPR
, bt
, length2
,
373 convert (bt
, integer_one_node
)))))
375 tem
= a1
, a1
= a2
, a2
= tem
;
376 tem
= t1
, t1
= t2
, t2
= tem
;
377 tem
= lb1
, lb1
= lb2
, lb2
= tem
;
378 tem
= ub1
, ub1
= ub2
, ub2
= tem
;
379 tem
= length1
, length1
= length2
, length2
= tem
;
380 tem
= a1_is_null
, a1_is_null
= a2_is_null
, a2_is_null
= tem
;
383 /* If the length of this dimension in the second array is the constant
384 zero, we can just go inside the original bounds for the first
385 array and see if last < first. */
386 if (integer_zerop (fold (build2 (PLUS_EXPR
, bt
, length2
,
387 convert (bt
, integer_one_node
)))))
389 tree ub
= TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1
)));
390 tree lb
= TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1
)));
392 comparison
= build_binary_op (LT_EXPR
, result_type
, ub
, lb
);
393 comparison
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison
, a1
);
394 length1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1
, a1
);
396 length_zero_p
= true;
397 this_a1_is_null
= comparison
;
398 this_a2_is_null
= convert (result_type
, integer_one_node
);
401 /* If the length is some other constant value, we know that the
402 this dimension in the first array cannot be superflat, so we
403 can just use its length from the actual stored bounds. */
404 else if (TREE_CODE (length2
) == INTEGER_CST
)
406 ub1
= TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1
)));
407 lb1
= TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1
)));
408 ub2
= TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2
)));
409 lb2
= TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2
)));
410 nbt
= get_base_type (TREE_TYPE (ub1
));
413 = build_binary_op (EQ_EXPR
, result_type
,
414 build_binary_op (MINUS_EXPR
, nbt
, ub1
, lb1
),
415 build_binary_op (MINUS_EXPR
, nbt
, ub2
, lb2
));
417 /* Note that we know that UB2 and LB2 are constant and hence
418 cannot contain a PLACEHOLDER_EXPR. */
420 comparison
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison
, a1
);
421 length1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1
, a1
);
423 this_a1_is_null
= build_binary_op (LT_EXPR
, result_type
, ub1
, lb1
);
424 this_a2_is_null
= convert (result_type
, integer_zero_node
);
427 /* Otherwise compare the computed lengths. */
430 length1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1
, a1
);
431 length2
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2
, a2
);
434 = build_binary_op (EQ_EXPR
, result_type
, length1
, length2
);
437 = build_binary_op (LT_EXPR
, result_type
, length1
,
438 convert (bt
, integer_zero_node
));
440 = build_binary_op (LT_EXPR
, result_type
, length2
,
441 convert (bt
, integer_zero_node
));
444 result
= build_binary_op (TRUTH_ANDIF_EXPR
, result_type
,
447 a1_is_null
= build_binary_op (TRUTH_ORIF_EXPR
, result_type
,
448 this_a1_is_null
, a1_is_null
);
449 a2_is_null
= build_binary_op (TRUTH_ORIF_EXPR
, result_type
,
450 this_a2_is_null
, a2_is_null
);
456 /* Unless the size of some bound is known to be zero, compare the
457 data in the array. */
460 tree type
= find_common_type (TREE_TYPE (a1
), TREE_TYPE (a2
));
463 a1
= convert (type
, a1
), a2
= convert (type
, a2
);
465 result
= build_binary_op (TRUTH_ANDIF_EXPR
, result_type
, result
,
466 fold (build2 (EQ_EXPR
, result_type
, a1
, a2
)));
470 /* The result is also true if both sizes are zero. */
471 result
= build_binary_op (TRUTH_ORIF_EXPR
, result_type
,
472 build_binary_op (TRUTH_ANDIF_EXPR
, result_type
,
473 a1_is_null
, a2_is_null
),
476 /* If either operand contains SAVE_EXPRs, they have to be evaluated before
477 starting the comparison above since the place it would be otherwise
478 evaluated would be wrong. */
480 if (contains_save_expr_p (a1
))
481 result
= build2 (COMPOUND_EXPR
, result_type
, a1
, result
);
483 if (contains_save_expr_p (a2
))
484 result
= build2 (COMPOUND_EXPR
, result_type
, a2
, result
);
489 /* Compute the result of applying OP_CODE to LHS and RHS, where both are of
490 type TYPE. We know that TYPE is a modular type with a nonbinary
494 nonbinary_modular_operation (enum tree_code op_code
, tree type
, tree lhs
,
497 tree modulus
= TYPE_MODULUS (type
);
498 unsigned int needed_precision
= tree_floor_log2 (modulus
) + 1;
499 unsigned int precision
;
500 bool unsignedp
= true;
504 /* If this is an addition of a constant, convert it to a subtraction
505 of a constant since we can do that faster. */
506 if (op_code
== PLUS_EXPR
&& TREE_CODE (rhs
) == INTEGER_CST
)
507 rhs
= fold (build2 (MINUS_EXPR
, type
, modulus
, rhs
)), op_code
= MINUS_EXPR
;
509 /* For the logical operations, we only need PRECISION bits. For
510 addition and subtraction, we need one more and for multiplication we
511 need twice as many. But we never want to make a size smaller than
513 if (op_code
== PLUS_EXPR
|| op_code
== MINUS_EXPR
)
514 needed_precision
+= 1;
515 else if (op_code
== MULT_EXPR
)
516 needed_precision
*= 2;
518 precision
= MAX (needed_precision
, TYPE_PRECISION (op_type
));
520 /* Unsigned will do for everything but subtraction. */
521 if (op_code
== MINUS_EXPR
)
524 /* If our type is the wrong signedness or isn't wide enough, make a new
525 type and convert both our operands to it. */
526 if (TYPE_PRECISION (op_type
) < precision
527 || TYPE_UNSIGNED (op_type
) != unsignedp
)
529 /* Copy the node so we ensure it can be modified to make it modular. */
530 op_type
= copy_node (gnat_type_for_size (precision
, unsignedp
));
531 modulus
= convert (op_type
, modulus
);
532 SET_TYPE_MODULUS (op_type
, modulus
);
533 TYPE_MODULAR_P (op_type
) = 1;
534 lhs
= convert (op_type
, lhs
);
535 rhs
= convert (op_type
, rhs
);
538 /* Do the operation, then we'll fix it up. */
539 result
= fold (build2 (op_code
, op_type
, lhs
, rhs
));
541 /* For multiplication, we have no choice but to do a full modulus
542 operation. However, we want to do this in the narrowest
544 if (op_code
== MULT_EXPR
)
546 tree div_type
= copy_node (gnat_type_for_size (needed_precision
, 1));
547 modulus
= convert (div_type
, modulus
);
548 SET_TYPE_MODULUS (div_type
, modulus
);
549 TYPE_MODULAR_P (div_type
) = 1;
550 result
= convert (op_type
,
551 fold (build2 (TRUNC_MOD_EXPR
, div_type
,
552 convert (div_type
, result
), modulus
)));
555 /* For subtraction, add the modulus back if we are negative. */
556 else if (op_code
== MINUS_EXPR
)
558 result
= save_expr (result
);
559 result
= fold (build3 (COND_EXPR
, op_type
,
560 build2 (LT_EXPR
, integer_type_node
, result
,
561 convert (op_type
, integer_zero_node
)),
562 fold (build2 (PLUS_EXPR
, op_type
,
567 /* For the other operations, subtract the modulus if we are >= it. */
570 result
= save_expr (result
);
571 result
= fold (build3 (COND_EXPR
, op_type
,
572 build2 (GE_EXPR
, integer_type_node
,
574 fold (build2 (MINUS_EXPR
, op_type
,
579 return convert (type
, result
);
582 /* Make a binary operation of kind OP_CODE. RESULT_TYPE is the type
583 desired for the result. Usually the operation is to be performed
584 in that type. For MODIFY_EXPR and ARRAY_REF, RESULT_TYPE may be 0
585 in which case the type to be used will be derived from the operands.
587 This function is very much unlike the ones for C and C++ since we
588 have already done any type conversion and matching required. All we
589 have to do here is validate the work done by SEM and handle subtypes. */
592 build_binary_op (enum tree_code op_code
, tree result_type
,
593 tree left_operand
, tree right_operand
)
595 tree left_type
= TREE_TYPE (left_operand
);
596 tree right_type
= TREE_TYPE (right_operand
);
597 tree left_base_type
= get_base_type (left_type
);
598 tree right_base_type
= get_base_type (right_type
);
599 tree operation_type
= result_type
;
600 tree best_type
= NULL_TREE
;
603 bool has_side_effects
= false;
606 && TREE_CODE (operation_type
) == RECORD_TYPE
607 && TYPE_JUSTIFIED_MODULAR_P (operation_type
))
608 operation_type
= TREE_TYPE (TYPE_FIELDS (operation_type
));
611 && !AGGREGATE_TYPE_P (operation_type
)
612 && TYPE_EXTRA_SUBTYPE_P (operation_type
))
613 operation_type
= get_base_type (operation_type
);
615 modulus
= (operation_type
&& TREE_CODE (operation_type
) == INTEGER_TYPE
616 && TYPE_MODULAR_P (operation_type
)
617 ? TYPE_MODULUS (operation_type
) : 0);
622 /* If there were any integral or pointer conversions on LHS, remove
623 them; we'll be putting them back below if needed. Likewise for
624 conversions between array and record types. But don't do this if
625 the right operand is not BLKmode (for packed arrays)
626 unless we are not changing the mode. */
627 while ((TREE_CODE (left_operand
) == CONVERT_EXPR
628 || TREE_CODE (left_operand
) == NOP_EXPR
629 || TREE_CODE (left_operand
) == VIEW_CONVERT_EXPR
)
630 && (((INTEGRAL_TYPE_P (left_type
)
631 || POINTER_TYPE_P (left_type
))
632 && (INTEGRAL_TYPE_P (TREE_TYPE
633 (TREE_OPERAND (left_operand
, 0)))
634 || POINTER_TYPE_P (TREE_TYPE
635 (TREE_OPERAND (left_operand
, 0)))))
636 || (((TREE_CODE (left_type
) == RECORD_TYPE
637 /* Don't remove conversions to justified modular
639 && !TYPE_JUSTIFIED_MODULAR_P (left_type
))
640 || TREE_CODE (left_type
) == ARRAY_TYPE
)
641 && ((TREE_CODE (TREE_TYPE
642 (TREE_OPERAND (left_operand
, 0)))
644 || (TREE_CODE (TREE_TYPE
645 (TREE_OPERAND (left_operand
, 0)))
647 && (TYPE_MODE (right_type
) == BLKmode
648 || (TYPE_MODE (left_type
)
649 == TYPE_MODE (TREE_TYPE
651 (left_operand
, 0))))))))
653 left_operand
= TREE_OPERAND (left_operand
, 0);
654 left_type
= TREE_TYPE (left_operand
);
658 operation_type
= left_type
;
660 /* If we are copying one array or record to another, find the best type
662 if (((TREE_CODE (left_type
) == ARRAY_TYPE
663 && TREE_CODE (right_type
) == ARRAY_TYPE
)
664 || (TREE_CODE (left_type
) == RECORD_TYPE
665 && TREE_CODE (right_type
) == RECORD_TYPE
))
666 && (best_type
= find_common_type (left_type
, right_type
)))
667 operation_type
= best_type
;
669 /* If a class-wide type may be involved, force use of the RHS type. */
670 if ((TREE_CODE (right_type
) == RECORD_TYPE
671 || TREE_CODE (right_type
) == UNION_TYPE
)
672 && TYPE_ALIGN_OK (right_type
))
673 operation_type
= right_type
;
675 /* Ensure everything on the LHS is valid. If we have a field reference,
676 strip anything that get_inner_reference can handle. Then remove any
677 conversions with type types having the same code and mode. Mark
678 VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE. When done, we must have
679 either an INDIRECT_REF or a decl. */
680 result
= left_operand
;
683 tree restype
= TREE_TYPE (result
);
685 if (TREE_CODE (result
) == COMPONENT_REF
686 || TREE_CODE (result
) == ARRAY_REF
687 || TREE_CODE (result
) == ARRAY_RANGE_REF
)
688 while (handled_component_p (result
))
689 result
= TREE_OPERAND (result
, 0);
690 else if (TREE_CODE (result
) == REALPART_EXPR
691 || TREE_CODE (result
) == IMAGPART_EXPR
692 || ((TREE_CODE (result
) == NOP_EXPR
693 || TREE_CODE (result
) == CONVERT_EXPR
)
694 && (((TREE_CODE (restype
)
695 == TREE_CODE (TREE_TYPE
696 (TREE_OPERAND (result
, 0))))
697 && (TYPE_MODE (TREE_TYPE
698 (TREE_OPERAND (result
, 0)))
699 == TYPE_MODE (restype
)))
700 || TYPE_ALIGN_OK (restype
))))
701 result
= TREE_OPERAND (result
, 0);
702 else if (TREE_CODE (result
) == VIEW_CONVERT_EXPR
)
704 TREE_ADDRESSABLE (result
) = 1;
705 result
= TREE_OPERAND (result
, 0);
711 gcc_assert (TREE_CODE (result
) == INDIRECT_REF
712 || TREE_CODE (result
) == NULL_EXPR
|| DECL_P (result
));
714 /* Convert the right operand to the operation type unless
715 it is either already of the correct type or if the type
716 involves a placeholder, since the RHS may not have the same
718 if (operation_type
!= right_type
719 && (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type
))))
721 right_operand
= convert (operation_type
, right_operand
);
722 right_type
= operation_type
;
725 /* If the left operand is not the same type as the operation type,
726 surround it in a VIEW_CONVERT_EXPR. */
727 if (left_type
!= operation_type
)
728 left_operand
= unchecked_convert (operation_type
, left_operand
, false);
730 has_side_effects
= true;
736 operation_type
= TREE_TYPE (left_type
);
738 /* ... fall through ... */
740 case ARRAY_RANGE_REF
:
742 /* First convert the right operand to its base type. This will
743 prevent unneeded signedness conversions when sizetype is wider than
745 right_operand
= convert (right_base_type
, right_operand
);
746 right_operand
= convert (TYPE_DOMAIN (left_type
), right_operand
);
748 if (!TREE_CONSTANT (right_operand
)
749 || !TREE_CONSTANT (TYPE_MIN_VALUE (right_type
)))
750 gnat_mark_addressable (left_operand
);
759 gcc_assert (!POINTER_TYPE_P (left_type
));
761 /* ... fall through ... */
765 /* If either operand is a NULL_EXPR, just return a new one. */
766 if (TREE_CODE (left_operand
) == NULL_EXPR
)
767 return build2 (op_code
, result_type
,
768 build1 (NULL_EXPR
, integer_type_node
,
769 TREE_OPERAND (left_operand
, 0)),
772 else if (TREE_CODE (right_operand
) == NULL_EXPR
)
773 return build2 (op_code
, result_type
,
774 build1 (NULL_EXPR
, integer_type_node
,
775 TREE_OPERAND (right_operand
, 0)),
778 /* If either object is a justified modular types, get the
779 fields from within. */
780 if (TREE_CODE (left_type
) == RECORD_TYPE
781 && TYPE_JUSTIFIED_MODULAR_P (left_type
))
783 left_operand
= convert (TREE_TYPE (TYPE_FIELDS (left_type
)),
785 left_type
= TREE_TYPE (left_operand
);
786 left_base_type
= get_base_type (left_type
);
789 if (TREE_CODE (right_type
) == RECORD_TYPE
790 && TYPE_JUSTIFIED_MODULAR_P (right_type
))
792 right_operand
= convert (TREE_TYPE (TYPE_FIELDS (right_type
)),
794 right_type
= TREE_TYPE (right_operand
);
795 right_base_type
= get_base_type (right_type
);
798 /* If both objects are arrays, compare them specially. */
799 if ((TREE_CODE (left_type
) == ARRAY_TYPE
800 || (TREE_CODE (left_type
) == INTEGER_TYPE
801 && TYPE_HAS_ACTUAL_BOUNDS_P (left_type
)))
802 && (TREE_CODE (right_type
) == ARRAY_TYPE
803 || (TREE_CODE (right_type
) == INTEGER_TYPE
804 && TYPE_HAS_ACTUAL_BOUNDS_P (right_type
))))
806 result
= compare_arrays (result_type
, left_operand
, right_operand
);
808 if (op_code
== NE_EXPR
)
809 result
= invert_truthvalue (result
);
811 gcc_assert (op_code
== EQ_EXPR
);
816 /* Otherwise, the base types must be the same unless the objects are
817 records. If we have records, use the best type and convert both
818 operands to that type. */
819 if (left_base_type
!= right_base_type
)
821 if (TREE_CODE (left_base_type
) == RECORD_TYPE
822 && TREE_CODE (right_base_type
) == RECORD_TYPE
)
824 /* The only way these are permitted to be the same is if both
825 types have the same name. In that case, one of them must
826 not be self-referential. Use that one as the best type.
827 Even better is if one is of fixed size. */
828 best_type
= NULL_TREE
;
830 gcc_assert (TYPE_NAME (left_base_type
)
831 && (TYPE_NAME (left_base_type
)
832 == TYPE_NAME (right_base_type
)));
834 if (TREE_CONSTANT (TYPE_SIZE (left_base_type
)))
835 best_type
= left_base_type
;
836 else if (TREE_CONSTANT (TYPE_SIZE (right_base_type
)))
837 best_type
= right_base_type
;
838 else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type
)))
839 best_type
= left_base_type
;
840 else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type
)))
841 best_type
= right_base_type
;
845 left_operand
= convert (best_type
, left_operand
);
846 right_operand
= convert (best_type
, right_operand
);
852 /* If we are comparing a fat pointer against zero, we need to
853 just compare the data pointer. */
854 else if (TYPE_FAT_POINTER_P (left_base_type
)
855 && TREE_CODE (right_operand
) == CONSTRUCTOR
856 && integer_zerop (VEC_index (constructor_elt
,
857 CONSTRUCTOR_ELTS (right_operand
),
861 right_operand
= build_component_ref (left_operand
, NULL_TREE
,
862 TYPE_FIELDS (left_base_type
),
864 left_operand
= convert (TREE_TYPE (right_operand
),
869 left_operand
= convert (left_base_type
, left_operand
);
870 right_operand
= convert (right_base_type
, right_operand
);
876 case PREINCREMENT_EXPR
:
877 case PREDECREMENT_EXPR
:
878 case POSTINCREMENT_EXPR
:
879 case POSTDECREMENT_EXPR
:
880 /* In these, the result type and the left operand type should be the
881 same. Do the operation in the base type of those and convert the
882 right operand (which is an integer) to that type.
884 Note that these operations are only used in loop control where
885 we guarantee that no overflow can occur. So nothing special need
886 be done for modular types. */
888 gcc_assert (left_type
== result_type
);
889 operation_type
= get_base_type (result_type
);
890 left_operand
= convert (operation_type
, left_operand
);
891 right_operand
= convert (operation_type
, right_operand
);
892 has_side_effects
= true;
900 /* The RHS of a shift can be any type. Also, ignore any modulus
901 (we used to abort, but this is needed for unchecked conversion
902 to modular types). Otherwise, processing is the same as normal. */
903 gcc_assert (operation_type
== left_base_type
);
905 left_operand
= convert (operation_type
, left_operand
);
908 case TRUTH_ANDIF_EXPR
:
909 case TRUTH_ORIF_EXPR
:
913 left_operand
= gnat_truthvalue_conversion (left_operand
);
914 right_operand
= gnat_truthvalue_conversion (right_operand
);
920 /* For binary modulus, if the inputs are in range, so are the
922 if (modulus
&& integer_pow2p (modulus
))
928 gcc_assert (TREE_TYPE (result_type
) == left_base_type
929 && TREE_TYPE (result_type
) == right_base_type
);
930 left_operand
= convert (left_base_type
, left_operand
);
931 right_operand
= convert (right_base_type
, right_operand
);
934 case TRUNC_DIV_EXPR
: case TRUNC_MOD_EXPR
:
935 case CEIL_DIV_EXPR
: case CEIL_MOD_EXPR
:
936 case FLOOR_DIV_EXPR
: case FLOOR_MOD_EXPR
:
937 case ROUND_DIV_EXPR
: case ROUND_MOD_EXPR
:
938 /* These always produce results lower than either operand. */
944 /* The result type should be the same as the base types of the
945 both operands (and they should be the same). Convert
946 everything to the result type. */
948 gcc_assert (operation_type
== left_base_type
949 && left_base_type
== right_base_type
);
950 left_operand
= convert (operation_type
, left_operand
);
951 right_operand
= convert (operation_type
, right_operand
);
954 if (modulus
&& !integer_pow2p (modulus
))
956 result
= nonbinary_modular_operation (op_code
, operation_type
,
957 left_operand
, right_operand
);
960 /* If either operand is a NULL_EXPR, just return a new one. */
961 else if (TREE_CODE (left_operand
) == NULL_EXPR
)
962 return build1 (NULL_EXPR
, operation_type
, TREE_OPERAND (left_operand
, 0));
963 else if (TREE_CODE (right_operand
) == NULL_EXPR
)
964 return build1 (NULL_EXPR
, operation_type
, TREE_OPERAND (right_operand
, 0));
965 else if (op_code
== ARRAY_REF
|| op_code
== ARRAY_RANGE_REF
)
966 result
= fold (build4 (op_code
, operation_type
, left_operand
,
967 right_operand
, NULL_TREE
, NULL_TREE
));
970 = fold (build2 (op_code
, operation_type
, left_operand
, right_operand
));
972 TREE_SIDE_EFFECTS (result
) |= has_side_effects
;
973 TREE_CONSTANT (result
)
974 |= (TREE_CONSTANT (left_operand
) & TREE_CONSTANT (right_operand
)
975 && op_code
!= ARRAY_REF
&& op_code
!= ARRAY_RANGE_REF
);
977 if ((op_code
== ARRAY_REF
|| op_code
== ARRAY_RANGE_REF
)
978 && TYPE_VOLATILE (operation_type
))
979 TREE_THIS_VOLATILE (result
) = 1;
981 /* If we are working with modular types, perform the MOD operation
982 if something above hasn't eliminated the need for it. */
984 result
= fold (build2 (FLOOR_MOD_EXPR
, operation_type
, result
,
985 convert (operation_type
, modulus
)));
987 if (result_type
&& result_type
!= operation_type
)
988 result
= convert (result_type
, result
);
993 /* Similar, but for unary operations. */
996 build_unary_op (enum tree_code op_code
, tree result_type
, tree operand
)
998 tree type
= TREE_TYPE (operand
);
999 tree base_type
= get_base_type (type
);
1000 tree operation_type
= result_type
;
1002 bool side_effects
= false;
1005 && TREE_CODE (operation_type
) == RECORD_TYPE
1006 && TYPE_JUSTIFIED_MODULAR_P (operation_type
))
1007 operation_type
= TREE_TYPE (TYPE_FIELDS (operation_type
));
1010 && !AGGREGATE_TYPE_P (operation_type
)
1011 && TYPE_EXTRA_SUBTYPE_P (operation_type
))
1012 operation_type
= get_base_type (operation_type
);
1018 if (!operation_type
)
1019 result_type
= operation_type
= TREE_TYPE (type
);
1021 gcc_assert (result_type
== TREE_TYPE (type
));
1023 result
= fold (build1 (op_code
, operation_type
, operand
));
1026 case TRUTH_NOT_EXPR
:
1027 gcc_assert (result_type
== base_type
);
1028 result
= invert_truthvalue (gnat_truthvalue_conversion (operand
));
1031 case ATTR_ADDR_EXPR
:
1033 switch (TREE_CODE (operand
))
1036 case UNCONSTRAINED_ARRAY_REF
:
1037 result
= TREE_OPERAND (operand
, 0);
1039 /* Make sure the type here is a pointer, not a reference.
1040 GCC wants pointer types for function addresses. */
1042 result_type
= build_pointer_type (type
);
1047 TREE_TYPE (result
) = type
= build_pointer_type (type
);
1051 case ARRAY_RANGE_REF
:
1054 /* If this is for 'Address, find the address of the prefix and
1055 add the offset to the field. Otherwise, do this the normal
1057 if (op_code
== ATTR_ADDR_EXPR
)
1059 HOST_WIDE_INT bitsize
;
1060 HOST_WIDE_INT bitpos
;
1062 enum machine_mode mode
;
1063 int unsignedp
, volatilep
;
1065 inner
= get_inner_reference (operand
, &bitsize
, &bitpos
, &offset
,
1066 &mode
, &unsignedp
, &volatilep
,
1069 /* If INNER is a padding type whose field has a self-referential
1070 size, convert to that inner type. We know the offset is zero
1071 and we need to have that type visible. */
1072 if (TREE_CODE (TREE_TYPE (inner
)) == RECORD_TYPE
1073 && TYPE_IS_PADDING_P (TREE_TYPE (inner
))
1074 && (CONTAINS_PLACEHOLDER_P
1075 (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS
1076 (TREE_TYPE (inner
)))))))
1077 inner
= convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner
))),
1080 /* Compute the offset as a byte offset from INNER. */
1082 offset
= size_zero_node
;
1084 if (bitpos
% BITS_PER_UNIT
!= 0)
1086 ("taking address of object not aligned on storage unit?",
1089 offset
= size_binop (PLUS_EXPR
, offset
,
1090 size_int (bitpos
/ BITS_PER_UNIT
));
1092 /* Take the address of INNER, convert the offset to void *, and
1093 add then. It will later be converted to the desired result
1095 inner
= build_unary_op (ADDR_EXPR
, NULL_TREE
, inner
);
1096 inner
= convert (ptr_void_type_node
, inner
);
1097 offset
= convert (ptr_void_type_node
, offset
);
1098 result
= build_binary_op (PLUS_EXPR
, ptr_void_type_node
,
1100 result
= convert (build_pointer_type (TREE_TYPE (operand
)),
1107 /* If this is just a constructor for a padded record, we can
1108 just take the address of the single field and convert it to
1109 a pointer to our type. */
1110 if (TREE_CODE (type
) == RECORD_TYPE
&& TYPE_IS_PADDING_P (type
))
1112 result
= (VEC_index (constructor_elt
,
1113 CONSTRUCTOR_ELTS (operand
),
1117 result
= convert (build_pointer_type (TREE_TYPE (operand
)),
1118 build_unary_op (ADDR_EXPR
, NULL_TREE
, result
));
1125 if (AGGREGATE_TYPE_P (type
)
1126 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand
, 0))))
1127 return build_unary_op (ADDR_EXPR
, result_type
,
1128 TREE_OPERAND (operand
, 0));
1130 /* ... fallthru ... */
1132 case VIEW_CONVERT_EXPR
:
1133 /* If this just a variant conversion or if the conversion doesn't
1134 change the mode, get the result type from this type and go down.
1135 This is needed for conversions of CONST_DECLs, to eventually get
1136 to the address of their CORRESPONDING_VARs. */
1137 if ((TYPE_MAIN_VARIANT (type
)
1138 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (operand
, 0))))
1139 || (TYPE_MODE (type
) != BLKmode
1140 && (TYPE_MODE (type
)
1141 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand
, 0))))))
1142 return build_unary_op (ADDR_EXPR
,
1143 (result_type
? result_type
1144 : build_pointer_type (type
)),
1145 TREE_OPERAND (operand
, 0));
1149 operand
= DECL_CONST_CORRESPONDING_VAR (operand
);
1151 /* ... fall through ... */
1156 /* If we are taking the address of a padded record whose field is
1157 contains a template, take the address of the template. */
1158 if (TREE_CODE (type
) == RECORD_TYPE
1159 && TYPE_IS_PADDING_P (type
)
1160 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type
))) == RECORD_TYPE
1161 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type
))))
1163 type
= TREE_TYPE (TYPE_FIELDS (type
));
1164 operand
= convert (type
, operand
);
1167 if (type
!= error_mark_node
)
1168 operation_type
= build_pointer_type (type
);
1170 gnat_mark_addressable (operand
);
1171 result
= fold (build1 (ADDR_EXPR
, operation_type
, operand
));
1174 TREE_CONSTANT (result
) = staticp (operand
) || TREE_CONSTANT (operand
);
1178 /* If we want to refer to an entire unconstrained array,
1179 make up an expression to do so. This will never survive to
1180 the backend. If TYPE is a thin pointer, first convert the
1181 operand to a fat pointer. */
1182 if (TYPE_THIN_POINTER_P (type
)
1183 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type
)))
1186 = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type
))),
1188 type
= TREE_TYPE (operand
);
1191 if (TYPE_FAT_POINTER_P (type
))
1193 result
= build1 (UNCONSTRAINED_ARRAY_REF
,
1194 TYPE_UNCONSTRAINED_ARRAY (type
), operand
);
1195 TREE_READONLY (result
) = TREE_STATIC (result
)
1196 = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type
));
1198 else if (TREE_CODE (operand
) == ADDR_EXPR
)
1199 result
= TREE_OPERAND (operand
, 0);
1203 result
= fold (build1 (op_code
, TREE_TYPE (type
), operand
));
1204 TREE_READONLY (result
) = TYPE_READONLY (TREE_TYPE (type
));
1208 = (!TYPE_FAT_POINTER_P (type
) && TYPE_VOLATILE (TREE_TYPE (type
)));
1214 tree modulus
= ((operation_type
1215 && TREE_CODE (operation_type
) == INTEGER_TYPE
1216 && TYPE_MODULAR_P (operation_type
))
1217 ? TYPE_MODULUS (operation_type
) : 0);
1218 int mod_pow2
= modulus
&& integer_pow2p (modulus
);
1220 /* If this is a modular type, there are various possibilities
1221 depending on the operation and whether the modulus is a
1222 power of two or not. */
1226 gcc_assert (operation_type
== base_type
);
1227 operand
= convert (operation_type
, operand
);
1229 /* The fastest in the negate case for binary modulus is
1230 the straightforward code; the TRUNC_MOD_EXPR below
1231 is an AND operation. */
1232 if (op_code
== NEGATE_EXPR
&& mod_pow2
)
1233 result
= fold (build2 (TRUNC_MOD_EXPR
, operation_type
,
1234 fold (build1 (NEGATE_EXPR
, operation_type
,
1238 /* For nonbinary negate case, return zero for zero operand,
1239 else return the modulus minus the operand. If the modulus
1240 is a power of two minus one, we can do the subtraction
1241 as an XOR since it is equivalent and faster on most machines. */
1242 else if (op_code
== NEGATE_EXPR
&& !mod_pow2
)
1244 if (integer_pow2p (fold (build2 (PLUS_EXPR
, operation_type
,
1246 convert (operation_type
,
1247 integer_one_node
)))))
1248 result
= fold (build2 (BIT_XOR_EXPR
, operation_type
,
1251 result
= fold (build2 (MINUS_EXPR
, operation_type
,
1254 result
= fold (build3 (COND_EXPR
, operation_type
,
1255 fold (build2 (NE_EXPR
,
1260 integer_zero_node
))),
1265 /* For the NOT cases, we need a constant equal to
1266 the modulus minus one. For a binary modulus, we
1267 XOR against the constant and subtract the operand from
1268 that constant for nonbinary modulus. */
1270 tree cnst
= fold (build2 (MINUS_EXPR
, operation_type
, modulus
,
1271 convert (operation_type
,
1272 integer_one_node
)));
1275 result
= fold (build2 (BIT_XOR_EXPR
, operation_type
,
1278 result
= fold (build2 (MINUS_EXPR
, operation_type
,
1286 /* ... fall through ... */
1289 gcc_assert (operation_type
== base_type
);
1290 result
= fold (build1 (op_code
, operation_type
, convert (operation_type
,
1296 TREE_SIDE_EFFECTS (result
) = 1;
1297 if (TREE_CODE (result
) == INDIRECT_REF
)
1298 TREE_THIS_VOLATILE (result
) = TYPE_VOLATILE (TREE_TYPE (result
));
1301 if (result_type
&& TREE_TYPE (result
) != result_type
)
1302 result
= convert (result_type
, result
);
1307 /* Similar, but for COND_EXPR. */
/* Build a COND_EXPR choosing between TRUE_OPERAND and FALSE_OPERAND
   depending on CONDITION_OPERAND, with both arms converted to
   RESULT_TYPE.  For an unconstrained or self-referentially sized
   result type, the arms are taken by address and the final result is
   dereferenced.  NOTE(review): intermediate source lines appear
   elided in this extraction (gaps in the original line numbering) —
   confirm details against the complete file.  */
1310 build_cond_expr (tree result_type
, tree condition_operand
,
1311 tree true_operand
, tree false_operand
)
1314 bool addr_p
= false;
1316 /* The front-end verifies that result, true and false operands have same base
1317 type. Convert everything to the result type. */
1319 true_operand
= convert (result_type
, true_operand
);
1320 false_operand
= convert (result_type
, false_operand
);
1322 /* If the result type is unconstrained, take the address of
1323 the operands and then dereference our result. */
1324 if (TREE_CODE (result_type
) == UNCONSTRAINED_ARRAY_TYPE
1325 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type
)))
1328 result_type
= build_pointer_type (result_type
);
1329 true_operand
= build_unary_op (ADDR_EXPR
, result_type
, true_operand
);
1330 false_operand
= build_unary_op (ADDR_EXPR
, result_type
, false_operand
);
1333 result
= fold (build3 (COND_EXPR
, result_type
, condition_operand
,
1334 true_operand
, false_operand
));
1336 /* If either operand is a SAVE_EXPR (possibly surrounded by
1337 arithmetic), make sure it gets done. */
1338 true_operand
= skip_simple_arithmetic (true_operand
);
1339 false_operand
= skip_simple_arithmetic (false_operand
);
1341 if (TREE_CODE (true_operand
) == SAVE_EXPR
)
1342 result
= build2 (COMPOUND_EXPR
, result_type
, true_operand
, result
);
1344 if (TREE_CODE (false_operand
) == SAVE_EXPR
)
1345 result
= build2 (COMPOUND_EXPR
, result_type
, false_operand
, result
);
1347 /* ??? Seems the code above is wrong, as it may move ahead of the COND
1348 SAVE_EXPRs with side effects and not shared by both arms. */
/* Undo the by-address handling set up above for unconstrained types.  */
1351 result
= build_unary_op (INDIRECT_REF
, NULL_TREE
, result
);
1356 /* Similar, but for RETURN_EXPR. If RESULT_DECL is non-zero, build
1357 a RETURN_EXPR around the assignment of RET_VAL to RESULT_DECL.
1358 If RESULT_DECL is zero, build a bare RETURN_EXPR. */
/* Build a RETURN_EXPR.  If RESULT_DECL is nonzero, wrap a MODIFY_EXPR
   assigning RET_VAL to RESULT_DECL; otherwise build a bare RETURN_EXPR
   (result_expr set to NULL_TREE).  The RETURN_EXPR itself is always of
   void type.  */
1361 build_return_expr (tree result_decl
, tree ret_val
)
1367 /* The gimplifier explicitly enforces the following invariant:
1376 As a consequence, type-homogeneity dictates that we use the type
1377 of the RESULT_DECL as the operation type. */
1379 tree operation_type
= TREE_TYPE (result_decl
);
1381 /* Convert the right operand to the operation type. Note that
1382 it's the same transformation as in the MODIFY_EXPR case of
1383 build_binary_op with the additional guarantee that the type
1384 cannot involve a placeholder, since otherwise the function
1385 would use the "target pointer" return mechanism. */
1387 if (operation_type
!= TREE_TYPE (ret_val
))
1388 ret_val
= convert (operation_type
, ret_val
);
1391 = build2 (MODIFY_EXPR
, operation_type
, result_decl
, ret_val
);
/* No RESULT_DECL: a bare return.  */
1394 result_expr
= NULL_TREE
;
1396 return build1 (RETURN_EXPR
, void_type_node
, result_expr
);
1399 /* Build a CALL_EXPR to call FUNDECL with one argument, ARG. Return
/* Build a CALL_EXPR invoking FUNDECL with the single argument ARG.
   The call is marked as having side effects.  */
1403 build_call_1_expr (tree fundecl
, tree arg
)
1405 tree call
= build3 (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fundecl
)),
1406 build_unary_op (ADDR_EXPR
, NULL_TREE
, fundecl
),
1407 chainon (NULL_TREE
, build_tree_list (NULL_TREE
, arg
)),
1410 TREE_SIDE_EFFECTS (call
) = 1;
1415 /* Build a CALL_EXPR to call FUNDECL with two arguments, ARG1 & ARG2. Return
/* Build a CALL_EXPR invoking FUNDECL with the two arguments ARG1 and
   ARG2, chained in order.  The call is marked as having side effects.  */
1419 build_call_2_expr (tree fundecl
, tree arg1
, tree arg2
)
1421 tree call
= build3 (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fundecl
)),
1422 build_unary_op (ADDR_EXPR
, NULL_TREE
, fundecl
),
1423 chainon (chainon (NULL_TREE
,
1424 build_tree_list (NULL_TREE
, arg1
)),
1425 build_tree_list (NULL_TREE
, arg2
)),
1428 TREE_SIDE_EFFECTS (call
) = 1;
1433 /* Likewise to call FUNDECL with no arguments. */
/* Build a CALL_EXPR invoking FUNDECL with no arguments.  The call is
   marked as having side effects.  */
1436 build_call_0_expr (tree fundecl
)
1438 tree call
= build3 (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fundecl
)),
1439 build_unary_op (ADDR_EXPR
, NULL_TREE
, fundecl
),
1440 NULL_TREE
, NULL_TREE
);
1442 TREE_SIDE_EFFECTS (call
) = 1;
1447 /* Call a function that raises an exception and pass the line number and file
1448 name, if requested. MSG says which exception function to call.
1450 GNAT_NODE is the gnat node conveying the source location for which the
1451 error should be signaled, or Empty in which case the error is signaled on
1452 the current ref_file_name/input_line. */
/* Build a call to the exception-raising routine selected by MSG (an
   index into gnat_raise_decls), passing the source file name and line
   number derived from GNAT_NODE, or the current input location when
   GNAT_NODE is Empty.  Location information is suppressed when
   Debug_Flag_NN or Exception_Locations_Suppressed is set.  */
1455 build_call_raise (int msg
, Node_Id gnat_node
)
1457 tree fndecl
= gnat_raise_decls
[msg
];
1460 = (Debug_Flag_NN
|| Exception_Locations_Suppressed
)
1462 : (gnat_node
!= Empty
)
1463 ? IDENTIFIER_POINTER
1464 (get_identifier (Get_Name_String
1466 (Get_Source_File_Index (Sloc (gnat_node
))))))
/* Build a string tree for the file name (len counts the NUL).  */
1469 int len
= strlen (str
) + 1;
1470 tree filename
= build_string (len
, str
);
1473 = (gnat_node
!= Empty
)
1474 ? Get_Logical_Line_Number (Sloc(gnat_node
)) : input_line
;
1476 TREE_TYPE (filename
)
1477 = build_array_type (char_type_node
,
1478 build_index_type (build_int_cst (NULL_TREE
, len
)));
/* Call the raise routine with the file name address and line number.  */
1481 build_call_2_expr (fndecl
,
1482 build1 (ADDR_EXPR
, build_pointer_type (char_type_node
),
1484 build_int_cst (NULL_TREE
, line_number
));
1487 /* qsort comparer for the bit positions of two constructor elements
1488 for record components. */
/* qsort comparison callback: order two constructor elements (TREE_LIST
   nodes whose TREE_PURPOSE is a FIELD_DECL) by increasing bit position
   of their fields.  */
1491 compare_elmt_bitpos (const PTR rt1
, const PTR rt2
)
1493 tree elmt1
= * (tree
*) rt1
;
1494 tree elmt2
= * (tree
*) rt2
;
1496 tree pos_field1
= bit_position (TREE_PURPOSE (elmt1
));
1497 tree pos_field2
= bit_position (TREE_PURPOSE (elmt2
));
1499 if (tree_int_cst_equal (pos_field1
, pos_field2
))
1501 else if (tree_int_cst_lt (pos_field1
, pos_field2
))
1507 /* Return a CONSTRUCTOR of TYPE whose list is LIST. */
/* Return a CONSTRUCTOR of TYPE built from the TREE_LIST LIST.  Scans
   the elements to compute constancy/side-effect flags, propagates a
   NULL_EXPR found in a field size, and sorts all-constant record
   elements by bit position so the constructor can be emitted as
   static data.  */
1510 gnat_build_constructor (tree type
, tree list
)
1514 bool allconstant
= (TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
);
1515 bool side_effects
= false;
1518 /* Scan the elements to see if they are all constant or if any has side
1519 effects, to let us set global flags on the resulting constructor. Count
1520 the elements along the way for possible sorting purposes below. */
1521 for (n_elmts
= 0, elmt
= list
; elmt
; elmt
= TREE_CHAIN (elmt
), n_elmts
++)
1523 if (!TREE_CONSTANT (TREE_VALUE (elmt
))
1524 || (TREE_CODE (type
) == RECORD_TYPE
1525 && DECL_BIT_FIELD (TREE_PURPOSE (elmt
))
1526 && TREE_CODE (TREE_VALUE (elmt
)) != INTEGER_CST
)
1527 || !initializer_constant_valid_p (TREE_VALUE (elmt
),
1528 TREE_TYPE (TREE_VALUE (elmt
))))
1529 allconstant
= false;
1531 if (TREE_SIDE_EFFECTS (TREE_VALUE (elmt
)))
1532 side_effects
= true;
1534 /* Propagate a NULL_EXPR from the size of the type. We won't ever
1535 be executing the code we generate here in that case, but handle it
1536 specially to avoid the compiler blowing up. */
1537 if (TREE_CODE (type
) == RECORD_TYPE
1539 = contains_null_expr (DECL_SIZE (TREE_PURPOSE (elmt
))))))
1540 return build1 (NULL_EXPR
, type
, TREE_OPERAND (result
, 0));
1543 /* For record types with constant components only, sort field list
1544 by increasing bit position. This is necessary to ensure the
1545 constructor can be output as static data, which the gimplifier
1546 might force in various circumstances. */
1547 if (allconstant
&& TREE_CODE (type
) == RECORD_TYPE
&& n_elmts
> 1)
1549 /* Fill an array with an element tree per index, and ask qsort to order
1550 them according to what a bitpos comparison function says. */
1552 tree
*gnu_arr
= (tree
*) alloca (sizeof (tree
) * n_elmts
);
1555 for (i
= 0, elmt
= list
; elmt
; elmt
= TREE_CHAIN (elmt
), i
++)
1558 qsort (gnu_arr
, n_elmts
, sizeof (tree
), compare_elmt_bitpos
);
1560 /* Then reconstruct the list from the sorted array contents. */
1563 for (i
= n_elmts
- 1; i
>= 0; i
--)
1565 TREE_CHAIN (gnu_arr
[i
]) = list
;
/* Build the constructor and set the global flags computed above.  */
1570 result
= build_constructor_from_list (type
, list
);
1571 TREE_CONSTANT (result
) = TREE_INVARIANT (result
)
1572 = TREE_STATIC (result
) = allconstant
;
1573 TREE_SIDE_EFFECTS (result
) = side_effects
;
1574 TREE_READONLY (result
) = TYPE_READONLY (type
) || allconstant
;
1578 /* Return a COMPONENT_REF to access a field that is given by COMPONENT,
1579 an IDENTIFIER_NODE giving the name of the field, or FIELD, a FIELD_DECL,
1580 for the field. Don't fold the result if NO_FOLD_P is true.
1582 We also handle the fact that we might have been passed a pointer to the
1583 actual record and know how to look for fields in variant parts. */
/* Return a COMPONENT_REF to access a field of RECORD_VARIABLE, given
   either COMPONENT (an IDENTIFIER_NODE naming the field) or FIELD (a
   FIELD_DECL) — exactly one of the two, as the gcc_assert enforces.
   Handles looking up the corresponding field in variant parts, and
   propagates readonly/volatile flags to the reference.  Result is not
   folded when NO_FOLD_P is true.  */
1586 build_simple_component_ref (tree record_variable
, tree component
,
1587 tree field
, bool no_fold_p
)
1589 tree record_type
= TYPE_MAIN_VARIANT (TREE_TYPE (record_variable
));
1592 gcc_assert ((TREE_CODE (record_type
) == RECORD_TYPE
1593 || TREE_CODE (record_type
) == UNION_TYPE
1594 || TREE_CODE (record_type
) == QUAL_UNION_TYPE
)
1595 && TYPE_SIZE (record_type
)
1596 && (component
!= 0) != (field
!= 0));
1598 /* If no field was specified, look for a field with the specified name
1599 in the current record only. */
1601 for (field
= TYPE_FIELDS (record_type
); field
;
1602 field
= TREE_CHAIN (field
))
1603 if (DECL_NAME (field
) == component
)
1609 /* If this field is not in the specified record, see if we can find
1610 something in the record whose original field is the same as this one. */
1611 if (DECL_CONTEXT (field
) != record_type
)
1612 /* Check if there is a field with name COMPONENT in the record. */
1616 /* First loop thru normal components. */
1618 for (new_field
= TYPE_FIELDS (record_type
); new_field
;
1619 new_field
= TREE_CHAIN (new_field
))
1620 if (DECL_ORIGINAL_FIELD (new_field
) == field
1621 || new_field
== DECL_ORIGINAL_FIELD (field
)
1622 || (DECL_ORIGINAL_FIELD (field
)
1623 && (DECL_ORIGINAL_FIELD (field
)
1624 == DECL_ORIGINAL_FIELD (new_field
))))
1627 /* Next, loop thru DECL_INTERNAL_P components if we haven't found
1628 the component in the first search. Doing this search in 2 steps
1629 is required to avoid hidden homonymous fields in the
1633 for (new_field
= TYPE_FIELDS (record_type
); new_field
;
1634 new_field
= TREE_CHAIN (new_field
))
1635 if (DECL_INTERNAL_P (new_field
))
/* Recurse through the internal field, then into it for FIELD.  */
1638 = build_simple_component_ref (record_variable
,
1639 NULL_TREE
, new_field
, no_fold_p
);
1640 ref
= build_simple_component_ref (field_ref
, NULL_TREE
, field
,
1653 /* If the field's offset has overflowed, do not attempt to access it
1654 as doing so may trigger sanity checks deeper in the back-end.
1655 Note that we don't need to warn since this will be done on trying
1656 to declare the object. */
1657 if (TREE_CODE (DECL_FIELD_OFFSET (field
)) == INTEGER_CST
1658 && TREE_CONSTANT_OVERFLOW (DECL_FIELD_OFFSET (field
)))
1661 /* It would be nice to call "fold" here, but that can lose a type
1662 we need to tag a PLACEHOLDER_EXPR with, so we can't do it. */
1663 ref
= build3 (COMPONENT_REF
, TREE_TYPE (field
), record_variable
, field
,
/* Propagate readonly and volatile qualifications to the reference.  */
1666 if (TREE_READONLY (record_variable
) || TREE_READONLY (field
))
1667 TREE_READONLY (ref
) = 1;
1668 if (TREE_THIS_VOLATILE (record_variable
) || TREE_THIS_VOLATILE (field
)
1669 || TYPE_VOLATILE (record_type
))
1670 TREE_THIS_VOLATILE (ref
) = 1;
1672 return no_fold_p
? ref
: fold (ref
);
1675 /* Like build_simple_component_ref, except that we give an error if the
1676 reference could not be found. */
/* Like build_simple_component_ref, but when the reference cannot be
   found and FIELD was specified, return a NULL_EXPR raising
   Constraint_Error (discriminant check failed).  */
1679 build_component_ref (tree record_variable
, tree component
,
1680 tree field
, bool no_fold_p
)
1682 tree ref
= build_simple_component_ref (record_variable
, component
, field
,
1688 /* If FIELD was specified, assume this is an invalid user field so
1689 raise constraint error. Otherwise, we can't find the type to return, so
1692 return build1 (NULL_EXPR
, TREE_TYPE (field
),
1693 build_call_raise (CE_Discriminant_Check_Failed
, Empty
));
1696 /* Build a GCC tree to call an allocation or deallocation function.
1697 If GNU_OBJ is nonzero, it is an object to deallocate. Otherwise,
1698 generate an allocator.
1700 GNU_SIZE is the size of the object in bytes and ALIGN is the alignment in
1701 bits. GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the
1702 storage pool to use. If not preset, malloc and free will be used except
1703 if GNAT_PROC is the "fake" value of -1, in which case we allocate the
1704 object dynamically on the stack frame. */
/* Build a call to an allocation or deallocation routine.  GNU_OBJ, if
   nonzero, is an object to deallocate; otherwise an allocator is
   generated.  GNU_SIZE is the size in bytes, ALIGN the alignment in
   bits.  GNAT_PROC, if Present, names a storage-pool procedure to
   call (tagged-pool or secondary-stack calling conventions); the
   "fake" value -1 for GNAT_POOL requests stack allocation; otherwise
   malloc/free are used.  */
1707 build_call_alloc_dealloc (tree gnu_obj
, tree gnu_size
, unsigned align
,
1708 Entity_Id gnat_proc
, Entity_Id gnat_pool
,
1711 tree gnu_align
= size_int (align
/ BITS_PER_UNIT
);
1713 gnu_size
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (gnu_size
, gnu_obj
);
1715 if (Present (gnat_proc
))
1717 /* The storage pools are obviously always tagged types, but the
1718 secondary stack uses the same mechanism and is not tagged */
1719 if (Is_Tagged_Type (Etype (gnat_pool
)))
1721 /* The size is the third parameter; the alignment is the
1723 Entity_Id gnat_size_type
1724 = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc
))));
1725 tree gnu_size_type
= gnat_to_gnu_type (gnat_size_type
);
1726 tree gnu_proc
= gnat_to_gnu (gnat_proc
);
1727 tree gnu_proc_addr
= build_unary_op (ADDR_EXPR
, NULL_TREE
, gnu_proc
);
1728 tree gnu_pool
= gnat_to_gnu (gnat_pool
);
1729 tree gnu_pool_addr
= build_unary_op (ADDR_EXPR
, NULL_TREE
, gnu_pool
);
1730 tree gnu_args
= NULL_TREE
;
1733 /* The first arg is always the address of the storage pool; next
1734 comes the address of the object, for a deallocator, then the
1735 size and alignment. */
1737 = chainon (gnu_args
, build_tree_list (NULL_TREE
, gnu_pool_addr
));
1741 = chainon (gnu_args
, build_tree_list (NULL_TREE
, gnu_obj
));
1744 = chainon (gnu_args
,
1745 build_tree_list (NULL_TREE
,
1746 convert (gnu_size_type
, gnu_size
)));
1748 = chainon (gnu_args
,
1749 build_tree_list (NULL_TREE
,
1750 convert (gnu_size_type
, gnu_align
)));
1752 gnu_call
= build3 (CALL_EXPR
, TREE_TYPE (TREE_TYPE (gnu_proc
)),
1753 gnu_proc_addr
, gnu_args
, NULL_TREE
);
1754 TREE_SIDE_EFFECTS (gnu_call
) = 1;
1758 /* Secondary stack case. */
1761 /* The size is the second parameter */
1762 Entity_Id gnat_size_type
1763 = Etype (Next_Formal (First_Formal (gnat_proc
)));
1764 tree gnu_size_type
= gnat_to_gnu_type (gnat_size_type
);
1765 tree gnu_proc
= gnat_to_gnu (gnat_proc
);
1766 tree gnu_proc_addr
= build_unary_op (ADDR_EXPR
, NULL_TREE
, gnu_proc
);
1767 tree gnu_args
= NULL_TREE
;
1770 /* The first arg is the address of the object, for a
1771 deallocator, then the size */
1774 = chainon (gnu_args
, build_tree_list (NULL_TREE
, gnu_obj
));
1777 = chainon (gnu_args
,
1778 build_tree_list (NULL_TREE
,
1779 convert (gnu_size_type
, gnu_size
)));
1781 gnu_call
= build3 (CALL_EXPR
, TREE_TYPE (TREE_TYPE (gnu_proc
)),
1782 gnu_proc_addr
, gnu_args
, NULL_TREE
);
1783 TREE_SIDE_EFFECTS (gnu_call
) = 1;
/* Deallocation with no pool procedure: plain free.  */
1789 return build_call_1_expr (free_decl
, gnu_obj
);
1791 /* ??? For now, disable variable-sized allocators in the stack since
1792 we can't yet gimplify an ALLOCATE_EXPR. */
1793 else if (gnat_pool
== -1
1794 && TREE_CODE (gnu_size
) == INTEGER_CST
&& !flag_stack_check
)
1796 /* If the size is a constant, we can put it in the fixed portion of
1797 the stack frame to avoid the need to adjust the stack pointer. */
1798 if (TREE_CODE (gnu_size
) == INTEGER_CST
&& !flag_stack_check
)
1801 = build_range_type (NULL_TREE
, size_one_node
, gnu_size
);
1802 tree gnu_array_type
= build_array_type (char_type_node
, gnu_range
);
1804 = create_var_decl (get_identifier ("RETVAL"), NULL_TREE
,
1805 gnu_array_type
, NULL_TREE
, false, false, false,
1806 false, NULL
, gnat_node
);
1808 return convert (ptr_void_type_node
,
1809 build_unary_op (ADDR_EXPR
, NULL_TREE
, gnu_decl
));
/* Variable-size stack allocation: emit an ALLOCATE_EXPR.  */
1814 return build2 (ALLOCATE_EXPR
, ptr_void_type_node
, gnu_size
, gnu_align
);
/* Default: heap allocation via malloc, after the implicit-heap-
   allocation restriction check for compiler-generated allocators.  */
1819 if (Nkind (gnat_node
) != N_Allocator
|| !Comes_From_Source (gnat_node
))
1820 Check_No_Implicit_Heap_Alloc (gnat_node
);
1821 return build_call_1_expr (malloc_decl
, gnu_size
);
1825 /* Build a GCC tree to correspond to allocating an object of TYPE whose
1826 initial value is INIT, if INIT is nonzero. Convert the expression to
1827 RESULT_TYPE, which must be some type of pointer. Return the tree.
1828 GNAT_PROC and GNAT_POOL optionally give the procedure to call and
1829 the storage pool to use. GNAT_NODE is used to provide an error
1830 location for restriction violations messages. If IGNORE_INIT_TYPE is
1831 true, ignore the type of INIT for the purpose of determining the size;
1832 this will cause the maximum size to be allocated if TYPE is of
1833 self-referential size. */
/* Build a tree allocating an object of TYPE, initialized from INIT if
   nonzero, converted to RESULT_TYPE (some pointer type).  GNAT_PROC /
   GNAT_POOL select the allocation routine and storage pool; GNAT_NODE
   locates restriction-violation messages.  When IGNORE_INIT_TYPE is
   true, INIT's type is not used for sizing, so the maximum size is
   allocated for self-referentially sized TYPEs.  */
1836 build_allocator (tree type
, tree init
, tree result_type
, Entity_Id gnat_proc
,
1837 Entity_Id gnat_pool
, Node_Id gnat_node
, bool ignore_init_type
)
1839 tree size
= TYPE_SIZE_UNIT (type
);
1842 /* If the initializer, if present, is a NULL_EXPR, just return a new one. */
1843 if (init
&& TREE_CODE (init
) == NULL_EXPR
)
1844 return build1 (NULL_EXPR
, result_type
, TREE_OPERAND (init
, 0));
1846 /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
1847 sizes of the object and its template. Allocate the whole thing and
1848 fill in the parts that are known. */
1849 else if (TYPE_FAT_OR_THIN_POINTER_P (result_type
))
1852 = build_unc_object_type_from_ptr (result_type
, type
,
1853 get_identifier ("ALLOC"));
1854 tree template_type
= TREE_TYPE (TYPE_FIELDS (storage_type
));
1855 tree storage_ptr_type
= build_pointer_type (storage_type
);
1857 tree template_cons
= NULL_TREE
;
1859 size
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type
),
1862 /* If the size overflows, pass -1 so the allocator will raise
1864 if (TREE_CODE (size
) == INTEGER_CST
&& TREE_OVERFLOW (size
))
1865 size
= ssize_int (-1);
1867 storage
= build_call_alloc_dealloc (NULL_TREE
, size
,
1868 TYPE_ALIGN (storage_type
),
1869 gnat_proc
, gnat_pool
, gnat_node
);
1870 storage
= convert (storage_ptr_type
, protect_multiple_eval (storage
));
/* Strip a padding record so INIT converts to the inner type.  */
1872 if (TREE_CODE (type
) == RECORD_TYPE
&& TYPE_IS_PADDING_P (type
))
1874 type
= TREE_TYPE (TYPE_FIELDS (type
));
1877 init
= convert (type
, init
);
1880 /* If there is an initializing expression, make a constructor for
1881 the entire object including the bounds and copy it into the
1882 object. If there is no initializing expression, just set the
1886 template_cons
= tree_cons (TREE_CHAIN (TYPE_FIELDS (storage_type
)),
1888 template_cons
= tree_cons (TYPE_FIELDS (storage_type
),
1889 build_template (template_type
, type
,
1895 build2 (COMPOUND_EXPR
, storage_ptr_type
,
1897 (MODIFY_EXPR
, storage_type
,
1898 build_unary_op (INDIRECT_REF
, NULL_TREE
,
1899 convert (storage_ptr_type
, storage
)),
1900 gnat_build_constructor (storage_type
, template_cons
)),
1901 convert (storage_ptr_type
, storage
)));
/* No initializer: only assign the template (bounds) part.  */
1905 (COMPOUND_EXPR
, result_type
,
1907 (MODIFY_EXPR
, template_type
,
1909 (build_unary_op (INDIRECT_REF
, NULL_TREE
,
1910 convert (storage_ptr_type
, storage
)),
1911 NULL_TREE
, TYPE_FIELDS (storage_type
), 0),
1912 build_template (template_type
, type
, NULL_TREE
)),
1913 convert (result_type
, convert (storage_ptr_type
, storage
)));
1916 /* If we have an initializing expression, see if its size is simpler
1917 than the size from the type. */
1918 if (!ignore_init_type
&& init
&& TYPE_SIZE_UNIT (TREE_TYPE (init
))
1919 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init
))) == INTEGER_CST
1920 || CONTAINS_PLACEHOLDER_P (size
)))
1921 size
= TYPE_SIZE_UNIT (TREE_TYPE (init
));
1923 /* If the size is still self-referential, reference the initializing
1924 expression, if it is present. If not, this must have been a
1925 call to allocate a library-level object, in which case we use
1926 the maximum size. */
1927 if (CONTAINS_PLACEHOLDER_P (size
))
1929 if (!ignore_init_type
&& init
)
1930 size
= substitute_placeholder_in_expr (size
, init
);
1932 size
= max_size (size
, true);
1935 /* If the size overflows, pass -1 so the allocator will raise
1937 if (TREE_CODE (size
) == INTEGER_CST
&& TREE_OVERFLOW (size
))
1938 size
= ssize_int (-1);
1940 /* If this is a type whose alignment is larger than the
1941 biggest we support in normal alignment and this is in
1942 the default storage pool, make an "aligning type", allocate
1943 it, point to the field we need, and return that. */
1944 if (TYPE_ALIGN (type
) > BIGGEST_ALIGNMENT
1947 tree new_type
= make_aligning_type (type
, TYPE_ALIGN (type
), size
);
1949 result
= build_call_alloc_dealloc (NULL_TREE
, TYPE_SIZE_UNIT (new_type
),
1950 BIGGEST_ALIGNMENT
, Empty
,
1952 result
= save_expr (result
);
1953 result
= convert (build_pointer_type (new_type
), result
);
1954 result
= build_unary_op (INDIRECT_REF
, NULL_TREE
, result
);
1955 result
= build_component_ref (result
, NULL_TREE
,
1956 TYPE_FIELDS (new_type
), 0);
1957 result
= convert (result_type
,
1958 build_unary_op (ADDR_EXPR
, NULL_TREE
, result
));
/* Normal case: allocate SIZE bytes and convert to RESULT_TYPE.  */
1961 result
= convert (result_type
,
1962 build_call_alloc_dealloc (NULL_TREE
, size
,
1968 /* If we have an initial value, put the new address into a SAVE_EXPR, assign
1969 the value, and return the address. Do this with a COMPOUND_EXPR. */
1973 result
= save_expr (result
);
1975 = build2 (COMPOUND_EXPR
, TREE_TYPE (result
),
1977 (MODIFY_EXPR
, NULL_TREE
,
1978 build_unary_op (INDIRECT_REF
,
1979 TREE_TYPE (TREE_TYPE (result
)), result
),
1984 return convert (result_type
, result
);
1987 /* Fill in a VMS descriptor for EXPR and return a constructor for it.
1988 GNAT_FORMAL is how we find the descriptor record. */
/* Fill in a VMS descriptor for EXPR and return a constructor for it.
   The descriptor record type is found via GNAT_FORMAL; each field's
   value comes from its DECL_INITIAL with placeholders substituted by
   EXPR, which is first unwrapped from any unconstrained-array form
   and marked addressable.  */
1991 fill_vms_descriptor (tree expr
, Entity_Id gnat_formal
)
1993 tree record_type
= TREE_TYPE (TREE_TYPE (get_gnu_tree (gnat_formal
)));
1995 tree const_list
= NULL_TREE
;
1997 expr
= maybe_unconstrained_array (expr
);
1998 gnat_mark_addressable (expr
);
2000 for (field
= TYPE_FIELDS (record_type
); field
; field
= TREE_CHAIN (field
))
2003 convert (TREE_TYPE (field
),
2004 SUBSTITUTE_PLACEHOLDER_IN_EXPR
2005 (DECL_INITIAL (field
), expr
)),
/* Fields were consed in reverse; restore declaration order.  */
2008 return gnat_build_constructor (record_type
, nreverse (const_list
));
2011 /* Indicate that we need to make the address of EXPR_NODE and it therefore
2012 should not be allocated in a register. Returns true if successful. */
2015 gnat_mark_addressable (tree expr_node
)
2018 switch (TREE_CODE (expr_node
))
2023 case ARRAY_RANGE_REF
:
2026 case VIEW_CONVERT_EXPR
:
2028 case NON_LVALUE_EXPR
:
2030 expr_node
= TREE_OPERAND (expr_node
, 0);
2034 TREE_ADDRESSABLE (expr_node
) = 1;
2040 TREE_ADDRESSABLE (expr_node
) = 1;
2044 TREE_ADDRESSABLE (expr_node
) = 1;
2048 return (DECL_CONST_CORRESPONDING_VAR (expr_node
)
2049 && (gnat_mark_addressable
2050 (DECL_CONST_CORRESPONDING_VAR (expr_node
))));