/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
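
/* For example (an illustrative sketch only, not code from this file),
   a caller that needs the constant byte size of a two-element array of
   TYPE could build it as

     tree bytes = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (type),
                              size_int (2));

   and, because both operands are sizetype constants for fixed-size
   types, receive a single INTEGER_CST back.  */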
#include "coretypes.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "langhooks.h"
#include "generic-match.h"
#include "gimple-fold.h"
#include "tree-into-ssa.h"
#include "case-cfn-macros.h"
#include "stringpool.h"
#include "tree-ssanames.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
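
/* As an illustrative sketch of the encoding (hypothetical usage, not
   code appearing elsewhere in this file): "x < y || x == y" collapses
   to a single comparison because COMPCODE_LT | COMPCODE_EQ equals
   COMPCODE_LE:

     enum comparison_code lcode = comparison_to_compcode (LT_EXPR);
     enum comparison_code rcode = comparison_to_compcode (EQ_EXPR);
     enum tree_code merged
       = compcode_to_comparison ((enum comparison_code) (lcode | rcode));

   MERGED is LE_EXPR.  */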

static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (location_t, tree, tree, enum tree_code,
			tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static bool vec_cst_ctor_to_array (tree, tree *);
static tree fold_negate_expr (location_t, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

static tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning ((enum warn_strict_overflow_code) code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
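
/* An illustrative sketch of the deferral protocol (hypothetical caller,
   not code from this file): a pass that only keeps constant folding
   results can defer warnings across the fold and issue them only when
   the result is actually used:

     fold_defer_overflow_warnings ();
     tree res = fold_binary (PLUS_EXPR, type, op0, op1);
     bool used = res && TREE_CODE (res) == INTEGER_CST;
     fold_undefer_overflow_warnings (used, NULL, 0);  */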

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CBRT:
    CASE_CFN_ERF:
    CASE_CFN_LLROUND:
    CASE_CFN_LROUND:
    CASE_CFN_ROUND:
    CASE_CFN_SIN:
    CASE_CFN_SINH:
    CASE_CFN_TAN:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LRINT:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	int count = TYPE_VECTOR_SUBPARTS (type), i;

	for (i = 0; i < count; i++)
	  if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  || HONOR_SIGNED_ZEROS (element_mode (type))
	  || (INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	     && !HONOR_SIGNED_ZEROS (element_mode (type))
	     && (! INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* INT_MIN/n * n doesn't overflow, while negating one operand does
	 if n is a (negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && wi::popcount (wi::abs (TREE_OPERAND (t, 0))) != 1)
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && wi::popcount (wi::abs (TREE_OPERAND (t, 1))) != 1)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
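
/* A worked illustration of the RSHIFT_EXPR case above: for a 32-bit int
   X, the arithmetic shift "X >> 31" is either 0 or -1, so its negation
   is 0 or 1, which is exactly the logical shift "(unsigned) X >> 31".
   The negation is therefore free and negate_expr_p returns true.  */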

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	int count = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts = XALLOCAVEC (tree, count);

	for (i = 0; i < count; i++)
	  {
	    elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* A wrapper for fold_negate_expr_1.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
    return NULL_TREE;
  return fold_convert_loc (loc, type, tem);
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
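
/* Illustrative usage (hypothetical operands, not code from this file):
   negating a subtraction swaps its operands instead of adding a
   NEGATE_EXPR node:

     tree diff = fold_build2 (MINUS_EXPR, type, a, b);
     tree neg = negate_expr (diff);

   NEG is "b - a" when the type has no signed zeros or sign-dependent
   rounding to honor; otherwise a NEGATE_EXPR is built around DIFF.  */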

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating an illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (location_t loc, tree in, tree type, enum tree_code code,
	    tree *conp, tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p && var)
	{
	  /* Convert to TYPE before negating.  */
	  var = fold_convert_loc (loc, type, var);
	  var = negate_expr (var);
	}
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  Do _not_ do this
	 when IN is constant.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      if (var)
	{
	  /* Convert to TYPE before negating.  */
	  var = fold_convert_loc (loc, type, var);
	  var = negate_expr (var);
	}
    }

  return var;
}
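
/* A worked example of the decomposition (hypothetical input): splitting
   IN = "x + 3" with CODE == PLUS_EXPR leaves *CONP null, stores the
   literal 3 in *LITP, and returns the variable part "x".  Splitting
   "x - 3" instead stores 3 in *MINUS_LITP, recording that the literal
   was subtracted.  */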

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
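
/* An illustrative round trip through the two helpers (hypothetical
   caller, not code from this file):

     tree con, lit, minus_lit;
     tree var = split_tree (loc, in, type, PLUS_EXPR,
			    &con, &lit, &minus_lit, /*negate_p=*/0);
     tree sum = associate_trees (loc, var, con, PLUS_EXPR, type);

   after which the caller folds LIT or MINUS_LIT back in with constant
   arithmetic, which is how fold reassociates chains of additions.  */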

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
		   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wi::to_wide (parg2, TYPE_PRECISION (type));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, arg2, sign);
      else
	res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, arg2);
      else
	res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
		      (((sign == SIGNED || overflowable == -1)
			&& overflow)
		       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
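
/* Illustrative use (hypothetical operands): adding two INTEGER_CSTs of
   the same type folds at compile time, with wraparound recorded via
   TREE_OVERFLOW on the result:

     tree a = build_int_cst (integer_type_node, 7);
     tree b = build_int_cst (integer_type_node, 35);
     tree s = int_const_binop (PLUS_EXPR, a, b);

   S is the INTEGER_CST 42.  */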

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	{
	  /* Make resulting NaN value to be qNaN when flag_signaling_nans
	     is off.  */
	  d1.signalling = 0;
	  t = build_real (type, d1);
	  return t;
	}
      else if (REAL_VALUE_ISNAN (d2))
	{
	  /* Make resulting NaN value to be qNaN when flag_signaling_nans
	     is off.  */
	  d2.signalling = 0;
	  t = build_real (type, d2);
	  return t;
	}

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wide_int w2 = arg2;
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.ulow ();
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru.  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to minimize
		 overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  elts[i] = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  elts[i] = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
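
/* Illustrative use (hypothetical operands): dividing two REAL_CSTs goes
   through real_arithmetic subject to the trapping- and rounding-math
   checks above:

     tree one = build_real (double_type_node, dconst1);
     tree two = build_real (double_type_node, dconst2);
     tree half = const_binop (RDIV_EXPR, one, two);

   HALF is the REAL_CST 0.5; with a zero divisor and flag_trapping_math
   set, const_binop returns NULL_TREE instead.  */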

/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ???  Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg1, elts)
	    || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
	  return NULL_TREE;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
					  ? NOP_EXPR : FIX_TRUNC_EXPR,
					  TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
	unsigned int out, ofs, scale;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 4);
	if (!vec_cst_ctor_to_array (arg1, elts)
	    || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
	  return NULL_TREE;

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	for (out = 0; out < nelts; out++)
	  {
	    unsigned int in1 = (out << scale) + ofs;
	    unsigned int in2 = in1 + nelts * 2;
	    tree t1, t2;

	    t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
	    t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    elts[out] = const_binop (MULT_EXPR, t1, t2);
	    if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}

/* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
   Return zero if computing the constants is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  /* Don't perform the operation, other than NEGATE and ABS, if
     flag_signaling_nans is on and the operand is a signaling NaN.  */
  if (TREE_CODE (arg0) == REAL_CST
      && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
      && code != NEGATE_EXPR
      && code != ABS_EXPR)
    return NULL_TREE;

  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
	 cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
	  && !(targetm.addr_space.zero_address_valid
	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
	return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
	/* Can't call fold_negate_const directly here as that doesn't
	   handle all cases and we might not be able to negate some
	   constants.  */
	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
	if (tem && CONSTANT_CLASS_P (tem))
	  return tem;
	break;
      }

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
					  TREE_TYPE (type));
	  return build_complex (type, TREE_REALPART (arg0), ipart);
	}
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree *elements;
	  tree elem;
	  unsigned count = VECTOR_CST_NELTS (arg0), i;

	  elements = XALLOCAVEC (tree, count);
	  for (i = 0; i < count; i++)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements[i] = elem;
	    }
	  if (i == count)
	    return build_vector (type, elements);
	}
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;
	enum tree_code subcode;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 2);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
	  elts += nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else
	  subcode = FLOAT_EXPR;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
	unsigned int nelts, i;
	tree *elts;
	enum tree_code subcode;

	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;
	nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	switch (code)
	  {
	  case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
	  case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
	  case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
	  default: gcc_unreachable ();
	  }

	for (i = 1; i < nelts; i++)
	  {
	    elts[0] = const_binop (subcode, elts[0], elts[i]);
	    if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
	      return NULL_TREE;
	  }

	return elts[0];
      }

    default:
      break;
    }

  return NULL_TREE;
}

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
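
/* Illustrative use (hypothetical operands): offset arithmetic such as

     tree off = size_binop (PLUS_EXPR, size_int (0), size_int (12));

   returns size_int (12) directly through the PLUS_EXPR special case
   above, without calling int_const_binop_1.  */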

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
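
/* A worked example (hypothetical operands): for the sizetype constants
   4 and 8,

     size_diffop_loc (loc, size_int (4), size_int (8))

   subtracts the other way around and negates, yielding the ssizetype
   constant -4 rather than a huge wrapped unsigned value.  */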

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
			 TREE_OVERFLOW (arg1));
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (real_less (&r, &l))
	{
	  overflow = true;
	  val = lt;
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (real_less (&u, &r))
	    {
	      overflow = true;
	      val = ut;
	    }
	}
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
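
/* Worked examples of these saturating semantics (hypothetical values):
   converting 3.7 to int yields 3 (truncation); converting 1e30 to a
   32-bit int yields INT_MAX with TREE_OVERFLOW set; converting a NaN
   yields 0, also with TREE_OVERFLOW set.  */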

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  machine_mode mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
			  HOST_BITS_PER_DOUBLE_INT,
			  SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We check whether the fractional bits are nonzero, and if so
     add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
		      (temp.is_negative ()
		       && (TYPE_UNSIGNED (type)
			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
		      | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  /* Don't perform the operation if flag_signaling_nans is on
     and the operand is a signaling NaN.  */
  if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
    return NULL_TREE;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
	   && REAL_VALUE_ISINF (value)
	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
			      TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;
  double_int di;

  gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);

  di.low = TREE_INT_CST_ELT (arg1, 0);
  if (TREE_INT_CST_NUNITS (arg1) == 1)
    di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
  else
    di.high = TREE_INT_CST_ELT (arg1, 1);

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
				       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
					&TREE_REAL_CST (arg1),
					TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_fixed_from_real (type, arg1);
    }
  else if (TREE_CODE (type) == VECTOR_TYPE)
    {
      if (TREE_CODE (arg1) == VECTOR_CST
	  && TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1))
	{
	  int len = TYPE_VECTOR_SUBPARTS (type);
	  tree elttype = TREE_TYPE (type);
	  tree *v = XALLOCAVEC (tree, len);
	  for (int i = 0; i < len; ++i)
	    {
	      tree elt = VECTOR_CST_ELT (arg1, i);
	      tree cvt = fold_convert_const (code, elttype, elt);
	      if (cvt == NULL_TREE)
		return NULL_TREE;
	      v[i] = cvt;
	    }
	  return build_vector (type, v);
	}
    }
  return NULL_TREE;
}

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}

/* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      return (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	      || TREE_CODE (orig) == OFFSET_TYPE);

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}

/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, REALPART_EXPR,
						  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == FIXED_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  return fold_build1_loc (loc, NOP_EXPR, type, arg);

	case FIXED_POINT_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
	  || TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    goto fold_convert_exit;
	}

      switch (TREE_CODE (orig))
	{
	case FIXED_POINT_TYPE:
	case INTEGER_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case REAL_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	case FIXED_POINT_TYPE:
	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
				  fold_convert_loc (loc, TREE_TYPE (type), arg),
				  fold_convert_loc (loc, TREE_TYPE (type),
						    integer_zero_node));
	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 0));
		ipart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 1));
		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	      }

	    arg = save_expr (arg);
	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      gcc_unreachable ();
    }
 fold_convert_exit:
  protected_set_expr_location_unshare (tem, loc);
  return tem;
}
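
/* Worked example (an editor's illustration, not from the original file):

     fold_convert_loc (loc, double_type_node,
		       build_int_cst (integer_type_node, 3))

   hits the REAL_TYPE arm above with an INTEGER_CST argument, so
   fold_convert_const (FLOAT_EXPR, ...) folds it directly to the REAL_CST
   3.0 instead of emitting a FLOAT_EXPR conversion node.  */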

/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
  {
  case VAR_DECL:
  case PARM_DECL:
  case RESULT_DECL:
  case LABEL_DECL:
  case FUNCTION_DECL:
  case SSA_NAME:

  case COMPONENT_REF:
  case MEM_REF:
  case INDIRECT_REF:
  case ARRAY_REF:
  case ARRAY_RANGE_REF:
  case BIT_FIELD_REF:
  case OBJ_TYPE_REF:

  case REALPART_EXPR:
  case IMAGPART_EXPR:
  case PREINCREMENT_EXPR:
  case PREDECREMENT_EXPR:
  case SAVE_EXPR:
  case TRY_CATCH_EXPR:
  case WITH_CLEANUP_EXPR:
  case COMPOUND_EXPR:
  case MODIFY_EXPR:
  case TARGET_EXPR:
  case COND_EXPR:
  case BIND_EXPR:
    break;

  default:
    /* Assume the worst for front-end tree codes.  */
    if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
      break;
    return false;
  }

  return true;
}

/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue_loc (location_t loc, tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
}

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue_loc (location_t loc, tree x)
{
  return protected_set_expr_location_unshare (x, loc);
}

/* Given a tree comparison code, return the code that is the logical inverse.
   It is generally not safe to do this for floating-point comparisons, except
   for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
   ERROR_MARK in this case.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
      && code != ORDERED_EXPR && code != UNORDERED_EXPR)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}

/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}

/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}

/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
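
/* Worked example (an editor's illustration; assumes the conventional
   encoding where COMPCODE_LT, COMPCODE_EQ, COMPCODE_GT and COMPCODE_UNORD
   each occupy a separate bit): COMPCODE_LE is then COMPCODE_LT |
   COMPCODE_EQ and COMPCODE_NE is LT | GT | UNORD, so ANDing or ORing two
   encodings corresponds exactly to conjunction or disjunction of the
   underlying predicates.  */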

/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (location_t loc,
		     enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (ll_arg);
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  int compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    {
      enum tree_code tcode;

      tcode = compcode_to_comparison ((enum comparison_code) compcode);
      return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
    }
}
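
/* Worked example (an editor's illustration): for integral X and Y,
   combine_comparisons (loc, TRUTH_ANDIF_EXPR, LT_EXPR, EQ_EXPR, type, X, Y)
   computes COMPCODE_LT & COMPCODE_EQ == COMPCODE_FALSE and returns a
   constant false node, while the TRUTH_ORIF_EXPR variant produces
   COMPCODE_LE and hence the single comparison X <= Y.  */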

/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.

   If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
   not values of expressions.

   If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
   such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.

   Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
   any operand with side effect.  This is unnecessarily conservative in the
   case we know that arg0 and arg1 are in disjoint code paths (such as in
   ?: operator).  In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
   addresses with TREE_CONSTANT flag set so we know that &var == &var
   even if var is volatile.  */

int
operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
{
  /* When checking, verify at the outermost operand_equal_p call that
     if operand_equal_p returns non-zero then ARG0 and ARG1 have the same
     hash value.  */
  if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
    {
      if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
	{
	  if (arg0 != arg1)
	    {
	      inchash::hash hstate0 (0), hstate1 (0);
	      inchash::add_expr (arg0, hstate0, flags | OEP_HASH_CHECK);
	      inchash::add_expr (arg1, hstate1, flags | OEP_HASH_CHECK);
	      hashval_t h0 = hstate0.end ();
	      hashval_t h1 = hstate1.end ();
	      gcc_assert (h0 == h1);
	    }
	  return 1;
	}
      else
	return 0;
    }

  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
      || TREE_TYPE (arg0) == error_mark_node
      || TREE_TYPE (arg1) == error_mark_node)
    return 0;

  /* Similar, if either does not have a type (like a released SSA name),
     they aren't equal.  */
  if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
    return 0;

  /* We cannot consider pointers to different address space equal.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && POINTER_TYPE_P (TREE_TYPE (arg1))
      && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
	  != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
    return 0;

  /* Check equality of integer constants before bailing out due to
     precision differences.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* Address of INTEGER_CST is not defined; check that we did not forget
	 to drop the OEP_ADDRESS_OF flags.  */
      gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
      return tree_int_cst_equal (arg0, arg1);
    }

  if (!(flags & OEP_ADDRESS_OF))
    {
      /* If both types don't have the same signedness, then we can't consider
	 them equal.  We must check this before the STRIP_NOPS calls
	 because they may change the signedness of the arguments.  As pointers
	 strictly don't have a signedness, require either two pointers or
	 two non-pointers as well.  */
      if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
	  || POINTER_TYPE_P (TREE_TYPE (arg0))
	     != POINTER_TYPE_P (TREE_TYPE (arg1)))
	return 0;

      /* If both types don't have the same precision, then it is not safe
	 to strip NOPs.  */
      if (element_precision (TREE_TYPE (arg0))
	  != element_precision (TREE_TYPE (arg1)))
	return 0;

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }
#if 0
  /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR.  Enable the
     sanity check once the issue is solved.  */
  else
    /* Addresses of conversions and SSA_NAMEs (and many other things)
       are not defined.  Check that we did not forget to drop the
       OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags.  */
    gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
			 && TREE_CODE (arg0) != SSA_NAME);
#endif

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
	return operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 1), flags)
	       && operand_equal_p (TREE_OPERAND (arg0, 1),
				   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1))
    {
      /* NOP_EXPR and CONVERT_EXPR are considered equal.  */
      if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
	;
      else if (flags & OEP_ADDRESS_OF)
	{
	  /* If we are interested in comparing addresses ignore
	     MEM_REF wrappings of the base that can appear just for
	     TBAA reasons.  */
	  if (TREE_CODE (arg0) == MEM_REF
	      && DECL_P (arg1)
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
	      && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
	      && integer_zerop (TREE_OPERAND (arg0, 1)))
	    return 1;
	  else if (TREE_CODE (arg1) == MEM_REF
		   && DECL_P (arg0)
		   && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
		   && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
		   && integer_zerop (TREE_OPERAND (arg1, 1)))
	    return 1;
	  return 0;
	}
      else
	return 0;
    }

  /* When not checking addresses, this is needed for conversions and for
     COMPONENT_REF.  Might as well play it safe and always test this.  */
  if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
	  && !(flags & OEP_ADDRESS_OF)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.
     If we are taking an invariant address of two identical objects
     they are necessarily equal as well.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (flags & OEP_MATCH_SIDE_EFFECTS)
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	return tree_int_cst_equal (arg0, arg1);

      case FIXED_CST:
	return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
				       TREE_FIXED_CST (arg1));

      case REAL_CST:
	if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
	  return 1;

	if (!HONOR_SIGNED_ZEROS (arg0))
	  {
	    /* If we do not distinguish between signed and unsigned zero,
	       consider them equal.  */
	    if (real_zerop (arg0) && real_zerop (arg1))
	      return 1;
	  }
	return 0;

      case VECTOR_CST:
	{
	  unsigned i;

	  if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
	    return 0;

	  for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
	    {
	      if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
				    VECTOR_CST_ELT (arg1, i), flags))
		return 0;
	    }
	  return 1;
	}

      case COMPLEX_CST:
	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
				 flags)
		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
				    flags));

      case STRING_CST:
	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
		&& ! memcmp (TREE_STRING_POINTER (arg0),
			     TREE_STRING_POINTER (arg1),
			     TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
	gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
				flags | OEP_ADDRESS_OF
				| OEP_MATCH_SIDE_EFFECTS);
      case CONSTRUCTOR:
	/* In GIMPLE empty constructors are allowed in initializers of
	   aggregates.  */
	return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
      default:
	break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
				    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)				\
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
	{
	CASE_CONVERT:
	case FIX_TRUNC_EXPR:
	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
	    return 0;
	  break;
	default:
	  break;
	}

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
	return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), flags)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
	 dereferencing contain a side effect, these cannot be equal,
	 but their addresses can be.  */
      if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
	  && (TREE_SIDE_EFFECTS (arg0)
	      || TREE_SIDE_EFFECTS (arg1)))
	return 0;

      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	  if (!(flags & OEP_ADDRESS_OF)
	      && (TYPE_ALIGN (TREE_TYPE (arg0))
		  != TYPE_ALIGN (TREE_TYPE (arg1))))
	    return 0;
	  flags &= ~OEP_ADDRESS_OF;
	  return OP_SAME (0);

	case IMAGPART_EXPR:
	  /* Require the same offset.  */
	  if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
				TYPE_SIZE (TREE_TYPE (arg1)),
				flags & ~OEP_ADDRESS_OF))
	    return 0;

	/* Fallthru.  */
	case REALPART_EXPR:
	case VIEW_CONVERT_EXPR:
	  return OP_SAME (0);

	case TARGET_MEM_REF:
	case MEM_REF:
	  if (!(flags & OEP_ADDRESS_OF))
	    {
	      /* Require equal access sizes */
	      if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
		  && (!TYPE_SIZE (TREE_TYPE (arg0))
		      || !TYPE_SIZE (TREE_TYPE (arg1))
		      || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
					   TYPE_SIZE (TREE_TYPE (arg1)),
					   flags)))
		return 0;
	      /* Verify that access happens in similar types.  */
	      if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
		return 0;
	      /* Verify that accesses are TBAA compatible.  */
	      if (!alias_ptr_types_compatible_p
		    (TREE_TYPE (TREE_OPERAND (arg0, 1)),
		     TREE_TYPE (TREE_OPERAND (arg1, 1)))
		  || (MR_DEPENDENCE_CLIQUE (arg0)
		      != MR_DEPENDENCE_CLIQUE (arg1))
		  || (MR_DEPENDENCE_BASE (arg0)
		      != MR_DEPENDENCE_BASE (arg1)))
		return 0;
	      /* Verify that alignment is compatible.  */
	      if (TYPE_ALIGN (TREE_TYPE (arg0))
		  != TYPE_ALIGN (TREE_TYPE (arg1)))
		return 0;
	    }
	  flags &= ~OEP_ADDRESS_OF;
	  return (OP_SAME (0) && OP_SAME (1)
		  /* TARGET_MEM_REF require equal extra operands.  */
		  && (TREE_CODE (arg0) != TARGET_MEM_REF
		      || (OP_SAME_WITH_NULL (2)
			  && OP_SAME_WITH_NULL (3)
			  && OP_SAME_WITH_NULL (4))));

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  if (!OP_SAME (0))
	    return 0;
	  flags &= ~OEP_ADDRESS_OF;
	  /* Compare the array index by value if it is constant first as we
	     may have different types but same value here.  */
	  return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
				       TREE_OPERAND (arg1, 1))
		   || OP_SAME (1))
		  && OP_SAME_WITH_NULL (2)
		  && OP_SAME_WITH_NULL (3)
		  /* Compare low bound and element size as with OEP_ADDRESS_OF
		     we have to account for the offset of the ref.  */
		  && (TREE_TYPE (TREE_OPERAND (arg0, 0))
		      == TREE_TYPE (TREE_OPERAND (arg1, 0))
		      || (operand_equal_p (array_ref_low_bound
					     (CONST_CAST_TREE (arg0)),
					   array_ref_low_bound
					     (CONST_CAST_TREE (arg1)), flags)
			  && operand_equal_p (array_ref_element_size
						(CONST_CAST_TREE (arg0)),
					      array_ref_element_size
						(CONST_CAST_TREE (arg1)),
					      flags))));

	case COMPONENT_REF:
	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
	     may be NULL when we're called to compare MEM_EXPRs.  */
	  if (!OP_SAME_WITH_NULL (0)
	      || !OP_SAME (1))
	    return 0;
	  flags &= ~OEP_ADDRESS_OF;
	  return OP_SAME_WITH_NULL (2);

	case BIT_FIELD_REF:
	  if (!OP_SAME (0))
	    return 0;
	  flags &= ~OEP_ADDRESS_OF;
	  return OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}

    case tcc_expression:
      switch (TREE_CODE (arg0))
	{
	case ADDR_EXPR:
	  /* Be sure we pass right ADDRESS_OF flag.  */
	  gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
	  return operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0),
				  flags | OEP_ADDRESS_OF);

	case TRUTH_NOT_EXPR:
	  return OP_SAME (0);

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  return OP_SAME (0) && OP_SAME (1);

	case FMA_EXPR:
	case WIDEN_MULT_PLUS_EXPR:
	case WIDEN_MULT_MINUS_EXPR:
	  if (!OP_SAME (2))
	    return 0;
	  /* The multiplication operands are commutative.  */
	  /* FALLTHRU */

	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  if (OP_SAME (0) && OP_SAME (1))
	    return 1;

	  /* Otherwise take into account this is a commutative operation.  */
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 1), flags)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 0), flags));

	case COND_EXPR:
	  if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
	    return 0;
	  flags &= ~OEP_ADDRESS_OF;
	  return OP_SAME (0);

	case BIT_INSERT_EXPR:
	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

	case MODIFY_EXPR:
	case INIT_EXPR:
	case COMPOUND_EXPR:
	case PREDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  if (flags & OEP_LEXICOGRAPHIC)
	    return OP_SAME (0) && OP_SAME (1);
	  return 0;

	case CLEANUP_POINT_EXPR:
	case EXPR_STMT:
	  if (flags & OEP_LEXICOGRAPHIC)
	    return OP_SAME (0);
	  return 0;

	default:
	  return 0;
	}

    case tcc_vl_exp:
      switch (TREE_CODE (arg0))
	{
	case CALL_EXPR:
	  if ((CALL_EXPR_FN (arg0) == NULL_TREE)
	      != (CALL_EXPR_FN (arg1) == NULL_TREE))
	    /* If not both CALL_EXPRs are either internal or normal
	       functions, then they are not equal.  */
	    return 0;
	  else if (CALL_EXPR_FN (arg0) == NULL_TREE)
	    {
	      /* If the CALL_EXPRs call different internal functions, then they
		 are not equal.  */
	      if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
		return 0;
	    }
	  else
	    {
	      /* If the CALL_EXPRs call different functions, then they are not
		 equal.  */
	      if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
				     flags))
		return 0;
	    }

	  /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS.  */
	  {
	    unsigned int cef = call_expr_flags (arg0);
	    if (flags & OEP_PURE_SAME)
	      cef &= ECF_CONST | ECF_PURE;
	    else
	      cef &= ECF_CONST;
	    if (!cef && !(flags & OEP_LEXICOGRAPHIC))
	      return 0;
	  }

	  /* Now see if all the arguments are the same.  */
	  {
	    const_call_expr_arg_iterator iter0, iter1;
	    const_tree a0, a1;
	    for (a0 = first_const_call_expr_arg (arg0, &iter0),
		   a1 = first_const_call_expr_arg (arg1, &iter1);
		 a0 && a1;
		 a0 = next_const_call_expr_arg (&iter0),
		   a1 = next_const_call_expr_arg (&iter1))
	      if (! operand_equal_p (a0, a1, flags))
		return 0;

	    /* If we get here and both argument lists are exhausted
	       then the CALL_EXPRs are equal.  */
	    return ! (a0 || a1);
	  }
	default:
	  return 0;
	}

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    case tcc_exceptional:
      if (TREE_CODE (arg0) == CONSTRUCTOR)
	{
	  /* In GIMPLE constructors are used only to build vectors from
	     elements.  Individual elements in the constructor must be
	     indexed in increasing order and form an initial sequence.

	     We make no effort to compare constructors in generic.
	     (see sem_variable::equals in ipa-icf which can do so for
	      constants).  */
	  if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
	      || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
	    return 0;

	  /* Be sure that vectors constructed have the same representation.
	     We only tested element precision and modes to match.
	     Vectors may be BLKmode and thus also check that the number of
	     parts match.  */
	  if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
	      != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
	    return 0;

	  vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
	  vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
	  unsigned int len = vec_safe_length (v0);

	  if (len != vec_safe_length (v1))
	    return 0;

	  for (unsigned int i = 0; i < len; i++)
	    {
	      constructor_elt *c0 = &(*v0)[i];
	      constructor_elt *c1 = &(*v1)[i];

	      if (!operand_equal_p (c0->value, c1->value, flags)
		  /* In GIMPLE the indexes can be either NULL or matching i.
		     Double check this so we won't get false
		     positives for GENERIC.  */
		  || (c0->index
		      && (TREE_CODE (c0->index) != INTEGER_CST
			  || !compare_tree_int (c0->index, i)))
		  || (c1->index
		      && (TREE_CODE (c1->index) != INTEGER_CST
			  || !compare_tree_int (c1->index, i))))
		return 0;
	    }
	  return 1;
	}
      else if (TREE_CODE (arg0) == STATEMENT_LIST
	       && (flags & OEP_LEXICOGRAPHIC))
	{
	  /* Compare the STATEMENT_LISTs.  */
	  tree_stmt_iterator tsi1, tsi2;
	  tree body1 = CONST_CAST_TREE (arg0);
	  tree body2 = CONST_CAST_TREE (arg1);
	  for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
	       tsi_next (&tsi1), tsi_next (&tsi2))
	    {
	      /* The lists don't have the same number of statements.  */
	      if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
		return 0;
	      if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
		return 1;
	      if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
				    flags & (OEP_LEXICOGRAPHIC
					     | OEP_NO_HASH_CHECK)))
		return 0;
	    }
	}
      return 0;

    case tcc_statement:
      switch (TREE_CODE (arg0))
	{
	case RETURN_EXPR:
	  if (flags & OEP_LEXICOGRAPHIC)
	    return OP_SAME_WITH_NULL (0);
	  return 0;
	default:
	  return 0;
	}

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
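
/* Usage note (an editor's illustration): with no special FLAGS,
   operand_equal_p matches 'a + b' against 'b + a' through the
   commutativity fallback in the tcc_binary case above, while the
   REAL_CST case keeps -0.0 distinct from 0.0 whenever signed zeros are
   honored, exactly as the comment before the function promises.  */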

/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
	 to match the longer operand.  */
      primarg1 = fold_convert (signed_or_unsigned_type_for
			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
	return 1;
    }

  return 0;
}

/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    tclass = tcc_binary;

  else if (tclass == tcc_expression && code == SAVE_EXPR
	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      tclass = tcc_unary;
      *save_p = 1;
    }

  switch (tclass)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;
      else
	return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;
      else
	return 0;

      return 1;

    default:
      return 0;
    }
}

/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (location_t loc, tree arg, tree old0, tree new0,
	    tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    tclass = tcc_binary;

  switch (tclass)
    {
    case tcc_unary:
      return fold_build1_loc (loc, code, type,
			      eval_subst (loc, TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2_loc (loc, code, type,
			      eval_subst (loc, TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (loc, TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
	{
	case SAVE_EXPR:
	  return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
			     old1, new1);

	case COMPOUND_EXPR:
	  return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
			     old1, new1);

	case COND_EXPR:
	  return fold_build3_loc (loc, code, type,
				  eval_subst (loc, TREE_OPERAND (arg, 0),
					      old0, new0, old1, new1),
				  eval_subst (loc, TREE_OPERAND (arg, 1),
					      old0, new0, old1, new1),
				  eval_subst (loc, TREE_OPERAND (arg, 2),
					      old0, new0, old1, new1));
	default:
	  break;
	}
      /* Fall through - ???  */

    case tcc_comparison:
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold_build2_loc (loc, code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}
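
/* Worked example (an editor's illustration): substituting OLD0 = a,
   NEW0 = c, OLD1 = b, NEW1 = d in the tree for (a < b) && (a == b)
   rebuilds it as (c < d) && (c == d): each comparison operand equal to
   OLD0 or OLD1, whether by pointer identity or by operand_equal_p, is
   swapped for the corresponding replacement before the node is
   re-folded.  */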

/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement cast to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
		       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
		       fold_ignored_result (omitted), t);

  return non_lvalue_loc (loc, t);
}
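
/* Example (an editor's illustration): when fold rewrites 'f () * 0' to 0
   it cannot simply drop the call, so omit_one_operand_loc returns the
   COMPOUND_EXPR '(f (), 0)': the omitted operand is still evaluated for
   its side effects while the folded result supplies the value.  */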

/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands_loc (location_t loc, tree type, tree result,
		       tree omitted1, tree omitted2)
{
  tree t = fold_convert_loc (loc, type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
}

/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

static tree
fold_truth_not_expr (location_t loc, tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  location_t loc1, loc2;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
	  && flag_trapping_math
	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
	  && code != NE_EXPR && code != EQ_EXPR)
	return NULL_TREE;

      code = invert_tree_comparison (code, HONOR_NANS (op_type));
      if (code == ERROR_MARK)
	return NULL_TREE;

      tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
			     TREE_OPERAND (arg, 1));
      if (TREE_NO_WARNING (arg))
	TREE_NO_WARNING (ret) = 1;
      return ret;
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_OR_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_AND_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
			   TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	return build2_loc (loc, TRUTH_XOR_EXPR, type,
			   invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
			   TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ORIF_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
	tree arg1 = TREE_OPERAND (arg, 1);
	tree arg2 = TREE_OPERAND (arg, 2);

	loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
	loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);

	/* A COND_EXPR may have a throw as one operand, which
	   then has void type.  Just leave void operands
	   as they are.  */
	return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
			   VOID_TYPE_P (TREE_TYPE (arg1))
			   ? arg1 : invert_truthvalue_loc (loc1, arg1),
			   VOID_TYPE_P (TREE_TYPE (arg2))
			   ? arg2 : invert_truthvalue_loc (loc2, arg2));
      }

    case COMPOUND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, COMPOUND_EXPR, type,
			 TREE_OPERAND (arg, 0),
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));

    CASE_CONVERT:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
	return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

      /* fall through */

    case FLOAT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, TREE_CODE (arg), type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	return NULL_TREE;
      return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, CLEANUP_POINT_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    default:
      return NULL_TREE;
    }
}
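
/* Example (an editor's illustration): negating A && B through the
   TRUTH_AND_EXPR case above produces !A || !B (De Morgan), and negating
   an integer a <= b produces a > b directly; only trapping
   floating-point inequalities make the function give up and return
   NULL_TREE.  */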

/* Fold the truth-negation of ARG.  This never alters ARG itself.  We
   assume that ARG is an operation that returns a truth value (0 or 1
   for scalars, 0 or -1 for vectors).  Return the folded expression if
   folding is successful.  Otherwise, return NULL_TREE.  */

static tree
fold_invert_truthvalue (location_t loc, tree arg)
{
  tree type = TREE_TYPE (arg);
  return fold_unary_loc (loc, VECTOR_TYPE_P (type)
			      ? BIT_NOT_EXPR
			      : TRUTH_NOT_EXPR,
			 type, arg);
}

/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1 for scalars, 0 or -1 for vectors).  */

tree
invert_truthvalue_loc (location_t loc, tree arg)
{
  if (TREE_CODE (arg) == ERROR_MARK)
    return arg;

  tree type = TREE_TYPE (arg);
  return fold_build1_loc (loc, VECTOR_TYPE_P (type)
			       ? BIT_NOT_EXPR
			       : TRUTH_NOT_EXPR,
			  type, arg);
}

/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */

static tree
distribute_real_division (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
			  TREE_OPERAND (arg1, 1), 0))
    return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
			    fold_build2_loc (loc, code, type,
					     TREE_OPERAND (arg0, 0),
					     TREE_OPERAND (arg1, 0)),
			    TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
		       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
	real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
	real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2_loc (loc, MULT_EXPR, type,
			      TREE_OPERAND (arg0, 0),
			      build_real (type, r0));
    }

  return NULL_TREE;
}
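
/* Example (an editor's illustration): A / C + B / C becomes (A + B) / C,
   and A / 2.0 - A / 4.0 becomes A * 0.25 by folding 1/2 - 1/4 at compile
   time.  Both rewrites can change rounding and overflow behavior, which
   is why the comment above calls the optimization unsafe; callers are
   expected to guard it behind unsafe-math style flags.  */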

/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero
   and uses reverse storage order if REVERSEP is nonzero.  ORIG_INNER
   is the original memory reference used to preserve the alias set of
   the access.  */

static tree
make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
		    HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
		    int unsignedp, int reversep)
{
  tree result, bftype;

  /* Attempt not to lose the access path if possible.  */
  if (TREE_CODE (orig_inner) == COMPONENT_REF)
    {
      tree ninner = TREE_OPERAND (orig_inner, 0);
      machine_mode nmode;
      HOST_WIDE_INT nbitsize, nbitpos;
      tree noffset;
      int nunsignedp, nreversep, nvolatilep = 0;
      tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
				       &noffset, &nmode, &nunsignedp,
				       &nreversep, &nvolatilep);
      if (base == inner
	  && noffset == NULL_TREE
	  && nbitsize >= bitsize
	  && nbitpos <= bitpos
	  && bitpos + bitsize <= nbitpos + nbitsize
	  && !reversep
	  && !nreversep
	  && !nvolatilep)
	{
	  inner = ninner;
	  bitpos -= nbitpos;
	}
    }

  alias_set_type iset = get_alias_set (orig_inner);
  if (iset == 0 && get_alias_set (inner) != iset)
    inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
			 build_fold_addr_expr (inner),
			 build_int_cst (ptr_type_node, 0));

  if (bitpos == 0 && !reversep)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
	   || POINTER_TYPE_P (TREE_TYPE (inner)))
	  && tree_fits_shwi_p (size)
	  && tree_to_shwi (size) == bitsize)
	return fold_convert_loc (loc, type, inner);
    }

  bftype = type;
  if (TYPE_PRECISION (bftype) != bitsize
      || TYPE_UNSIGNED (bftype) == !unsignedp)
    bftype = build_nonstandard_integer_type (bitsize, 0);

  result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
		       size_int (bitsize), bitsize_int (bitpos));
  REF_REVERSE_STORAGE_ORDER (result) = reversep;

  if (bftype != type)
    result = fold_convert_loc (loc, type, result);

  return result;
}

/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (location_t loc, enum tree_code code,
			    tree compare_type, tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lreversep, rreversep;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
				&lunsignedp, &lreversep, &lvolatilep);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
    return 0;

  if (const_p)
    rreversep = lreversep;
  else
    {
      /* If this is not a constant, we can only do something if bit positions,
	 sizes, signedness and storage order are the same.  */
      rinner
	= get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
			       &runsignedp, &rreversep, &rvolatilep);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
	  || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
	  || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
	return 0;
    }

  /* Honor the C++ memory model and mimic what RTL expansion does.  */
  unsigned HOST_WIDE_INT bitstart = 0;
  unsigned HOST_WIDE_INT bitend = 0;
  if (TREE_CODE (lhs) == COMPONENT_REF)
    {
      get_bit_range (&bitstart, &bitend, lhs, &lbitpos, &offset);
      if (offset != NULL_TREE)
	return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos, bitstart, bitend,
			 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
			 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
				TYPE_ALIGN (TREE_TYPE (rinner))),
			 word_mode, false);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst_type (unsigned_type, -1);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (nbitsize - lbitsize - lbitpos));

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return fold_build2_loc (loc, code, compare_type,
			    fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
					     make_bit_field_ref (loc, linner, lhs,
								 unsigned_type,
								 nbitsize, nbitpos,
								 1, lreversep),
					     mask),
			    fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
					     make_bit_field_ref (loc, rinner, rhs,
								 unsigned_type,
								 nbitsize, nbitpos,
								 1, rreversep),
					     mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (wi::lrshift (rhs, lbitsize) != 0)
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }
  else
    {
      wide_int tem = wi::arshift (rhs, lbitsize - 1);
      if (tem != 0 && tem != -1)
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
			    nbitsize, nbitpos, 1, lreversep);

  rhs = const_binop (BIT_AND_EXPR,
		     const_binop (LSHIFT_EXPR,
				  fold_convert_loc (loc, unsigned_type, rhs),
				  size_int (lbitpos)),
		     mask);

  lhs = build2_loc (loc, code, compare_type,
		    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
  return lhs;
}
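
/* Worked example (an editor's illustration): for

     struct s { unsigned a : 3; unsigned b : 4; } x;

   the test 'x.b == 5' is rewritten to mask arithmetic on a word-sized
   load, roughly '(w & MASK) == (5 << SHIFT)' where w covers the
   containing unit, avoiding the shift a plain bit-field extraction
   would need.  A constant that cannot fit the field, such as
   'x.b == 77', triggers the "comparison is always ..." warning and
   folds to a constant result.  */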

/* Subroutine for fold_truth_andor_1: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PREVERSEP is set to the storage order of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
			HOST_WIDE_INT *pbitpos, machine_mode *pmode,
			int *punsignedp, int *preversep, int *pvolatilep,
			tree *pmask, tree *pand_mask)
{
  tree exp = *exp_;
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (CONVERT_EXPR_P (exp)
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
			       punsignedp, preversep, pvolatilep);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR
      /* Reject out-of-bound accesses (PR79731).  */
      || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
			       *pbitpos + *pbitsize) < 0))
    return 0;

  *exp_ = exp;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst_type (unsigned_type, -1);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
			    fold_convert_loc (loc, unsigned_type, and_mask),
			    mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}

/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bit positions and MASK is SIGNED.  */

static bool
all_ones_mask_p (const_tree mask, unsigned int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);

  /* If this function returns true when the type of the mask is
     UNSIGNED, then there will be errors.  In particular see
     gcc.c-torture/execute/990326-1.c.  There does not appear to be
     any documentation paper trail as to why this is so.  But the pre
     wide-int worked with that restriction and it has been preserved
     here.  */
  if (size > precision || TYPE_SIGN (type) == UNSIGNED)
    return false;

  return wi::mask (size, false, precision) == mask;
}

/* Subroutine for fold: determine if VAL is the INTEGER_CST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

tree
sign_bit_p (tree exp, const_tree val)
{
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (wi::only_sign_bit_p (val, width))
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
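
/* Example (an editor's illustration): with 32-bit 'int', sign_bit_p
   returns EXP for VAL == 0x80000000, the constant with only the sign bit
   set; for an expression extended from 'short' it also accepts 0x8000 by
   recursing into the narrower operand.  */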

/* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (const_tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
	  || TREE_CODE (exp) == SSA_NAME
	  || (DECL_P (exp)
	      && ! TREE_ADDRESSABLE (exp)
	      && ! TREE_THIS_VOLATILE (exp)
	      && ! DECL_NONLOCAL (exp)
	      /* Don't regard global variables as simple.  They may be
		 allocated in ways unknown to the compiler (shared memory,
		 #pragma weak, etc).  */
	      && ! TREE_PUBLIC (exp)
	      && ! DECL_EXTERNAL (exp)
	      /* Weakrefs are not safe to be read, since they can be NULL.
		 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
		 have DECL_WEAK flag set.  */
	      && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
	      /* Loading a static variable is unduly expensive, but global
		 registers aren't expensive.  */
	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}

/* Subroutine for fold_truth_andor: determine if an operand is simple enough
   to be evaluated unconditionally.
   In addition to simple_operand_p, we assume that comparisons, conversions,
   and logic-not operations are simple, if their operands are simple, too.  */

static bool
simple_operand_p_2 (tree exp)
{
  enum tree_code code;

  if (TREE_SIDE_EFFECTS (exp)
      || tree_could_trap_p (exp))
    return false;

  while (CONVERT_EXPR_P (exp))
    exp = TREE_OPERAND (exp, 0);

  code = TREE_CODE (exp);

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return (simple_operand_p (TREE_OPERAND (exp, 0))
	    && simple_operand_p (TREE_OPERAND (exp, 1)));

  if (code == TRUTH_NOT_EXPR)
    return simple_operand_p_2 (TREE_OPERAND (exp, 0));

  return simple_operand_p (exp);
}
4309 /* The following functions are subroutines to fold_range_test and allow it to
4310 try to change a logical combination of comparisons into a range test.
4313 X == 2 || X == 3 || X == 4 || X == 5
4317 (unsigned) (X - 2) <= 3
4319 We describe each set of comparisons as being either inside or outside
4320 a range, using a variable named like IN_P, and then describe the
4321 range with a lower and upper bound. If one of the bounds is omitted,
4322 it represents either the highest or lowest value of the type.
4324 In the comments below, we represent a range by two numbers in brackets
4325 preceded by a "+" to designate being inside that range, or a "-" to
4326 designate being outside that range, so the condition can be inverted by
4327 flipping the prefix. An omitted bound is represented by a "-". For
4328 example, "- [-, 10]" means being outside the range starting at the lowest
4329 possible value and ending at 10, in other words, being greater than 10.
4330 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4333 We set up things so that the missing bounds are handled in a consistent
4334 manner so neither a missing bound nor "true" and "false" need to be
4335 handled using a special case. */
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
             tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
                         arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
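/* Illustrative sketch (editorial addition, not GCC source; names are
   hypothetical): the sgn0/sgn1 encoding above, restated for plain ints.
   A null pointer plays the role of an omitted bound: minus infinity when
   it is a lower bound, plus infinity when it is an upper bound.  Mapping
   both "infinities" to sentinels outside int's range turns every bound
   comparison into an ordinary integer comparison, which is all the sgn
   encoding needs.  */

static inline long long
example_bound_value (const int *bound, int bound_is_upper)
{
  /* Any sentinel outside int's range works as "infinity" here.  */
  if (bound)
    return *bound;
  return bound_is_upper ? (1LL << 40) : -(1LL << 40);
}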
/* Helper routine for make_range.  Perform one step for it, return the
   new expression if the loop should continue or NULL_TREE if it should
   stop.  */

tree
make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
                 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
                 bool *strict_overflow_p)
{
  tree arg0_type = TREE_TYPE (arg0);
  tree n_low, n_high, low = *p_low, high = *p_high;
  int in_p = *p_in_p, n_in_p;

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      /* We can only do something if the range is testing for zero.  */
      if (low == NULL_TREE || high == NULL_TREE
          || ! integer_zerop (low) || ! integer_zerop (high))
        return NULL_TREE;
      *p_in_p = ! in_p;
      return arg0;

    case EQ_EXPR: case NE_EXPR:
    case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
      /* We can only do something if the range is testing for zero
         and if the second operand is an integer constant.  Note that
         saying something is "in" the range we make is done by
         complementing IN_P since it will set in the initial case of
         being not equal to zero; "out" is leaving it alone.  */
      if (low == NULL_TREE || high == NULL_TREE
          || ! integer_zerop (low) || ! integer_zerop (high)
          || TREE_CODE (arg1) != INTEGER_CST)
        return NULL_TREE;

      switch (code)
        {
        case NE_EXPR:  /* - [c, c]  */
          low = high = arg1;
          break;
        case EQ_EXPR:  /* + [c, c]  */
          in_p = ! in_p, low = high = arg1;
          break;
        case GT_EXPR:  /* - [-, c] */
          low = 0, high = arg1;
          break;
        case GE_EXPR:  /* + [c, -] */
          in_p = ! in_p, low = arg1, high = 0;
          break;
        case LT_EXPR:  /* - [c, -] */
          low = arg1, high = 0;
          break;
        case LE_EXPR:  /* + [-, c] */
          in_p = ! in_p, low = 0, high = arg1;
          break;
        default:
          gcc_unreachable ();
        }

      /* If this is an unsigned comparison, we also know that EXP is
         greater than or equal to zero.  We base the range tests we make
         on that fact, so we record it here so we can parse existing
         range tests.  We test arg0_type since often the return type
         of, e.g. EQ_EXPR, is boolean.  */
      if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
        {
          if (! merge_ranges (&n_in_p, &n_low, &n_high,
                              in_p, low, high, 1,
                              build_int_cst (arg0_type, 0),
                              NULL_TREE))
            return NULL_TREE;

          in_p = n_in_p, low = n_low, high = n_high;

          /* If the high bound is missing, but we have a nonzero low
             bound, reverse the range so it goes from zero to the low bound
             minus 1.  */
          if (high == 0 && low && ! integer_zerop (low))
            {
              in_p = ! in_p;
              high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
                                  build_int_cst (TREE_TYPE (low), 1), 0);
              low = build_int_cst (arg0_type, 0);
            }
        }

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;

    case NEGATE_EXPR:
      /* If flag_wrapv and ARG0_TYPE is signed, make sure
         low and high are non-NULL, then normalize will DTRT.  */
      if (!TYPE_UNSIGNED (arg0_type)
          && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
        {
          if (low == NULL_TREE)
            low = TYPE_MIN_VALUE (arg0_type);
          if (high == NULL_TREE)
            high = TYPE_MAX_VALUE (arg0_type);
        }

      /* (-x) IN [a,b] -> x in [-b, -a]  */
      n_low = range_binop (MINUS_EXPR, exp_type,
                           build_int_cst (exp_type, 0),
                           0, high, 1);
      n_high = range_binop (MINUS_EXPR, exp_type,
                            build_int_cst (exp_type, 0),
                            0, low, 0);
      if (n_high != 0 && TREE_OVERFLOW (n_high))
        return NULL_TREE;
      goto normalize;

    case BIT_NOT_EXPR:
      /* ~ X -> -X - 1  */
      return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
                         build_int_cst (exp_type, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
      if (TREE_CODE (arg1) != INTEGER_CST)
        return NULL_TREE;

      /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
         move a constant to the other side.  */
      if (!TYPE_UNSIGNED (arg0_type)
          && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
        return NULL_TREE;

      /* If EXP is signed, any overflow in the computation is undefined,
         so we don't worry about it so long as our computations on
         the bounds don't overflow.  For unsigned, overflow is defined
         and this is exactly the right thing.  */
      n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                           arg0_type, low, 0, arg1, 0);
      n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                            arg0_type, high, 1, arg1, 0);
      if ((n_low != 0 && TREE_OVERFLOW (n_low))
          || (n_high != 0 && TREE_OVERFLOW (n_high)))
        return NULL_TREE;

      if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
        *strict_overflow_p = true;

      normalize:
      /* Check for an unsigned range which has wrapped around the maximum
         value thus making n_high < n_low, and normalize it.  */
      if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
        {
          low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
                             build_int_cst (TREE_TYPE (n_high), 1), 0);
          high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
                              build_int_cst (TREE_TYPE (n_low), 1), 0);

          /* If the range is of the form +/- [ x+1, x ], we won't
             be able to normalize it.  But then, it represents the
             whole range or the empty set, so make it
             +/- [ -, - ].  */
          if (tree_int_cst_equal (n_low, low)
              && tree_int_cst_equal (n_high, high))
            low = high = 0;
          else
            in_p = ! in_p;
        }
      else
        low = n_low, high = n_high;

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;

    CASE_CONVERT:
    case NON_LVALUE_EXPR:
      if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
        return NULL_TREE;

      if (! INTEGRAL_TYPE_P (arg0_type)
          || (low != 0 && ! int_fits_type_p (low, arg0_type))
          || (high != 0 && ! int_fits_type_p (high, arg0_type)))
        return NULL_TREE;

      n_low = low, n_high = high;

      if (n_low != 0)
        n_low = fold_convert_loc (loc, arg0_type, n_low);

      if (n_high != 0)
        n_high = fold_convert_loc (loc, arg0_type, n_high);

      /* If we're converting arg0 from an unsigned type, to exp,
         a signed type, we will be doing the comparison as unsigned.
         The tests above have already verified that LOW and HIGH
         are both positive.

         So we have to ensure that we will handle large unsigned
         values the same way that the current signed bounds treat
         negative values.  */

      if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
        {
          tree high_positive;
          tree equiv_type;
          /* For fixed-point modes, we need to pass the saturating flag
             as the 2nd parameter.  */
          if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
            equiv_type
              = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
                                                TYPE_SATURATING (arg0_type));
          else
            equiv_type
              = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);

          /* A range without an upper bound is, naturally, unbounded.
             Since convert would have cropped a very large value, use
             the max value for the destination type.  */
          high_positive
            = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
              : TYPE_MAX_VALUE (arg0_type);

          if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
            high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
                                             fold_convert_loc (loc, arg0_type,
                                                               high_positive),
                                             build_int_cst (arg0_type, 1));

          /* If the low bound is specified, "and" the range with the
             range for which the original unsigned value will be
             positive.  */
          if (low != 0)
            {
              if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
                                  1, fold_convert_loc (loc, arg0_type,
                                                       integer_zero_node),
                                  high_positive))
                return NULL_TREE;

              in_p = (n_in_p == in_p);
            }
          else
            {
              /* Otherwise, "or" the range with the range of the input
                 that will be interpreted as negative.  */
              if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
                                  1, fold_convert_loc (loc, arg0_type,
                                                       integer_zero_node),
                                  high_positive))
                return NULL_TREE;

              in_p = (in_p != n_in_p);
            }
        }

      *p_low = n_low;
      *p_high = n_high;
      *p_in_p = in_p;
      return arg0;

    default:
      return NULL_TREE;
    }
}
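/* Illustrative sketch (editorial addition, not GCC source; the function
   name is hypothetical): the "(-x) IN [a,b] -> x IN [-b,-a]" step above,
   for plain ints with both bounds finite and no overflow at the edges.  */

static inline void
example_negate_range (int *plow, int *phigh)
{
  int low = *plow, high = *phigh;
  *plow = -high;   /* the new lower bound comes from the old upper bound */
  *phigh = -low;
}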
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */

tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
            bool *strict_overflow_p)
{
  enum tree_code code;
  tree arg0, arg1 = NULL_TREE;
  tree exp_type, nexp;
  int in_p;
  tree low, high;
  location_t loc = EXPR_LOCATION (exp);

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);
      arg0 = NULL_TREE;

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
        {
          if (TREE_OPERAND_LENGTH (exp) > 0)
            arg0 = TREE_OPERAND (exp, 0);
          if (TREE_CODE_CLASS (code) == tcc_binary
              || TREE_CODE_CLASS (code) == tcc_comparison
              || (TREE_CODE_CLASS (code) == tcc_expression
                  && TREE_OPERAND_LENGTH (exp) > 1))
            arg1 = TREE_OPERAND (exp, 1);
        }
      if (arg0 == NULL_TREE)
        break;

      nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
                              &high, &in_p, strict_overflow_p);
      if (nexp == NULL_TREE)
        break;
      exp = nexp;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
                                                 exp, 0, low, 0))
                      && integer_onep (range_binop (LE_EXPR, integer_type_node,
                                                    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

tree
build_range_check (location_t loc, tree type, tree exp, int in_p,
                   tree low, tree high)
{
  tree etype = TREE_TYPE (exp), value;

  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (targetm.have_canonicalize_funcptr_for_compare ()
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;

  if (! in_p)
    {
      value = build_range_check (loc, type, exp, 1, low, high);
      if (value != 0)
        return invert_truthvalue_loc (loc, value);

      return 0;
    }

  if (low == 0 && high == 0)
    return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);

  if (low == 0)
    return fold_build2_loc (loc, LE_EXPR, type, exp,
                            fold_convert_loc (loc, etype, high));

  if (high == 0)
    return fold_build2_loc (loc, GE_EXPR, type, exp,
                            fold_convert_loc (loc, etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2_loc (loc, EQ_EXPR, type, exp,
                            fold_convert_loc (loc, etype, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
        {
          etype = unsigned_type_for (etype);
          high = fold_convert_loc (loc, etype, high);
          exp = fold_convert_loc (loc, etype, exp);
        }
      return build_range_check (loc, type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      int prec = TYPE_PRECISION (etype);

      if (wi::mask (prec - 1, false, prec) == high)
        {
          if (TYPE_UNSIGNED (etype))
            {
              tree signed_etype = signed_type_for (etype);
              if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
                etype
                  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
              else
                etype = signed_etype;
              exp = fold_convert_loc (loc, etype, exp);
            }
          return fold_build2_loc (loc, GT_EXPR, type, exp,
                                  build_int_cst (etype, 0));
        }
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetics for the type of the expression.
     First make sure that arithmetics in this type is valid, then make sure
     that it wraps around.  */
  if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
    etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
                                            TYPE_UNSIGNED (etype));

  if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
         for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
                          build_int_cst (TREE_TYPE (maxv), 1), 1);
      minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
                                      minv, 1, maxv, 1)))
        etype = utype;
      else
        return 0;
    }

  high = fold_convert_loc (loc, etype, high);
  low = fold_convert_loc (loc, etype, low);
  exp = fold_convert_loc (loc, etype, exp);

  value = const_binop (MINUS_EXPR, high, low);

  if (POINTER_TYPE_P (etype))
    {
      if (value != 0 && !TREE_OVERFLOW (value))
        {
          low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
          return build_range_check (loc, type,
                                    fold_build_pointer_plus_loc (loc, exp, low),
                                    1, build_int_cst (etype, 0), value);
        }
      return 0;
    }

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (loc, type,
                              fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
                              1, build_int_cst (etype, 0), value);

  return 0;
}
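/* Illustrative sketch (editorial addition, not GCC source; the function
   name is hypothetical): the "(c >= 1) && (c <= 127)" rewrite above for
   an 8-bit value.  Reinterpreted as a signed char, the bytes 1..127 are
   exactly the positive values, so the two-test range check collapses to
   one signed comparison.  The conversion below relies on the ordinary
   two's-complement behavior GCC targets provide.  */

static inline int
example_range_1_to_127 (unsigned char c)
{
  return (signed char) c > 0;   /* same truth table as c >= 1 && c <= 127 */
}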
/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
                        build_int_cst (TREE_TYPE (val), 1), 0);
}
/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
                        build_int_cst (TREE_TYPE (val), 1), 0);
}
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

bool
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
              tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
                  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
                   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
                                 low0, 0, low1, 0))
      || (lowequal
          && integer_onep (range_binop (GT_EXPR, integer_type_node,
                                        high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
                                          high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
                                      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
         is a subset it is the result.  Otherwise, the range is from the start
         of the second to the end of the first.  */
      if (no_overlap)
        in_p = 0, low = high = 0;
      else if (subset)
        in_p = 1, low = low1, high = high1;
      else
        in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
         equal, the result is false.  If the second range is a subset of the
         first, and the ranges begin at the same place, we go from just after
         the end of the second range to the end of the first.  If the second
         range is not a subset of the first, or if it is a subset and both
         ranges end at the same place, the range starts at the start of the
         first range and ends just before the second range.
         Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
        in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
        in_p = 0, low = high = 0;
      else if (subset && lowequal)
        {
          low = range_successor (high1);
          high = high0;
          in_p = 1;
          if (low == 0)
            {
              /* We are in the weird situation where high0 > high1 but
                 high1 has no successor.  Punt.  */
              return 0;
            }
        }
      else if (! subset || highequal)
        {
          low = low0;
          high = range_predecessor (low1);
          in_p = 1;
          if (high == 0)
            {
              /* low0 < low1 but low1 has no predecessor.  Punt.  */
              return 0;
            }
        }
      else
        return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
         is a subset of the first, the result is false.  Otherwise,
         the range starts just after the first range and ends at the
         end of the second.  */
      if (no_overlap)
        in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
        in_p = 0, low = high = 0;
      else
        {
          low = range_successor (high0);
          high = high1;
          in_p = 1;
          if (low == 0)
            {
              /* high1 > high0 but high0 has no successor.  Punt.  */
              return 0;
            }
        }
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
         is if they don't overlap.  In that case, the only time we have a
         range is if they are adjacent.  If the second is a subset of the
         first, the result is the first.  Otherwise, the range to exclude
         starts at the beginning of the first range and ends at the end of the
         second.  */
      if (no_overlap)
        {
          if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                         range_successor (high0),
                                         1, low1, 0)))
            in_p = 0, low = low0, high = high1;
          else
            {
              /* Canonicalize - [min, x] into - [-, x].  */
              if (low0 && TREE_CODE (low0) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (low0)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (low0))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                    if (tree_int_cst_equal (low0,
                                            TYPE_MIN_VALUE (TREE_TYPE (low0))))
                      low0 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (low0))
                        && integer_zerop (low0))
                      low0 = 0;
                    break;
                  default:
                    break;
                  }

              /* Canonicalize - [x, max] into - [x, -].  */
              if (high1 && TREE_CODE (high1) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (high1)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (high1))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                    if (tree_int_cst_equal (high1,
                                            TYPE_MAX_VALUE (TREE_TYPE (high1))))
                      high1 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (high1))
                        && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
                                                       high1, 1,
                                                       build_int_cst (TREE_TYPE (high1), 1),
                                                       1)))
                      high1 = 0;
                    break;
                  default:
                    break;
                  }

              /* The ranges might be also adjacent between the maximum and
                 minimum values of the given type.  For
                 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
                 return + [x + 1, y - 1].  */
              if (low0 == 0 && high1 == 0)
                {
                  low = range_successor (high0);
                  high = range_predecessor (low1);
                  if (low == 0 || high == 0)
                    return 0;

                  in_p = 1;
                }
              else
                return 0;
            }
        }
      else if (subset)
        in_p = 0, low = low0, high = high0;
      else
        in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
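/* Illustrative sketch (editorial addition, not GCC source; the function
   name is hypothetical): the "both ranges included" case above is plain
   interval intersection once range 0 is known to start first
   (low0 <= low1), here for finite int bounds.  */

static inline int
example_intersect_in_in (int low0, int high0, int low1, int high1,
                         int *plow, int *phigh)
{
  if (high0 < low1)             /* no overlap: the result is "false" */
    return 0;
  *plow = low1;                 /* range 0 starts first, so low1 is larger */
  *phigh = high1 < high0 ? high1 : high0;
  return 1;
}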
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is being used also to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (location_t loc, tree type,
                                tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if (!HONOR_SIGNED_ZEROS (element_mode (type))
      && (FLOAT_TYPE_P (TREE_TYPE (arg01))
          ? real_zerop (arg01)
          : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
           && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
          /* In the case that A is of the form X-Y, '-A' (arg2) may
             have already been folded to Y-X, check for that.  */
          || (TREE_CODE (arg1) == MINUS_EXPR
              && TREE_CODE (arg2) == MINUS_EXPR
              && operand_equal_p (TREE_OPERAND (arg1, 0),
                                  TREE_OPERAND (arg2, 1), 0)
              && operand_equal_p (TREE_OPERAND (arg1, 1),
                                  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
        tem = fold_convert_loc (loc, arg1_type, arg1);
        return fold_convert_loc (loc, type, negate_expr (tem));
      case NE_EXPR:
      case LTGT_EXPR:
        return fold_convert_loc (loc, type, arg1);
      case UNGE_EXPR:
      case UNGT_EXPR:
        if (flag_trapping_math)
          break;
        /* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
        if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
          break;
        tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
        return fold_convert_loc (loc, type, tem);
      case UNLE_EXPR:
      case UNLT_EXPR:
        if (flag_trapping_math)
          break;
        /* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
        if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
          break;
        tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
        return negate_expr (fold_convert_loc (loc, type, tem));
      default:
        gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
        break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (!HONOR_SIGNED_ZEROS (element_mode (type))
      && integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
        return fold_convert_loc (loc, type, arg1);
      else if (comp_code == EQ_EXPR)
        return build_zero_cst (type);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (!HONOR_SIGNED_ZEROS (element_mode (type))
      && operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
         as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
          || VECTOR_TYPE_P (type)
          || (! lang_GNU_CXX ()
              && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
          || ! maybe_lvalue_p (arg1)
          || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      switch (comp_code)
        {
        case EQ_EXPR:
          return fold_convert_loc (loc, type, arg2);
        case NE_EXPR:
          return fold_convert_loc (loc, type, arg1);
        case LE_EXPR:
        case LT_EXPR:
        case UNLE_EXPR:
        case UNLT_EXPR:
          /* In C++ a ?: expression can be an lvalue, so put the
             operand which will be used if they are equal first
             so that we can convert this back to the
             corresponding COND_EXPR.  */
          if (!HONOR_NANS (arg1))
            {
              comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
              comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
              tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
                    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
                    : fold_build2_loc (loc, MIN_EXPR, comp_type,
                                       comp_op1, comp_op0);
              return fold_convert_loc (loc, type, tem);
            }
          break;
        case GE_EXPR:
        case GT_EXPR:
        case UNGE_EXPR:
        case UNGT_EXPR:
          if (!HONOR_NANS (arg1))
            {
              comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
              comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
              tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
                    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
                    : fold_build2_loc (loc, MAX_EXPR, comp_type,
                                       comp_op1, comp_op0);
              return fold_convert_loc (loc, type, tem);
            }
          break;
        case UNEQ_EXPR:
          if (!HONOR_NANS (arg1))
            return fold_convert_loc (loc, type, arg2);
          break;
        case LTGT_EXPR:
          if (!HONOR_NANS (arg1))
            return fold_convert_loc (loc, type, arg1);
          break;
        default:
          gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
          break;
        }
    }

  return NULL_TREE;
}
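/* Illustrative sketch (editorial addition, not GCC source; the function
   name is hypothetical): the "A < B ? A : B -> min" rewrite above, and
   the signed-zero hazard that blocks it.  With a = +0.0 and b = -0.0 the
   conditional yields b, i.e. -0.0 (since +0.0 < -0.0 is false), while a
   MIN_EXPR is free to return either zero, so the rewrite is only safe
   when signed zeros are not honored.  */

static inline double
example_cond_to_min (double a, double b)
{
  return a < b ? a : b;         /* what MIN_EXPR replaces when it is safe */
}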
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
                false) >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (location_t loc, enum tree_code code, tree type,
                 tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
               || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree tem, lhs, rhs;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
                                  "when simplifying range test");

  if (!INTEGRAL_TYPE_P (type))
    return 0;

  lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
                       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (loc, type,
                                         lhs != 0 ? lhs
                                         : rhs != 0 ? rhs : integer_zero_node,
                                         in_p, low, high))))
    {
      if (strict_overflow_p)
        fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue_loc (loc, tem) : tem;
    }

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
           && lhs != 0 && rhs != 0
           && (code == TRUTH_ANDIF_EXPR
               || code == TRUTH_ORIF_EXPR)
           && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
         unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
         which cases we can't do this.  */
      if (simple_operand_p (lhs))
        return build2_loc (loc, code == TRUTH_ANDIF_EXPR
                           ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
                           type, op0, op1);

      else if (!lang_hooks.decls.global_bindings_p ()
               && !CONTAINS_PLACEHOLDER_P (lhs))
        {
          tree common = save_expr (lhs);

          if (0 != (lhs = build_range_check (loc, type, common,
                                             or_op ? ! in0_p : in0_p,
                                             low0, high0))
              && (0 != (rhs = build_range_check (loc, type, common,
                                                 or_op ? ! in1_p : in1_p,
                                                 low1, high1))))
            {
              if (strict_overflow_p)
                fold_overflow_warning (warnmsg,
                                       WARN_STRICT_OVERFLOW_COMPARISON);
              return build2_loc (loc, code == TRUTH_ANDIF_EXPR
                                 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
                                 type, lhs, rhs);
            }
        }
    }

  return 0;
}
/* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is signed-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
                        fold_convert (TREE_TYPE (c), mask));
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
}
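/* Illustrative sketch (editorial addition, not GCC source; the function
   name is hypothetical): producing the sign-extended form of a P-bit
   value that unextend reasons about, using the classic xor/subtract
   trick instead of unextend's shift sequence.  C holds a P-bit value in
   its low bits; xoring with the field's sign bit and subtracting it
   propagates bit P-1 into all the upper bits.  The final conversion to
   int relies on ordinary two's-complement behavior.  */

static inline int
example_sign_extend (unsigned c, int p)
{
  unsigned m = 1u << (p - 1);   /* sign bit of the P-bit field */
  return (int) ((c ^ m) - m);   /* 0 extends with 0s, 1 fills the top */
}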
/* For an expression that has the form
     (A && B) || ~A
   or
     (A || B) && ~A,
   we can drop one of the inner expressions and simplify to
     B || ~A
   or
     B && ~A

   LOC is the location of the resulting expression.  OP is the inner
   logical operation; the left-hand side in the examples above, while CMPOP
   is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
   removing a condition that guards another, as in
     (A != NULL && A->...) || A == NULL
   which we must not transform.  If RHS_ONLY is true, only eliminate the
   right-most operand of the inner logical operation.  */

static tree
merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
                                 bool rhs_only)
{
  tree type = TREE_TYPE (cmpop);
  enum tree_code code = TREE_CODE (cmpop);
  enum tree_code truthop_code = TREE_CODE (op);
  tree lhs = TREE_OPERAND (op, 0);
  tree rhs = TREE_OPERAND (op, 1);
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code rhs_code = TREE_CODE (rhs);
  enum tree_code lhs_code = TREE_CODE (lhs);
  enum tree_code inv_code;

  if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
    return NULL_TREE;

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return NULL_TREE;

  if (rhs_code == truthop_code)
    {
      tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
      if (newrhs != NULL_TREE)
        {
          rhs = newrhs;
          rhs_code = TREE_CODE (rhs);
        }
    }
  if (lhs_code == truthop_code && !rhs_only)
    {
      tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
      if (newlhs != NULL_TREE)
        {
          lhs = newlhs;
          lhs_code = TREE_CODE (lhs);
        }
    }

  inv_code = invert_tree_comparison (code, HONOR_NANS (type));
  if (inv_code == rhs_code
      && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return lhs;
  if (!rhs_only && inv_code == lhs_code
      && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return rhs;
  if (rhs != orig_rhs || lhs != orig_lhs)
    return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
                            lhs, rhs);
  return NULL_TREE;
}
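/* Illustrative identity (editorial addition, not GCC source; the
   function name is hypothetical): what the merge above exploits.
   (P && Q) || !P and Q || !P have the same truth table, so the inner P
   can be dropped; the (P || Q) && !P case is the dual.  */

static inline int
example_drop_opposite_arm (int p, int q)
{
  return q || !p;               /* same truth table as (p && q) || !p */
}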
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
                    tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
        (a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
  machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  HOST_WIDE_INT first_bit, end_bit;
  int volatilep;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
                    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
                    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
          && operand_equal_p (lr_arg, rr_arg, 0))
        {
          result = combine_comparisons (loc, code, lcode, rcode,
                                        truth_type, ll_arg, lr_arg);
          if (result)
            return result;
        }
      else if (operand_equal_p (ll_arg, rr_arg, 0)
               && operand_equal_p (lr_arg, rl_arg, 0))
        {
          result = combine_comparisons (loc, code, lcode,
                                        swap_tree_comparison (rcode),
                                        truth_type, ll_arg, lr_arg);
          if (result)
            return result;
        }
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
          ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  */

  if (BRANCH_COST (optimize_function_for_speed_p (cfun),
                   false) >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
          && lcode == NE_EXPR && integer_zerop (lr_arg)
          && rcode == NE_EXPR && integer_zerop (rr_arg)
          && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
          && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
        return build2_loc (loc, NE_EXPR, truth_type,
                           build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
                                   ll_arg, rl_arg),
                           build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
          && lcode == EQ_EXPR && integer_zerop (lr_arg)
          && rcode == EQ_EXPR && integer_zerop (rr_arg)
          && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
          && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
        return build2_loc (loc, EQ_EXPR, truth_type,
                           build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
                                   ll_arg, rl_arg),
                           build_int_cst (TREE_TYPE (ll_arg), 0));
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
  volatilep = 0;
  ll_inner = decode_field_reference (loc, &ll_arg,
                                     &ll_bitsize, &ll_bitpos, &ll_mode,
                                     &ll_unsignedp, &ll_reversep, &volatilep,
                                     &ll_mask, &ll_and_mask);
  lr_inner = decode_field_reference (loc, &lr_arg,
                                     &lr_bitsize, &lr_bitpos, &lr_mode,
                                     &lr_unsignedp, &lr_reversep, &volatilep,
                                     &lr_mask, &lr_and_mask);
  rl_inner = decode_field_reference (loc, &rl_arg,
                                     &rl_bitsize, &rl_bitpos, &rl_mode,
                                     &rl_unsignedp, &rl_reversep, &volatilep,
                                     &rl_mask, &rl_and_mask);
  rr_inner = decode_field_reference (loc, &rr_arg,
                                     &rr_bitsize, &rr_bitpos, &rr_mode,
                                     &rr_unsignedp, &rr_reversep, &volatilep,
                                     &rr_mask, &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep
      || ll_reversep != rl_reversep
      || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    {
      l_const = lr_arg, r_const = rr_arg;
      lr_reversep = ll_reversep;
    }
  else if (lr_reversep != rr_reversep
           || lr_inner == 0 || rr_inner == 0
           || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
        {
          /* Make the left operand unsigned, since we are only interested
             in the value of one bit.  Otherwise we are doing the wrong
             thing below.  */
          ll_unsignedp = 1;
          l_const = ll_mask;
        }
      else
        return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
        {
          rl_unsignedp = 1;
          r_const = rl_mask;
        }
      else
        return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
                          TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
                          volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
                         size_int (xll_bitpos));
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
                         size_int (xrl_bitpos));

  if (l_const)
    {
      l_const = fold_convert_loc (loc, lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
                                        fold_build1_loc (loc, BIT_NOT_EXPR,
                                                         lntype, ll_mask))))
        {
          warning (0, "comparison is always %d", wanted_code == NE_EXPR);

          return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
        }
    }
  if (r_const)
    {
      r_const = fold_convert_loc (loc, lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
                                        fold_build1_loc (loc, BIT_NOT_EXPR,
                                                         lntype, rl_mask))))
        {
          warning (0, "comparison is always %d", wanted_code == NE_EXPR);

          return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
        }
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
          || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
          /* Make sure the two fields on the right
             correspond to the left without being swapped.  */
          || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
        return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
                              TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
                              volatilep);
      if (rnmode == VOIDmode)
        return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
        {
          xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
          xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
        }

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
                                                            rntype, lr_mask),
                             size_int (xlr_bitpos));
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
                                                            rntype, rr_mask),
                             size_int (xrr_bitpos));

      /* Make a mask that corresponds to both fields being compared.
         Do this for both items being compared.  If the operands are the
         same size and the bits being compared are in the same position
         then we can do this by masking both and comparing the masked
         results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
        {
          lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
                                    lntype, lnbitsize, lnbitpos,
                                    ll_unsignedp || rl_unsignedp, ll_reversep);
          if (! all_ones_mask_p (ll_mask, lnbitsize))
            lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

          rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
                                    rntype, rnbitsize, rnbitpos,
                                    lr_unsignedp || rr_unsignedp, lr_reversep);
          if (! all_ones_mask_p (lr_mask, rnbitsize))
            rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

          return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
        }

      /* There is still another way we can do something:  If both pairs of
         fields being compared are adjacent, we may be able to make a wider
         field containing them both.

         Note that we still must mask the lhs/rhs expressions.  Furthermore,
         the mask must be shifted to account for the shift done by
         make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
           && lr_bitsize + lr_bitpos == rr_bitpos)
          || (ll_bitpos == rl_bitpos + rl_bitsize
              && lr_bitpos == rr_bitpos + rr_bitsize))
        {
          tree type;

          lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
                                    ll_bitsize + rl_bitsize,
                                    MIN (ll_bitpos, rl_bitpos),
                                    ll_unsignedp, ll_reversep);
          rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
                                    lr_bitsize + rr_bitsize,
                                    MIN (lr_bitpos, rr_bitpos),
                                    lr_unsignedp, lr_reversep);

          ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
                                 size_int (MIN (xll_bitpos, xrl_bitpos)));
          lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
                                 size_int (MIN (xlr_bitpos, xrr_bitpos)));

          /* Convert to the smaller type before masking out unwanted bits.  */
          type = lntype;
          if (lntype != rntype)
            {
              if (lnbitsize > rnbitsize)
                {
                  lhs = fold_convert_loc (loc, rntype, lhs);
                  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
                  type = rntype;
                }
              else if (lnbitsize < rnbitsize)
                {
                  rhs = fold_convert_loc (loc, lntype, rhs);
                  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
                  type = lntype;
                }
            }

          if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
            lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

          if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
            rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

          return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
        }

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
                           const_binop (BIT_AND_EXPR, result, r_const)) != 1)
    {
      if (wanted_code == NE_EXPR)
        {
          warning (0, "%<or%> of unmatched not-equal tests is always 1");
          return constant_boolean_node (true, truth_type);
        }
      else
        {
          warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
          return constant_boolean_node (false, truth_type);
        }
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (loc, ll_inner, ll_arg,
                               lntype, lnbitsize, lnbitpos,
                               ll_unsignedp || rl_unsignedp, ll_reversep);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);

  return build2_loc (loc, wanted_code, truth_type, result,
                     const_binop (BIT_IOR_EXPR, l_const, r_const));
}
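/* Illustrative sketch (editorial addition, not GCC source; the struct,
   function, and constant layout are hypothetical): the kind of rewrite
   fold_truth_andor_1 aims for.  With two adjacent byte fields,
   "p->a == 2 && p->b == 4" can become a single wider masked compare,
   here spelled as one two-byte load against a combined constant,
   assuming a little-endian layout.  */

struct example_pair { unsigned char a, b; };

static inline int
example_merged_compare (const struct example_pair *p)
{
  unsigned short v;
  __builtin_memcpy (&v, p, sizeof v);           /* one two-byte load */
  return v == (unsigned short) (2 | (4 << 8));  /* little-endian constant */
}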
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.

   We set *STRICT_OVERFLOW_P to true if the return value depends on
   signed overflow being undefined.  Otherwise we do not change
   *STRICT_OVERFLOW_P.  */

static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
                bool *strict_overflow_p)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
  depth--;

  return ret;
}
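/* Illustrative sketch (editorial addition, not GCC source; the function
   name is hypothetical): the rewrite from the comment above, done by
   hand.  (x * 8 + y * 16) / 4 equals x * 2 + y * 4 whenever the
   intermediate products cannot overflow, or overflow is undefined for
   the type and may therefore be assumed away.  */

static inline long
example_extract_muldiv (long x, long y)
{
  return x * 2 + y * 4;         /* == (x * 8 + y * 16) / 4 under that premise */
}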
5998 extract_muldiv_1 (tree t
, tree c
, enum tree_code code
, tree wide_type
,
5999 bool *strict_overflow_p
)
6001 tree type
= TREE_TYPE (t
);
6002 enum tree_code tcode
= TREE_CODE (t
);
6003 tree ctype
= (wide_type
!= 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type
))
6004 > GET_MODE_SIZE (TYPE_MODE (type
)))
6005 ? wide_type
: type
);
6007 int same_p
= tcode
== code
;
6008 tree op0
= NULL_TREE
, op1
= NULL_TREE
;
6009 bool sub_strict_overflow_p
;
6011 /* Don't deal with constants of zero here; they confuse the code below. */
6012 if (integer_zerop (c
))
6015 if (TREE_CODE_CLASS (tcode
) == tcc_unary
)
6016 op0
= TREE_OPERAND (t
, 0);
6018 if (TREE_CODE_CLASS (tcode
) == tcc_binary
)
6019 op0
= TREE_OPERAND (t
, 0), op1
= TREE_OPERAND (t
, 1);
6021 /* Note that we need not handle conditional operations here since fold
6022 already handles those cases. So just do arithmetic here. */
6026 /* For a constant, we can always simplify if we are a multiply
6027 or (for divide and modulus) if it is a multiple of our constant. */
6028 if (code
== MULT_EXPR
6029 || wi::multiple_of_p (t
, c
, TYPE_SIGN (type
)))
6031 tree tem
= const_binop (code
, fold_convert (ctype
, t
),
6032 fold_convert (ctype
, c
));
6033 /* If the multiplication overflowed, we lost information on it.
6034 See PR68142 and PR69845. */
6035 if (TREE_OVERFLOW (tem
))
6041 CASE_CONVERT
: case NON_LVALUE_EXPR
:
6042 /* If op0 is an expression ... */
6043 if ((COMPARISON_CLASS_P (op0
)
6044 || UNARY_CLASS_P (op0
)
6045 || BINARY_CLASS_P (op0
)
6046 || VL_EXP_CLASS_P (op0
)
6047 || EXPRESSION_CLASS_P (op0
))
6048 /* ... and has wrapping overflow, and its type is smaller
6049 than ctype, then we cannot pass through as widening. */
6050 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0
))
6051 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0
)))
6052 && (TYPE_PRECISION (ctype
)
6053 > TYPE_PRECISION (TREE_TYPE (op0
))))
6054 /* ... or this is a truncation (t is narrower than op0),
6055 then we cannot pass through this narrowing. */
6056 || (TYPE_PRECISION (type
)
6057 < TYPE_PRECISION (TREE_TYPE (op0
)))
6058 /* ... or signedness changes for division or modulus,
6059 then we cannot pass through this conversion. */
6060 || (code
!= MULT_EXPR
6061 && (TYPE_UNSIGNED (ctype
)
6062 != TYPE_UNSIGNED (TREE_TYPE (op0
))))
6063 /* ... or has undefined overflow while the converted to
6064 type has not, we cannot do the operation in the inner type
6065 as that would introduce undefined overflow. */
6066 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0
))
6067 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0
)))
6068 && !TYPE_OVERFLOW_UNDEFINED (type
))))
6071 /* Pass the constant down and see if we can make a simplification. If
6072 we can, replace this expression with the inner simplification for
6073 possible later conversion to our or some other type. */
6074 if ((t2
= fold_convert (TREE_TYPE (op0
), c
)) != 0
6075 && TREE_CODE (t2
) == INTEGER_CST
6076 && !TREE_OVERFLOW (t2
)
6077 && (0 != (t1
= extract_muldiv (op0
, t2
, code
,
6079 ? ctype
: NULL_TREE
,
6080 strict_overflow_p
))))
6085 /* If widening the type changes it from signed to unsigned, then we
6086 must avoid building ABS_EXPR itself as unsigned. */
6087 if (TYPE_UNSIGNED (ctype
) && !TYPE_UNSIGNED (type
))
6089 tree cstype
= (*signed_type_for
) (ctype
);
6090 if ((t1
= extract_muldiv (op0
, c
, code
, cstype
, strict_overflow_p
))
6093 t1
= fold_build1 (tcode
, cstype
, fold_convert (cstype
, t1
));
6094 return fold_convert (ctype
, t1
);
6098 /* If the constant is negative, we cannot simplify this. */
6099 if (tree_int_cst_sgn (c
) == -1)
6103 /* For division and modulus, type can't be unsigned, as e.g.
6104 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6105 For signed types, even with wrapping overflow, this is fine. */
6106 if (code
!= MULT_EXPR
&& TYPE_UNSIGNED (type
))
6108 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
, strict_overflow_p
))
6110 return fold_build1 (tcode
, ctype
, fold_convert (ctype
, t1
));
6113 case MIN_EXPR
: case MAX_EXPR
:
6114 /* If widening the type changes the signedness, then we can't perform
6115 this optimization as that changes the result. */
6116 if (TYPE_UNSIGNED (ctype
) != TYPE_UNSIGNED (type
))
6119 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6120 sub_strict_overflow_p
= false;
6121 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
,
6122 &sub_strict_overflow_p
)) != 0
6123 && (t2
= extract_muldiv (op1
, c
, code
, wide_type
,
6124 &sub_strict_overflow_p
)) != 0)
6126 if (tree_int_cst_sgn (c
) < 0)
6127 tcode
= (tcode
== MIN_EXPR
? MAX_EXPR
: MIN_EXPR
);
6128 if (sub_strict_overflow_p
)
6129 *strict_overflow_p
= true;
6130 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
6131 fold_convert (ctype
, t2
));
6135 case LSHIFT_EXPR
: case RSHIFT_EXPR
:
6136 /* If the second operand is constant, this is a multiplication
6137 or floor division, by a power of two, so we can treat it that
6138 way unless the multiplier or divisor overflows. Signed
6139 left-shift overflow is implementation-defined rather than
6140 undefined in C90, so do not convert signed left shift into
6142 if (TREE_CODE (op1
) == INTEGER_CST
6143 && (tcode
== RSHIFT_EXPR
|| TYPE_UNSIGNED (TREE_TYPE (op0
)))
6144 /* const_binop may not detect overflow correctly,
6145 so check for it explicitly here. */
6146 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node
)), op1
)
6147 && 0 != (t1
= fold_convert (ctype
,
6148 const_binop (LSHIFT_EXPR
,
6151 && !TREE_OVERFLOW (t1
))
6152 return extract_muldiv (build2 (tcode
== LSHIFT_EXPR
6153 ? MULT_EXPR
: FLOOR_DIV_EXPR
,
6155 fold_convert (ctype
, op0
),
6157 c
, code
, wide_type
, strict_overflow_p
);
    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
         can return a new PLUS or MINUS.  If we can't, the only remaining
         cases where we can do anything are if the second operand is a
         constant.  */
      sub_strict_overflow_p = false;
      t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
      t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
      if (t1 != 0 && t2 != 0
          && (code == MULT_EXPR
              /* If not multiplication, we can only do this if both operands
                 are divisible by c.  */
              || (multiple_of_p (ctype, op0, c)
                  && multiple_of_p (ctype, op1, c))))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
                              fold_convert (ctype, t2));
        }

      /* If this was a subtraction, negate OP1 and set it to be an addition.
         This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
        {
          tcode = PLUS_EXPR, op1 = negate_expr (op1);
          /* If OP1 was not easily negatable, the constant may be OP0.  */
          if (TREE_CODE (op0) == INTEGER_CST)
            {
              std::swap (op0, op1);
              std::swap (t1, t2);
            }
        }

      if (TREE_CODE (op1) != INTEGER_CST)
        break;

      /* If either OP1 or C are negative, this optimization is not safe for
         some of the division and remainder types while for others we need
         to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
        {
          if (code == CEIL_DIV_EXPR)
            code = FLOOR_DIV_EXPR;
          else if (code == FLOOR_DIV_EXPR)
            code = CEIL_DIV_EXPR;
          else if (code != MULT_EXPR
                   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
            break;
        }

      /* If it's a multiply or a division/modulus operation of a multiple
         of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
          || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
        {
          op1 = const_binop (code, fold_convert (ctype, op1),
                             fold_convert (ctype, c));
          /* We allow the constant to overflow with wrapping semantics.  */
          if (op1 == 0
              || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
            break;
        }
      else
        break;

      /* If we have an unsigned type, we cannot widen the operation since it
         will change the result if the original computation overflowed.  */
      if (TYPE_UNSIGNED (ctype) && ctype != type)
        break;

      /* If we were able to eliminate our operation from the first side,
         apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
        return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
         apply the distributive law to commute the multiply and addition
         if the multiplication of the constants doesn't overflow
         and overflow is defined.  With undefined overflow
         op0 * c might overflow, while (op0 + orig_op1) * c doesn't.  */
      if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
        return fold_build2 (tcode, ctype,
                            fold_build2 (code, ctype,
                                         fold_convert (ctype, op0),
                                         fold_convert (ctype, c)),
                            op1);
      break;

    case TRUNC_MOD_EXPR:  case CEIL_MOD_EXPR:  case FLOOR_MOD_EXPR:
      /* We have a special case here if we are doing something like
         (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
           || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
          /* If the multiplication can overflow we cannot optimize this.  */
          && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
          && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
          && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
        {
          *strict_overflow_p = true;
          return omit_one_operand (type, integer_zero_node, op0);
        }

      /* ... fall through ...  */
    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
         new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
         do something only if the second operand is a constant.  */
      if (same_p
          && (t1 = extract_muldiv (op0, c, code, wide_type,
                                   strict_overflow_p)) != 0)
        return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
                            fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
               && (t1 = extract_muldiv (op1, c, code, wide_type,
                                        strict_overflow_p)) != 0)
        return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
                            fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
        return 0;

      /* If these are the same operation types, we can associate them
         assuming no overflow.  */
      if (tcode == code)
        {
          bool overflow_p = false;
          bool overflow_mul_p;
          signop sign = TYPE_SIGN (ctype);
          unsigned prec = TYPE_PRECISION (ctype);
          wide_int mul = wi::mul (wi::to_wide (op1, prec),
                                  wi::to_wide (c, prec),
                                  sign, &overflow_mul_p);
          overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
          if (overflow_mul_p
              && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
            overflow_p = true;
          if (!overflow_p)
            return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
                                wide_int_to_tree (ctype, mul));
        }

      /* If these operations "cancel" each other, we have the main
         optimizations of this pass, which occur when either constant is a
         multiple of the other, in which case we replace this with either an
         operation or CODE or TCODE.

         If we have an unsigned type, we cannot do this since it will change
         the result if the original computation overflowed.  */
      if (TYPE_OVERFLOW_UNDEFINED (ctype)
          && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
              || (tcode == MULT_EXPR
                  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
                  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
                  && code != MULT_EXPR)))
        {
          if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
            {
              if (TYPE_OVERFLOW_UNDEFINED (ctype))
                *strict_overflow_p = true;
              return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
                                  fold_convert (ctype,
                                                const_binop (TRUNC_DIV_EXPR,
                                                             op1, c)));
            }
          else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
            {
              if (TYPE_OVERFLOW_UNDEFINED (ctype))
                *strict_overflow_p = true;
              return fold_build2 (code, ctype, fold_convert (ctype, op0),
                                  fold_convert (ctype,
                                                const_binop (TRUNC_DIV_EXPR,
                                                             c, op1)));
            }
        }
      break;

    default:
      break;
    }

  return 0;
}
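/* Illustrative example (editor's sketch, not part of the original
   sources): the "cancel" case above is what rewrites a multiply against
   an exact division when signed overflow is undefined.  For signed x,

     (x * 8) / 4   becomes   x * (8 / 4) == x * 2

   because c == 4 divides op1 == 8 exactly.  With wrapping overflow
   (e.g. -fwrapv) the fold is refused, since x * 8 may wrap where
   x * 2 would not.  */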
/* Return a node which has the indicated constant VALUE (either 0 or
   1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
   and is of the indicated TYPE.  */

tree
constant_boolean_node (bool value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else if (TREE_CODE (type) == VECTOR_TYPE)
    return build_vector_from_val (type,
                                  build_int_cst (TREE_TYPE (type),
                                                 value ? -1 : 0));
  else
    return fold_convert (type, value ? integer_one_node : integer_zero_node);
}
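/* Usage sketch (editor's addition): constant_boolean_node maps a host
   bool onto the tree constant of the requested type, e.g.
   constant_boolean_node (true, boolean_type_node) yields
   boolean_true_node, while for a vector type it broadcasts -1 (all
   bits set) or 0 into every element, matching the usual encoding of
   vector comparison results.  */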
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (location_t loc,
                                     enum tree_code code,
                                     tree type, tree op0, tree op1,
                                     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;
  enum tree_code cond_code = COND_EXPR;

  if (TREE_CODE (cond) == COND_EXPR
      || TREE_CODE (cond) == VEC_COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an expression, then it does not make
         sense to try to perform a logical or arithmetic operation
         involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
        lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
        rhs = false_value;
    }
  else if (!(TREE_CODE (type) != VECTOR_TYPE
             && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }
  else
    /* Detect the case of mixing vector and scalar types - bail out.  */
    return NULL_TREE;

  if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
    cond_code = VEC_COND_EXPR;

  /* This transformation is only worthwhile if we don't have to wrap ARG
     in a SAVE_EXPR and the operation can be simplified without recursing
     on at least one of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg)
      && (TREE_SIDE_EFFECTS (arg)
          || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
          || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
    return NULL_TREE;

  arg = fold_convert_loc (loc, arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert_loc (loc, cond_type, true_value);
      if (cond_first_p)
        lhs = fold_build2_loc (loc, code, type, true_value, arg);
      else
        lhs = fold_build2_loc (loc, code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert_loc (loc, cond_type, false_value);
      if (cond_first_p)
        rhs = fold_build2_loc (loc, code, type, false_value, arg);
      else
        rhs = fold_build2_loc (loc, code, type, arg, false_value);
    }

  /* Check that we have simplified at least one of the branches.  */
  if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
    return NULL_TREE;

  return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
}
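/* Worked example (editor's addition): with CODE == PLUS_EXPR, ARG == a
   and COND == (b ? x : y), the routine builds

     a + (b ? x : y)   -->   b ? (a + x) : (a + y)

   but only when ARG is constant or at least one new branch folds to a
   constant, so the tree is simplified rather than merely duplicated.  */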
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

bool
fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (element_mode (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (element_mode (type)))
    return true;

  /* In a vector or complex, we would need to check the sign of all zeros.  */
  if (TREE_CODE (addend) != REAL_CST)
    return false;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
}
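/* Worked example (editor's addition): with signed zeros honored,
   x + 0.0 is not always x, because (-0.0) + 0.0 is +0.0 under the
   default rounding mode.  x - 0.0 is safe except when rounding toward
   -infinity matters, where 0.0 - 0.0 is -0.0; that is why only the
   NEGATE form can return true once signed zeros are in play.  */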
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (location_t loc,
                  enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  signop sign = TYPE_SIGN (TREE_TYPE (arg0));
  bool neg_overflow = false;
  bool overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1);  */
  wide_int val = wi::mul (arg01, arg1, sign, &overflow);
  prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
  neg_overflow = false;

  if (sign == UNSIGNED)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1));
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp).  */
      val = wi::add (prod, tmp, sign, &overflow);
      hi = force_fit_type (TREE_TYPE (arg00), val,
                           -1, overflow | TREE_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          neg_overflow = true;
          lo = int_const_binop (MINUS_EXPR, prod, tmp);
          hi = prod;
          break;

        case 0:
          lo = fold_negate_const (tmp, TREE_TYPE (arg0));
          hi = tmp;
          break;

        case 1:
          hi = int_const_binop (PLUS_EXPR, prod, tmp);
          lo = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          hi = int_const_binop (MINUS_EXPR, prod, tmp);
          lo = prod;
          break;

        case 0:
          hi = fold_negate_const (tmp, TREE_TYPE (arg0));
          lo = tmp;
          break;

        case 1:
          neg_overflow = true;
          lo = int_const_binop (PLUS_EXPR, prod, tmp);
          hi = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
        return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand_loc (loc, type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
        return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
        {
          tmp = neg_overflow ? integer_zero_node : integer_one_node;
          return omit_one_operand_loc (loc, type, tmp, arg00);
        }
      return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
        {
          tmp = neg_overflow ? integer_zero_node : integer_one_node;
          return omit_one_operand_loc (loc, type, tmp, arg00);
        }
      return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
        {
          tmp = neg_overflow ? integer_one_node : integer_zero_node;
          return omit_one_operand_loc (loc, type, tmp, arg00);
        }
      return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
        {
          tmp = neg_overflow ? integer_one_node : integer_zero_node;
          return omit_one_operand_loc (loc, type, tmp, arg00);
        }
      return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
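/* Worked example (editor's addition): for signed X,

     X / 4 == 2   becomes the range check   8 <= X && X <= 11

   since prod = 4 * 2 = 8, tmp = 4 - 1 = 3 and hi = prod + tmp = 11.
   The TREE_OVERFLOW branches handle bounds falling outside the type,
   where the test degenerates to a constant or to a single bound.  */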
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */

static tree
fold_single_bit_test_into_sign_test (location_t loc,
                                     enum tree_code code, tree arg0, tree arg1,
                                     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
         this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
          /* This is only a win if casting to a signed type is cheap,
             i.e. when arg00's type is not a partial mode.  */
          && TYPE_PRECISION (TREE_TYPE (arg00))
             == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
        {
          tree stype = signed_type_for (TREE_TYPE (arg00));
          return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                                  result_type,
                                  fold_convert_loc (loc, stype, arg00),
                                  build_int_cst (stype, 0));
        }
    }

  return NULL_TREE;
}
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (location_t loc, enum tree_code code,
                      tree arg0, tree arg1, tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
         test.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
                                                 result_type);
      if (tem)
        return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
         convert that into ((A >> C2) & 1).  Where C2 = log2(C).
         Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && bitnum < TYPE_PRECISION (type)
          && wi::ltu_p (TREE_OPERAND (inner, 1),
                        TYPE_PRECISION (type) - bitnum))
        {
          bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
                      && !flag_syntax_only) ? 0 : 1;

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert_loc (loc, intermediate_type, inner);

      if (bitnum != 0)
        inner = build2 (RSHIFT_EXPR, intermediate_type,
                        inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      if (code == EQ_EXPR)
        inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type,
                                 inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert_loc (loc, result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
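/* Worked example (editor's addition): testing bit 3,

     (A & 8) != 0   -->   ((unsigned) A >> 3) & 1

   and for EQ_EXPR the extracted bit is flipped first:

     (A & 8) == 0   -->   (((unsigned) A >> 3) ^ 1) & 1

   with the AND emitted last so it can combine with later folds.  */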
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  */

bool
tree_swap_operands_p (const_tree arg0, const_tree arg1)
{
  if (CONSTANT_CLASS_P (arg1))
    return 0;
  if (CONSTANT_CLASS_P (arg0))
    return 1;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  /* It is preferable to swap two SSA_NAME to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  /* Put SSA_NAMEs last.  */
  if (TREE_CODE (arg1) == SSA_NAME)
    return 0;
  if (TREE_CODE (arg0) == SSA_NAME)
    return 1;

  /* Put variables last.  */
  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  return 0;
}
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  if (POINTER_TYPE_P (typea))
    {
      /* Convert the pointer types into integer before taking the difference.  */
      tree ta = fold_convert_loc (loc, ssizetype, a);
      tree ta1 = fold_convert_loc (loc, ssizetype, a1);
      diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
    }
  else
    diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);

  if (!diff || !integer_onep (diff))
    return NULL_TREE;

  return fold_build2_loc (loc, GE_EXPR, type, a, y);
}
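/* Worked example (editor's addition): given BOUND a < x and INEQ
   a + 1 > y, the computed difference (a + 1) - a folds to 1, so the
   sharp inequality is weakened to a >= y; together with a < x this is
   exactly the A < X && A >= Y form promised in the comment above.  */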
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
                          tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else if (TREE_CODE (arg0) == INTEGER_CST)
    {
      arg00 = build_one_cst (type);
      arg01 = arg0;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
        return NULL_TREE;
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      arg10 = build_one_cst (type);
      /* As we canonicalize A - 2 to A + -2 get rid of that sign for
         the purpose of this canonicalization.  */
      if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
          && negate_expr_p (arg1)
          && code == PLUS_EXPR)
        {
          arg11 = negate_expr (arg1);
          code = MINUS_EXPR;
        }
      else
        arg11 = arg1;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
        return NULL_TREE;
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (tree_fits_shwi_p (arg01)
           && tree_fits_shwi_p (arg11))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = tree_to_shwi (arg01);
      int11 = tree_to_shwi (arg11);

      /* Move min of absolute values to int11.  */
      if (absu_hwi (int01) < absu_hwi (int11))
        {
          tmp = int01, int01 = int11, int11 = tmp;
          alt0 = arg00, arg00 = arg10, arg10 = alt0;
          maybe_same = arg01;
          swap = true;
        }
      else
        maybe_same = arg11;

      if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
          /* The remainder should not be a constant, otherwise we
             end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
             increased the number of multiplications necessary.  */
          && TREE_CODE (arg10) != INTEGER_CST)
        {
          alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
                                  build_int_cst (TREE_TYPE (arg00),
                                                 int01 / int11));
          alt1 = arg10;
          same = maybe_same;
          if (swap)
            maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
        }
    }

  if (same)
    return fold_build2_loc (loc, MULT_EXPR, type,
                            fold_build2_loc (loc, code, type,
                                             fold_convert_loc (loc, type, alt0),
                                             fold_convert_loc (loc, type, alt1)),
                            fold_convert_loc (loc, type, same));

  return NULL_TREE;
}
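/* Worked examples (editor's addition) of the factorings tried above,
   for integer operands:

     a * 4 + b * 4   -->   (a + b) * 4      (identical multiplicands)
     a * 4 + a       -->   a * (4 + 1)      (arg11 becomes constant 1)
     i * 12 + j * 4  -->   (i * 3 + j) * 4  (common power-of-two factor)

   The last form is only used when the other addend is not itself a
   constant, to avoid increasing the number of multiplications.  */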
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if ((off == -1 && total_bytes > len)
      || off >= total_bytes)
    return 0;
  if (off == -1)
    off = 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
         number of bytes.  */
      value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);

      if (total_bytes > UNITS_PER_WORD)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      if (offset >= off
          && offset - off < len)
        ptr[offset - off] = value;
    }
  return MIN (len, total_bytes - off);
}
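/* Worked example (editor's addition): encoding the 32-bit constant
   0x01020304 for a little-endian target stores the bytes

     ptr[0..3] = { 0x04, 0x03, 0x02, 0x01 }

   while a big-endian target stores { 0x01, 0x02, 0x03, 0x04 }; a
   non-negative OFF simply selects a window into that target-ordered
   image.  */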
/* Subroutine of native_encode_expr.  Encode the FIXED_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
{
  tree type = TREE_TYPE (expr);
  machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  FIXED_VALUE_TYPE value;
  tree i_value, i_type;

  if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return 0;

  i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);

  if (NULL_TREE == i_type
      || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
    return 0;

  value = TREE_FIXED_CST (expr);
  i_value = double_int_to_tree (i_type, value.data);

  return native_encode_int (i_value, ptr, len, off);
}
/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words, bitpos;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if ((off == -1 && total_bytes > len)
      || off >= total_bytes)
    return 0;
  if (off == -1)
    off = 0;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (UNITS_PER_WORD < 4)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        {
          offset = byte;
          if (BYTES_BIG_ENDIAN)
            {
              /* Reverse bytes within each long, or within the entire float
                 if it's smaller than a long (for HFmode).  */
              offset = MIN (3, total_bytes - 1) - offset;
              gcc_assert (offset >= 0);
            }
        }
      offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
      if (offset >= off
          && offset - off < len)
        ptr[offset - off] = value;
    }
  return MIN (len, total_bytes - off);
}
/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
{
  int rsize, isize;
  tree part;

  part = TREE_REALPART (expr);
  rsize = native_encode_expr (part, ptr, len, off);
  if (off == -1 && rsize == 0)
    return 0;
  part = TREE_IMAGPART (expr);
  if (off != -1)
    off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
  isize = native_encode_expr (part, ptr + rsize, len - rsize, off);
  if (off == -1 && isize != rsize)
    return 0;
  return rsize + isize;
}
/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
{
  unsigned i, count;
  int size, offset;
  tree itype, elem;

  offset = 0;
  count = VECTOR_CST_NELTS (expr);
  itype = TREE_TYPE (TREE_TYPE (expr));
  size = GET_MODE_SIZE (TYPE_MODE (itype));
  for (i = 0; i < count; i++)
    {
      if (off >= size)
        {
          off -= size;
          continue;
        }
      elem = VECTOR_CST_ELT (expr, i);
      int res = native_encode_expr (elem, ptr + offset, len - offset, off);
      if ((off == -1 && res != size)
          || res == 0)
        return 0;
      offset += res;
      if (offset >= len)
        return offset;
      if (off != -1)
        off = 0;
    }
  return offset;
}
/* Subroutine of native_encode_expr.  Encode the STRING_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
{
  tree type = TREE_TYPE (expr);
  HOST_WIDE_INT total_bytes;

  if (TREE_CODE (type) != ARRAY_TYPE
      || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
      || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
      || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
    return 0;
  total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
  if ((off == -1 && total_bytes > len)
      || off >= total_bytes)
    return 0;
  if (off == -1)
    off = 0;
  if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
    {
      int written = 0;
      if (off < TREE_STRING_LENGTH (expr))
        {
          written = MIN (len, TREE_STRING_LENGTH (expr) - off);
          memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
        }
      memset (ptr + written, 0,
              MIN (total_bytes - written, len - written));
    }
  else
    memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
  return MIN (total_bytes - off, len);
}
/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
   REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
   buffer PTR of length LEN bytes.  If OFF is not -1 then start
   the encoding at byte offset OFF and encode at most LEN bytes.
   Return the number of bytes placed in the buffer, or zero upon failure.  */

int
native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
{
  /* We don't support starting at negative offset and -1 is special.  */
  if (off < -1)
    return 0;

  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return native_encode_int (expr, ptr, len, off);

    case REAL_CST:
      return native_encode_real (expr, ptr, len, off);

    case FIXED_CST:
      return native_encode_fixed (expr, ptr, len, off);

    case COMPLEX_CST:
      return native_encode_complex (expr, ptr, len, off);

    case VECTOR_CST:
      return native_encode_vector (expr, ptr, len, off);

    case STRING_CST:
      return native_encode_string (expr, ptr, len, off);

    default:
      return 0;
    }
}
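/* Usage sketch (editor's addition): callers typically encode into a
   stack buffer and treat a zero return as failure, e.g.

     unsigned char buf[64];
     int len = native_encode_expr (expr, buf, sizeof (buf), -1);
     if (len == 0)
       return NULL_TREE;  /* EXPR not representable; give up.  */

   where OFF == -1 requests the whole value starting at byte zero.  */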
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));

  if (total_bytes > len
      || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return NULL_TREE;

  wide_int result = wi::from_buffer (ptr, total_bytes);

  return wide_int_to_tree (type, result);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a FIXED_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_fixed (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  double_int result;
  FIXED_VALUE_TYPE fixed_value;

  if (total_bytes > len
      || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return NULL_TREE;

  result = double_int::from_buffer (ptr, total_bytes);
  fixed_value = fixed_from_double_int (result, TYPE_MODE (type));

  return build_fixed (type, fixed_value);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      /* Both OFFSET and BYTE index within a long;
         bitpos indexes the whole float.  */
      int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
        {
          int word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        {
          offset = byte;
          if (BYTES_BIG_ENDIAN)
            {
              /* Reverse bytes within each long, or within the entire float
                 if it's smaller than a long (for HFmode).  */
              offset = MIN (3, total_bytes - 1) - offset;
              gcc_assert (offset >= 0);
            }
        }
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      tmp[bitpos / 32] |= (unsigned long) value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_complex (tree type, const unsigned char *ptr, int len)
{
  tree etype, rpart, ipart;
  int size;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  if (size * 2 > len)
    return NULL_TREE;
  rpart = native_interpret_expr (etype, ptr, size);
  if (!rpart)
    return NULL_TREE;
  ipart = native_interpret_expr (etype, ptr + size, size);
  if (!ipart)
    return NULL_TREE;
  return build_complex (type, rpart, ipart);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_vector (tree type, const unsigned char *ptr, int len)
{
  tree etype, elem;
  int i, size, count;
  tree *elements;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  count = TYPE_VECTOR_SUBPARTS (type);
  if (size * count > len)
    return NULL_TREE;

  elements = XALLOCAVEC (tree, count);
  for (i = count - 1; i >= 0; i--)
    {
      elem = native_interpret_expr (etype, ptr + (i * size), size);
      if (!elem)
        return NULL_TREE;
      elements[i] = elem;
    }
  return build_vector (type, elements);
}
/* Subroutine of fold_view_convert_expr.  Interpret the contents of
   the buffer PTR of length LEN as a constant of type TYPE.  For
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
   return NULL_TREE.  */

tree
native_interpret_expr (tree type, const unsigned char *ptr, int len)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      return native_interpret_int (type, ptr, len);

    case REAL_TYPE:
      return native_interpret_real (type, ptr, len);

    case FIXED_POINT_TYPE:
      return native_interpret_fixed (type, ptr, len);

    case COMPLEX_TYPE:
      return native_interpret_complex (type, ptr, len);

    case VECTOR_TYPE:
      return native_interpret_vector (type, ptr, len);

    default:
      return NULL_TREE;
    }
}
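/* Usage sketch (editor's addition): native_interpret_expr is the
   inverse of native_encode_expr, so a successful round trip

     int len = native_encode_expr (expr, buf, sizeof (buf), -1);
     tree t = len ? native_interpret_expr (type, buf, len) : NULL_TREE;

   reinterprets the target-format bytes of EXPR as a constant of TYPE,
   yielding NULL_TREE when TYPE is not one of the cases above.  */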
/* Returns true if we can interpret the contents of a native encoding
   as TYPE.  */

static bool
can_native_interpret_type_p (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
      return true;
    default:
      return false;
    }
}

/* Return true iff a constant of type TYPE is accepted by
   native_encode_expr.  */

bool
can_native_encode_type_p (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case POINTER_TYPE:
      return true;
    default:
      return false;
    }
}
/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */

static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}
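/* Worked example (editor's addition): VIEW_CONVERT_EXPR reinterprets
   bits rather than converting values, so

     VIEW_CONVERT_EXPR<int>(1.0f)   folds to   0x3f800000

   (the IEEE single-precision bit pattern of 1.0): the REAL_CST is
   encoded into the buffer and the same four bytes are interpreted
   back as an INTEGER_CST.  */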
/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
        t = build1_loc (loc, NOP_EXPR, ptrtype, t);
    }
  else if (TREE_CODE (t) == MEM_REF
           && integer_zerop (TREE_OPERAND (t, 1)))
    return TREE_OPERAND (t, 0);
  else if (TREE_CODE (t) == MEM_REF
           && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
    return fold_binary (POINTER_PLUS_EXPR, ptrtype,
                        TREE_OPERAND (t, 0),
                        convert_to_ptrofftype (TREE_OPERAND (t, 1)));
  else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
    {
      t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));

      if (TREE_TYPE (t) != ptrtype)
        t = fold_convert_loc (loc, ptrtype, t);
    }
  else
    t = build1_loc (loc, ADDR_EXPR, ptrtype, t);

  return t;
}
/* Build an expression for the address of T.  */

tree
build_fold_addr_expr_loc (location_t loc, tree t)
{
  tree ptrtype = build_pointer_type (TREE_TYPE (t));

  return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
}
/* Fold a unary expression of code CODE and type TYPE with operand
   OP0.  Return the folded expression if folding is successful.
   Otherwise, return NULL_TREE.  */

tree
fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
{
  tree tem;
  tree arg0;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 1);

  arg0 = op0;
  if (arg0)
    {
      if (CONVERT_EXPR_CODE_P (code)
          || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
        {
          /* Don't use STRIP_NOPS, because signedness of argument type
             matters.  */
          STRIP_SIGN_NOPS (arg0);
        }
      else
        {
          /* Strip any conversions that don't change the mode.  This
             is safe for every expression, except for a comparison
             expression because its signedness is derived from its
             operands.

             Note that this is done as an internal manipulation within
             the constant folder, in order to find the simplest
             representation of the arguments so that their form can be
             studied.  In any cases, the appropriate type conversions
             should be put back in the tree that will get out of the
             constant folder.  */
          STRIP_NOPS (arg0);
        }

      if (CONSTANT_CLASS_P (arg0))
        {
          tree tem = const_unop (code, type, arg0);
          if (tem)
            {
              if (TREE_TYPE (tem) != type)
                tem = fold_convert_loc (loc, type, tem);
              return tem;
            }
        }
    }

  tem = generic_simplify (loc, code, type, op0);
  if (tem)
    return tem;

  if (TREE_CODE_CLASS (code) == tcc_unary)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                       fold_build1_loc (loc, code, type,
                                        fold_convert_loc (loc, TREE_TYPE (op0),
                                                          TREE_OPERAND (arg0, 1))));
      else if (TREE_CODE (arg0) == COND_EXPR)
        {
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg02 = TREE_OPERAND (arg0, 2);
          if (! VOID_TYPE_P (TREE_TYPE (arg01)))
            arg01 = fold_build1_loc (loc, code, type,
                                     fold_convert_loc (loc,
                                                       TREE_TYPE (op0), arg01));
          if (! VOID_TYPE_P (TREE_TYPE (arg02)))
            arg02 = fold_build1_loc (loc, code, type,
                                     fold_convert_loc (loc,
                                                       TREE_TYPE (op0), arg02));
          tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
                                 arg01, arg02);

          /* If this was a conversion, and all we did was to move into
             inside the COND_EXPR, bring it back out.  But leave it if
             it is a conversion from integer to integer and the
             result precision is no wider than a word since such a
             conversion is cheap and may be optimized away by combine,
             while it couldn't if it were outside the COND_EXPR.  Then return
             so we don't get into an infinite recursion loop taking the
             conversion out and then back in.  */

          if ((CONVERT_EXPR_CODE_P (code)
               || code == NON_LVALUE_EXPR)
              && TREE_CODE (tem) == COND_EXPR
              && TREE_CODE (TREE_OPERAND (tem, 1)) == code
              && TREE_CODE (TREE_OPERAND (tem, 2)) == code
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
              && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
                  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
              && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
                     && (INTEGRAL_TYPE_P
                         (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
                     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
                  || flag_syntax_only))
            tem = build1_loc (loc, code, type,
                              build3 (COND_EXPR,
                                      TREE_TYPE (TREE_OPERAND
                                                 (TREE_OPERAND (tem, 1), 0)),
                                      TREE_OPERAND (tem, 0),
                                      TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
                                      TREE_OPERAND (TREE_OPERAND (tem, 2),
                                                    0)));
          return tem;
        }
    }

  switch (code)
    {
    case NON_LVALUE_EXPR:
      if (!maybe_lvalue_p (op0))
        return fold_convert_loc (loc, type, op0);
      return NULL_TREE;

    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      if (COMPARISON_CLASS_P (op0))
        {
          /* If we have (type) (a CMP b) and type is an integral type, return
             new expression involving the new type.  Canonicalize
             (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
             non-integral type.
             Do not fold the result as that would not simplify further, also
             folding again results in recursions.  */
          if (TREE_CODE (type) == BOOLEAN_TYPE)
            return build2_loc (loc, TREE_CODE (op0), type,
                               TREE_OPERAND (op0, 0),
                               TREE_OPERAND (op0, 1));
          else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
                   && TREE_CODE (type) != VECTOR_TYPE)
            return build3_loc (loc, COND_EXPR, type, op0,
                               constant_boolean_node (true, type),
                               constant_boolean_node (false, type));
        }

      /* Handle (T *)&A.B.C for A being of type T and B and C
         living at offset zero.  This occurs frequently in
         C++ upcasting and then accessing the base.  */
      if (TREE_CODE (op0) == ADDR_EXPR
          && POINTER_TYPE_P (type)
          && handled_component_p (TREE_OPERAND (op0, 0)))
        {
          HOST_WIDE_INT bitsize, bitpos;
          tree offset;
          machine_mode mode;
          int unsignedp, reversep, volatilep;
          tree base
            = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
                                   &offset, &mode, &unsignedp, &reversep,
                                   &volatilep);
          /* If the reference was to a (constant) zero offset, we can use
             the address of the base if it has the same base type
             as the result type and the pointer type is unqualified.  */
          if (! offset && bitpos == 0
              && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
                  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
              && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
            return fold_convert_loc (loc, type,
                                     build_fold_addr_expr_loc (loc, base));
        }

      if (TREE_CODE (op0) == MODIFY_EXPR
          && TREE_CONSTANT (TREE_OPERAND (op0, 1))
          /* Detect assigning a bitfield.  */
          && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
               && DECL_BIT_FIELD
               (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
        {
          /* Don't leave an assignment inside a conversion
             unless assigning a bitfield.  */
          tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
          /* First do the assignment, then return converted constant.  */
          tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
          TREE_NO_WARNING (tem) = 1;
          TREE_USED (tem) = 1;
          return tem;
        }

      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
         constants (if x has signed type, the sign bit cannot be set
         in c).  This folds extension into the BIT_AND_EXPR.
         ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
         very likely don't have maximal range for their precision and this
         transformation effectively doesn't preserve non-maximal ranges.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && TREE_CODE (op0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
        {
          tree and_expr = op0;
          tree and0 = TREE_OPERAND (and_expr, 0);
          tree and1 = TREE_OPERAND (and_expr, 1);
          int change = 0;

          if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
              || (TYPE_PRECISION (type)
                  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
            change = 1;
          else if (TYPE_PRECISION (TREE_TYPE (and1))
                   <= HOST_BITS_PER_WIDE_INT
                   && tree_fits_uhwi_p (and1))
            {
              unsigned HOST_WIDE_INT cst;

              cst = tree_to_uhwi (and1);
              cst &= HOST_WIDE_INT_M1U
                     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
              change = (cst == 0);
              if (change
                  && !flag_syntax_only
                  && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
                      == ZERO_EXTEND))
                {
                  tree uns = unsigned_type_for (TREE_TYPE (and0));
                  and0 = fold_convert_loc (loc, uns, and0);
                  and1 = fold_convert_loc (loc, uns, and1);
                }
            }
          if (change)
            {
              tem = force_fit_type (type, wi::to_widest (and1), 0,
                                    TREE_OVERFLOW (and1));
              return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                      fold_convert_loc (loc, type, and0), tem);
            }
        }

      /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
         cast (T1)X will fold away.  We assume that this happens when X itself
         is a cast.  */
      if (POINTER_TYPE_P (type)
          && TREE_CODE (arg0) == POINTER_PLUS_EXPR
          && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);

          return fold_build_pointer_plus_loc
                   (loc, fold_convert_loc (loc, type, arg00), arg01);
        }

      /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
         of the same precision, and X is an integer type not narrower than
         types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (op0) == BIT_NOT_EXPR
          && INTEGRAL_TYPE_P (TREE_TYPE (op0))
          && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
          && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
        {
          tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
          if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
              && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
            return fold_build1_loc (loc, BIT_NOT_EXPR, type,
                                    fold_convert_loc (loc, type, tem));
        }

      /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
         type of X and Y (integer types only).  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (op0) == MULT_EXPR
          && INTEGRAL_TYPE_P (TREE_TYPE (op0))
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
        {
          /* Be careful not to introduce new overflows.  */
          tree mult_type;
          if (TYPE_OVERFLOW_WRAPS (type))
            mult_type = type;
          else
            mult_type = unsigned_type_for (type);

          if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
            {
              tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
                                     fold_convert_loc (loc, mult_type,
                                                       TREE_OPERAND (op0, 0)),
                                     fold_convert_loc (loc, mult_type,
                                                       TREE_OPERAND (op0, 1)));
              return fold_convert_loc (loc, type, tem);
            }
        }

      return NULL_TREE;

    case VIEW_CONVERT_EXPR:
      if (TREE_CODE (op0) == MEM_REF)
        {
          if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
            type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
          tem = fold_build2_loc (loc, MEM_REF, type,
                                 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
          REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
          return tem;
        }

      return NULL_TREE;

    case NEGATE_EXPR:
      tem = fold_negate_expr (loc, arg0);
      if (tem)
        return fold_convert_loc (loc, type, tem);
      return NULL_TREE;

    case ABS_EXPR:
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      if (TREE_CODE (arg0) == NOP_EXPR
          && TREE_CODE (type) == REAL_TYPE)
        {
          tree targ0 = strip_float_extensions (arg0);
          if (targ0 != arg0)
            return fold_convert_loc (loc, type,
                                     fold_build1_loc (loc, ABS_EXPR,
                                                      TREE_TYPE (targ0),
                                                      targ0));
        }
      return NULL_TREE;

    case BIT_NOT_EXPR:
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
                                    fold_convert_loc (loc, type,
                                                      TREE_OPERAND (arg0, 0)))))
        return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
               && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
                                         fold_convert_loc (loc, type,
                                                           TREE_OPERAND (arg0, 1)))))
        return fold_build2_loc (loc, BIT_XOR_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)), tem);

      return NULL_TREE;

    case TRUTH_NOT_EXPR:
      /* Note that the operand of this must be an int
         and its values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language,
         but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (loc, arg0);
      if (!tem)
        return NULL_TREE;
      return fold_convert_loc (loc, type, tem);

    case INDIRECT_REF:
      /* Fold *&X to X if X is an lvalue.  */
      if (TREE_CODE (op0) == ADDR_EXPR)
        {
          tree op00 = TREE_OPERAND (op0, 0);
          if ((VAR_P (op00)
               || TREE_CODE (op00) == PARM_DECL
               || TREE_CODE (op00) == RESULT_DECL)
              && !TREE_READONLY (op00))
            return op00;
        }
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* If the operation was a conversion do _not_ mark a resulting constant
   with TREE_OVERFLOW if the original constant was not.  These conversions
   have implementation defined behavior and retaining the TREE_OVERFLOW
   flag here would confuse later passes such as VRP.  */

tree
fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
                                tree type, tree op0)
{
  tree res = fold_unary_loc (loc, code, type, op0);
  if (res
      && TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (op0) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (code))
    TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);

  return res;
}
/* Fold a binary bitwise/truth expression of code CODE and type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting expression.
   ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_truth_andor (location_t loc, enum tree_code code, tree type,
                  tree arg0, tree arg1, tree op0, tree op1)
{
  tree tem;

  /* We only do these simplifications if we are optimizing.  */
  if (!optimize)
    return NULL_TREE;

  /* Check for things like (A || B) && (A || C).  We can convert this
     to A || (B && C).  Note that either operator can be any of the four
     truth and/or operations and the transformation will still be
     valid.  Also note that we only care about order for the
     ANDIF and ORIF operators.  If B contains side effects, this
     might change the truth-value of A.  */
  if (TREE_CODE (arg0) == TREE_CODE (arg1)
      && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
          || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
          || TREE_CODE (arg0) == TRUTH_AND_EXPR
          || TREE_CODE (arg0) == TRUTH_OR_EXPR)
      && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
    {
      tree a00 = TREE_OPERAND (arg0, 0);
      tree a01 = TREE_OPERAND (arg0, 1);
      tree a10 = TREE_OPERAND (arg1, 0);
      tree a11 = TREE_OPERAND (arg1, 1);
      int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
                          || TREE_CODE (arg0) == TRUTH_AND_EXPR)
                         && (code == TRUTH_AND_EXPR
                             || code == TRUTH_OR_EXPR));

      if (operand_equal_p (a00, a10, 0))
        return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
                                fold_build2_loc (loc, code, type, a01, a11));
      else if (commutative && operand_equal_p (a00, a11, 0))
        return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
                                fold_build2_loc (loc, code, type, a01, a10));
      else if (commutative && operand_equal_p (a01, a10, 0))
        return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
                                fold_build2_loc (loc, code, type, a00, a11));

      /* This case is tricky because we must either have commutative
         operators or else A10 must not have side-effects.  */

      else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
               && operand_equal_p (a01, a11, 0))
        return fold_build2_loc (loc, TREE_CODE (arg0), type,
                                fold_build2_loc (loc, code, type, a00, a10),
                                a01);
    }

  /* See if we can build a range comparison.  */
  if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
    return tem;

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
      if (tem)
        return fold_build2_loc (loc, code, type, tem, arg1);
    }

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
      if (tem)
        return fold_build2_loc (loc, code, type, arg0, tem);
    }

  /* Check for the possibility of merging component references.  If our
     lhs is another similar operation, try to merge its rhs with our
     rhs.  Then try to merge our lhs and rhs.  */
  if (TREE_CODE (arg0) == code
      && 0 != (tem = fold_truth_andor_1 (loc, code, type,
                                         TREE_OPERAND (arg0, 1), arg1)))
    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

  if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
    return tem;

  if (LOGICAL_OP_NON_SHORT_CIRCUIT
      && (code == TRUTH_AND_EXPR
          || code == TRUTH_ANDIF_EXPR
          || code == TRUTH_OR_EXPR
          || code == TRUTH_ORIF_EXPR))
    {
      enum tree_code ncode, icode;

      ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
              ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
      icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;

      /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
         or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
         We don't want to pack more than two leafs to a non-IF AND/OR
         expression.
         If tree-code of left-hand operand isn't an AND/OR-IF code and not
         equal to IF-CODE, then we don't want to add right-hand operand.
         If the inner right-hand side of left-hand operand has
         side-effects, or isn't simple, then we can't add to it,
         as otherwise we might destroy if-sequence.  */
      if (TREE_CODE (arg0) == icode
          && simple_operand_p_2 (arg1)
          /* Needed for sequence points to handle trappings, and
             side-effects.  */
          && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
        {
          tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
                                 arg1);
          return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
                                  tem);
        }
      /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
         or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C).  */
      else if (TREE_CODE (arg1) == icode
               && simple_operand_p_2 (arg0)
               /* Needed for sequence points to handle trappings, and
                  side-effects.  */
               && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
        {
          tem = fold_build2_loc (loc, ncode, type,
                                 arg0, TREE_OPERAND (arg1, 0));
          return fold_build2_loc (loc, icode, type, tem,
                                  TREE_OPERAND (arg1, 1));
        }
      /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
         into (A OR B).
         For sequence point consistency, we need to check for trapping,
         and side-effects.  */
      else if (code == icode && simple_operand_p_2 (arg0)
               && simple_operand_p_2 (arg1))
        return fold_build2_loc (loc, ncode, type, arg0, arg1);
    }

  return NULL_TREE;
}
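/* Worked example (editor's addition): the factoring at the top of this
   function rewrites

     (a || b) && (a || c)   -->   a || (b && c)

   when B has no side effects, and the LOGICAL_OP_NON_SHORT_CIRCUIT
   tail replaces short-circuit ANDIF/ORIF forms with plain AND/OR when
   both operands are simple enough to evaluate unconditionally.  */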
8100 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8101 by changing CODE to reduce the magnitude of constants involved in
8102 ARG0 of the comparison.
8103 Returns a canonicalized comparison tree if a simplification was
8104 possible, otherwise returns NULL_TREE.
8105 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8106 valid if signed overflow is undefined. */
8109 maybe_canonicalize_comparison_1 (location_t loc
, enum tree_code code
, tree type
,
8110 tree arg0
, tree arg1
,
8111 bool *strict_overflow_p
)
8113 enum tree_code code0
= TREE_CODE (arg0
);
8114 tree t
, cst0
= NULL_TREE
;
8117 /* Match A +- CST code arg1. We can change this only if overflow
8119 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
8120 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
)))
8121 /* In principle pointers also have undefined overflow behavior,
8122 but that causes problems elsewhere. */
8123 && !POINTER_TYPE_P (TREE_TYPE (arg0
))
8124 && (code0
== MINUS_EXPR
8125 || code0
== PLUS_EXPR
)
8126 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
))
8129 /* Identify the constant in arg0 and its sign. */
8130 cst0
= TREE_OPERAND (arg0
, 1);
8131 sgn0
= tree_int_cst_sgn (cst0
);
8133 /* Overflowed constants and zero will cause problems. */
8134 if (integer_zerop (cst0
)
8135 || TREE_OVERFLOW (cst0
))
8138 /* See if we can reduce the magnitude of the constant in
8139 arg0 by changing the comparison code. */
8140 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8142 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
8144 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8145 else if (code
== GT_EXPR
8146 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
8148 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8149 else if (code
== LE_EXPR
8150 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
8152 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8153 else if (code
== GE_EXPR
8154 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
8158 *strict_overflow_p
= true;
  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its type's range.  */
8162 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0
))
8164 && TYPE_MIN_VALUE (TREE_TYPE (cst0
))
8165 && tree_int_cst_equal (cst0
, TYPE_MIN_VALUE (TREE_TYPE (cst0
))))
8167 && TYPE_MAX_VALUE (TREE_TYPE (cst0
))
8168 && tree_int_cst_equal (cst0
, TYPE_MAX_VALUE (TREE_TYPE (cst0
))))))
8171 t
= int_const_binop (sgn0
== -1 ? PLUS_EXPR
: MINUS_EXPR
,
8172 cst0
, build_int_cst (TREE_TYPE (cst0
), 1));
8173 t
= fold_build2_loc (loc
, code0
, TREE_TYPE (arg0
), TREE_OPERAND (arg0
, 0), t
);
8174 t
= fold_convert (TREE_TYPE (arg1
), t
);
8176 return fold_build2_loc (loc
, code
, type
, t
, arg1
);
8179 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8180 overflow further. Try to decrease the magnitude of constants involved
8181 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8182 and put sole constants at the second argument position.
8183 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8186 maybe_canonicalize_comparison (location_t loc
, enum tree_code code
, tree type
,
8187 tree arg0
, tree arg1
)
8190 bool strict_overflow_p
;
8191 const char * const warnmsg
= G_("assuming signed overflow does not occur "
8192 "when reducing constant in comparison");
8194 /* Try canonicalization by simplifying arg0. */
8195 strict_overflow_p
= false;
8196 t
= maybe_canonicalize_comparison_1 (loc
, code
, type
, arg0
, arg1
,
8197 &strict_overflow_p
);
8200 if (strict_overflow_p
)
8201 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MAGNITUDE
);
  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
8207 code
= swap_tree_comparison (code
);
8208 strict_overflow_p
= false;
8209 t
= maybe_canonicalize_comparison_1 (loc
, code
, type
, arg1
, arg0
,
8210 &strict_overflow_p
);
8211 if (t
&& strict_overflow_p
)
8212 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MAGNITUDE
);
/* Return whether BASE + OFFSET + BITPOS may wrap around the address
   space.  This is used to avoid issuing overflow warnings for
   expressions like &p->x, which cannot wrap.  */
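/* Illustrative example (added commentary, not from the original
   sources): given

     struct S { int a; int b; } *p;

   the address &p->b is the base p plus a small constant offset; for
   any valid object that sum cannot wrap around the address space, so
   comparisons involving it need no overflow warning.  */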
8221 pointer_may_wrap_p (tree base
, tree offset
, HOST_WIDE_INT bitpos
)
8223 if (!POINTER_TYPE_P (TREE_TYPE (base
)))
8230 int precision
= TYPE_PRECISION (TREE_TYPE (base
));
8231 if (offset
== NULL_TREE
)
8232 wi_offset
= wi::zero (precision
);
8233 else if (TREE_CODE (offset
) != INTEGER_CST
|| TREE_OVERFLOW (offset
))
8239 wide_int units
= wi::shwi (bitpos
/ BITS_PER_UNIT
, precision
);
8240 wide_int total
= wi::add (wi_offset
, units
, UNSIGNED
, &overflow
);
8244 if (!wi::fits_uhwi_p (total
))
8247 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (TREE_TYPE (base
)));
  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
     array.  */
8253 if (TREE_CODE (base
) == ADDR_EXPR
)
8255 HOST_WIDE_INT base_size
;
8257 base_size
= int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base
, 0)));
8258 if (base_size
> 0 && size
< base_size
)
8262 return total
.to_uhwi () > (unsigned HOST_WIDE_INT
) size
;
/* Return a positive integer when the symbol DECL is known to have
   a nonzero address, zero when it's known not to (e.g., it's a weak
   symbol), and a negative integer when the symbol is not yet in the
   symbol table and so whether or not its address is zero is unknown.
   For function-local objects, always return a positive integer.  */
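/* Illustrative example (added commentary, not from the original
   sources):

     extern int g;                          // ordinary symbol
     extern int w __attribute__ ((weak));   // weak symbol

   "&g == 0" may be folded to false since g must have a nonzero
   address, while "&w == 0" must be kept because a weak symbol may
   resolve to a null address.  */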
8271 maybe_nonzero_address (tree decl
)
8273 if (DECL_P (decl
) && decl_in_symtab_p (decl
))
8274 if (struct symtab_node
*symbol
= symtab_node::get_create (decl
))
8275 return symbol
->nonzero_address ();
8277 /* Function local objects are never NULL. */
8279 && (DECL_CONTEXT (decl
)
8280 && TREE_CODE (DECL_CONTEXT (decl
)) == FUNCTION_DECL
8281 && auto_var_in_fn_p (decl
, DECL_CONTEXT (decl
))))
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should not use this routine directly; they should go
   through fold_binary instead.  Fold a comparison with tree code
   CODE and type TYPE with operands OP0 and OP1.  Return the folded
   comparison or NULL_TREE.  */
8296 fold_comparison (location_t loc
, enum tree_code code
, tree type
,
8299 const bool equality_code
= (code
== EQ_EXPR
|| code
== NE_EXPR
);
8300 tree arg0
, arg1
, tem
;
8305 STRIP_SIGN_NOPS (arg0
);
8306 STRIP_SIGN_NOPS (arg1
);
8308 /* For comparisons of pointers we can decompose it to a compile time
8309 comparison of the base objects and the offsets into the object.
8310 This requires at least one operand being an ADDR_EXPR or a
8311 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8312 if (POINTER_TYPE_P (TREE_TYPE (arg0
))
8313 && (TREE_CODE (arg0
) == ADDR_EXPR
8314 || TREE_CODE (arg1
) == ADDR_EXPR
8315 || TREE_CODE (arg0
) == POINTER_PLUS_EXPR
8316 || TREE_CODE (arg1
) == POINTER_PLUS_EXPR
))
8318 tree base0
, base1
, offset0
= NULL_TREE
, offset1
= NULL_TREE
;
8319 HOST_WIDE_INT bitsize
, bitpos0
= 0, bitpos1
= 0;
8321 int volatilep
, reversep
, unsignedp
;
8322 bool indirect_base0
= false, indirect_base1
= false;
8324 /* Get base and offset for the access. Strip ADDR_EXPR for
8325 get_inner_reference, but put it back by stripping INDIRECT_REF
8326 off the base object if possible. indirect_baseN will be true
8327 if baseN is not an address but refers to the object itself. */
8329 if (TREE_CODE (arg0
) == ADDR_EXPR
)
8332 = get_inner_reference (TREE_OPERAND (arg0
, 0),
8333 &bitsize
, &bitpos0
, &offset0
, &mode
,
8334 &unsignedp
, &reversep
, &volatilep
);
8335 if (TREE_CODE (base0
) == INDIRECT_REF
)
8336 base0
= TREE_OPERAND (base0
, 0);
8338 indirect_base0
= true;
8340 else if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
8342 base0
= TREE_OPERAND (arg0
, 0);
8343 STRIP_SIGN_NOPS (base0
);
8344 if (TREE_CODE (base0
) == ADDR_EXPR
)
8347 = get_inner_reference (TREE_OPERAND (base0
, 0),
8348 &bitsize
, &bitpos0
, &offset0
, &mode
,
8349 &unsignedp
, &reversep
, &volatilep
);
8350 if (TREE_CODE (base0
) == INDIRECT_REF
)
8351 base0
= TREE_OPERAND (base0
, 0);
8353 indirect_base0
= true;
8355 if (offset0
== NULL_TREE
|| integer_zerop (offset0
))
8356 offset0
= TREE_OPERAND (arg0
, 1);
8358 offset0
= size_binop (PLUS_EXPR
, offset0
,
8359 TREE_OPERAND (arg0
, 1));
8360 if (TREE_CODE (offset0
) == INTEGER_CST
)
8362 offset_int tem
= wi::sext (wi::to_offset (offset0
),
8363 TYPE_PRECISION (sizetype
));
8364 tem
<<= LOG2_BITS_PER_UNIT
;
8366 if (wi::fits_shwi_p (tem
))
8368 bitpos0
= tem
.to_shwi ();
8369 offset0
= NULL_TREE
;
8375 if (TREE_CODE (arg1
) == ADDR_EXPR
)
8378 = get_inner_reference (TREE_OPERAND (arg1
, 0),
8379 &bitsize
, &bitpos1
, &offset1
, &mode
,
8380 &unsignedp
, &reversep
, &volatilep
);
8381 if (TREE_CODE (base1
) == INDIRECT_REF
)
8382 base1
= TREE_OPERAND (base1
, 0);
8384 indirect_base1
= true;
8386 else if (TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
8388 base1
= TREE_OPERAND (arg1
, 0);
8389 STRIP_SIGN_NOPS (base1
);
8390 if (TREE_CODE (base1
) == ADDR_EXPR
)
8393 = get_inner_reference (TREE_OPERAND (base1
, 0),
8394 &bitsize
, &bitpos1
, &offset1
, &mode
,
8395 &unsignedp
, &reversep
, &volatilep
);
8396 if (TREE_CODE (base1
) == INDIRECT_REF
)
8397 base1
= TREE_OPERAND (base1
, 0);
8399 indirect_base1
= true;
8401 if (offset1
== NULL_TREE
|| integer_zerop (offset1
))
8402 offset1
= TREE_OPERAND (arg1
, 1);
8404 offset1
= size_binop (PLUS_EXPR
, offset1
,
8405 TREE_OPERAND (arg1
, 1));
8406 if (TREE_CODE (offset1
) == INTEGER_CST
)
8408 offset_int tem
= wi::sext (wi::to_offset (offset1
),
8409 TYPE_PRECISION (sizetype
));
8410 tem
<<= LOG2_BITS_PER_UNIT
;
8412 if (wi::fits_shwi_p (tem
))
8414 bitpos1
= tem
.to_shwi ();
8415 offset1
= NULL_TREE
;
8420 /* If we have equivalent bases we might be able to simplify. */
8421 if (indirect_base0
== indirect_base1
8422 && operand_equal_p (base0
, base1
,
8423 indirect_base0
? OEP_ADDRESS_OF
: 0))
8425 /* We can fold this expression to a constant if the non-constant
8426 offset parts are equal. */
8427 if ((offset0
== offset1
8428 || (offset0
&& offset1
8429 && operand_equal_p (offset0
, offset1
, 0)))
8432 && (DECL_P (base0
) || CONSTANT_CLASS_P (base0
)))
8433 || POINTER_TYPE_OVERFLOW_UNDEFINED
))
8437 && bitpos0
!= bitpos1
8438 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
8439 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
8440 fold_overflow_warning (("assuming pointer wraparound does not "
8441 "occur when comparing P +- C1 with "
8443 WARN_STRICT_OVERFLOW_CONDITIONAL
);
8448 return constant_boolean_node (bitpos0
== bitpos1
, type
);
8450 return constant_boolean_node (bitpos0
!= bitpos1
, type
);
8452 return constant_boolean_node (bitpos0
< bitpos1
, type
);
8454 return constant_boolean_node (bitpos0
<= bitpos1
, type
);
8456 return constant_boolean_node (bitpos0
>= bitpos1
, type
);
8458 return constant_boolean_node (bitpos0
> bitpos1
, type
);
      /* We can simplify the comparison to a comparison of the variable
	 offset parts if the constant offset parts are equal.
	 Be careful to use signed sizetype here because otherwise we
	 mess with array offsets in the wrong way.  This is possible
	 because pointer arithmetic is restricted to remain within an
	 object and overflow on pointer differences is undefined as of
	 6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
8469 else if (bitpos0
== bitpos1
8472 && (DECL_P (base0
) || CONSTANT_CLASS_P (base0
)))
8473 || POINTER_TYPE_OVERFLOW_UNDEFINED
))
	  /* By converting to signed sizetype we cover middle-end pointer
	     arithmetic which operates on unsigned pointer types of size
	     type size and ARRAY_REF offsets which are properly sign or
	     zero extended from their type in case it is narrower than
	     sizetype.  */
8480 if (offset0
== NULL_TREE
)
8481 offset0
= build_int_cst (ssizetype
, 0);
8483 offset0
= fold_convert_loc (loc
, ssizetype
, offset0
);
8484 if (offset1
== NULL_TREE
)
8485 offset1
= build_int_cst (ssizetype
, 0);
8487 offset1
= fold_convert_loc (loc
, ssizetype
, offset1
);
8490 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
8491 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
8492 fold_overflow_warning (("assuming pointer wraparound does not "
8493 "occur when comparing P +- C1 with "
8495 WARN_STRICT_OVERFLOW_COMPARISON
);
8497 return fold_build2_loc (loc
, code
, type
, offset0
, offset1
);
      /* For equal offsets we can simplify to a comparison of the
	 base addresses.  */
8502 else if (bitpos0
== bitpos1
8504 ? base0
!= TREE_OPERAND (arg0
, 0) : base0
!= arg0
)
8506 ? base1
!= TREE_OPERAND (arg1
, 0) : base1
!= arg1
)
8507 && ((offset0
== offset1
)
8508 || (offset0
&& offset1
8509 && operand_equal_p (offset0
, offset1
, 0))))
8512 base0
= build_fold_addr_expr_loc (loc
, base0
);
8514 base1
= build_fold_addr_expr_loc (loc
, base1
);
8515 return fold_build2_loc (loc
, code
, type
, base0
, base1
);
      /* Comparison between an ordinary (non-weak) symbol and a null
	 pointer can be eliminated since such symbols must have a
	 non-null address.  In C, relational expressions between
	 pointers to objects and null pointers are undefined.  The
	 results below follow the C++ rules with the additional
	 property that every object pointer compares greater than a
	 null pointer.  */
8524 else if (((DECL_P (base0
)
8525 && maybe_nonzero_address (base0
) > 0
8526 /* Avoid folding references to struct members at offset 0 to
8527 prevent tests like '&ptr->firstmember == 0' from getting
8528 eliminated. When ptr is null, although the -> expression
8529 is strictly speaking invalid, GCC retains it as a matter
8530 of QoI. See PR c/44555. */
8531 && (offset0
== NULL_TREE
&& bitpos0
!= 0))
8532 || CONSTANT_CLASS_P (base0
))
8534 /* The caller guarantees that when one of the arguments is
8535 constant (i.e., null in this case) it is second. */
8536 && integer_zerop (arg1
))
8543 return constant_boolean_node (false, type
);
8547 return constant_boolean_node (true, type
);
8554 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8555 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8556 the resulting offset is smaller in absolute value than the
8557 original one and has the same sign. */
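  /* Worked example (added commentary, not from the original sources):
     for signed x and y,

       x + 2 < y + 5    becomes    x < y + 3

     where the combined constant 3 is smaller in magnitude than 5 and
     has the same sign, so the transform is valid under the
     undefined-overflow assumption this block requires.  */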
8558 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
8559 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
8560 && (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8561 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8562 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1)))
8563 && (TREE_CODE (arg1
) == PLUS_EXPR
|| TREE_CODE (arg1
) == MINUS_EXPR
)
8564 && (TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
8565 && !TREE_OVERFLOW (TREE_OPERAND (arg1
, 1))))
8567 tree const1
= TREE_OPERAND (arg0
, 1);
8568 tree const2
= TREE_OPERAND (arg1
, 1);
8569 tree variable1
= TREE_OPERAND (arg0
, 0);
8570 tree variable2
= TREE_OPERAND (arg1
, 0);
8572 const char * const warnmsg
= G_("assuming signed overflow does not "
8573 "occur when combining constants around "
      /* Put the constant on the side where it doesn't overflow and is
	 of smaller absolute value and of the same sign as before.  */
8578 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8579 ? MINUS_EXPR
: PLUS_EXPR
,
8581 if (!TREE_OVERFLOW (cst
)
8582 && tree_int_cst_compare (const2
, cst
) == tree_int_cst_sgn (const2
)
8583 && tree_int_cst_sgn (cst
) == tree_int_cst_sgn (const2
))
8585 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
8586 return fold_build2_loc (loc
, code
, type
,
8588 fold_build2_loc (loc
, TREE_CODE (arg1
),
8593 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8594 ? MINUS_EXPR
: PLUS_EXPR
,
8596 if (!TREE_OVERFLOW (cst
)
8597 && tree_int_cst_compare (const1
, cst
) == tree_int_cst_sgn (const1
)
8598 && tree_int_cst_sgn (cst
) == tree_int_cst_sgn (const1
))
8600 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
8601 return fold_build2_loc (loc
, code
, type
,
8602 fold_build2_loc (loc
, TREE_CODE (arg0
),
8609 tem
= maybe_canonicalize_comparison (loc
, code
, type
, arg0
, arg1
);
8613 /* If we are comparing an expression that just has comparisons
8614 of two integer values, arithmetic expressions of those comparisons,
8615 and constants, we can simplify it. There are only three cases
8616 to check: the two values can either be equal, the first can be
8617 greater, or the second can be greater. Fold the expression for
8618 those three values. Since each value must be 0 or 1, we have
8619 eight possibilities, each of which corresponds to the constant 0
8620 or 1 or one of the six possible comparisons.
8622 This handles common cases like (a > b) == 0 but also handles
8623 expressions like ((x > y) - (y > x)) > 0, which supposedly
8624 occur in macroized code. */
8626 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) != INTEGER_CST
)
8628 tree cval1
= 0, cval2
= 0;
8631 if (twoval_comparison_p (arg0
, &cval1
, &cval2
, &save_p
)
8632 /* Don't handle degenerate cases here; they should already
8633 have been handled anyway. */
8634 && cval1
!= 0 && cval2
!= 0
8635 && ! (TREE_CONSTANT (cval1
) && TREE_CONSTANT (cval2
))
8636 && TREE_TYPE (cval1
) == TREE_TYPE (cval2
)
8637 && INTEGRAL_TYPE_P (TREE_TYPE (cval1
))
8638 && TYPE_MAX_VALUE (TREE_TYPE (cval1
))
8639 && TYPE_MAX_VALUE (TREE_TYPE (cval2
))
8640 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1
)),
8641 TYPE_MAX_VALUE (TREE_TYPE (cval2
)), 0))
8643 tree maxval
= TYPE_MAX_VALUE (TREE_TYPE (cval1
));
8644 tree minval
= TYPE_MIN_VALUE (TREE_TYPE (cval1
));
8646 /* We can't just pass T to eval_subst in case cval1 or cval2
8647 was the same as ARG1. */
8650 = fold_build2_loc (loc
, code
, type
,
8651 eval_subst (loc
, arg0
, cval1
, maxval
,
8655 = fold_build2_loc (loc
, code
, type
,
8656 eval_subst (loc
, arg0
, cval1
, maxval
,
8660 = fold_build2_loc (loc
, code
, type
,
8661 eval_subst (loc
, arg0
, cval1
, minval
,
8665 /* All three of these results should be 0 or 1. Confirm they are.
8666 Then use those values to select the proper code to use. */
8668 if (TREE_CODE (high_result
) == INTEGER_CST
8669 && TREE_CODE (equal_result
) == INTEGER_CST
8670 && TREE_CODE (low_result
) == INTEGER_CST
)
8672 /* Make a 3-bit mask with the high-order bit being the
8673 value for `>', the next for '=', and the low for '<'. */
8674 switch ((integer_onep (high_result
) * 4)
8675 + (integer_onep (equal_result
) * 2)
8676 + integer_onep (low_result
))
8680 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
8701 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
8706 tem
= save_expr (build2 (code
, type
, cval1
, cval2
));
8707 protected_set_expr_location (tem
, loc
);
8710 return fold_build2_loc (loc
, code
, type
, cval1
, cval2
);
8715 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8716 into a single range test. */
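  /* Worked example (added commentary, not from the original sources):

       x / 3 == 2    holds exactly when    6 <= x && x <= 8

     so fold_div_compare can turn the division into a single range
     test on x.  */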
8717 if (TREE_CODE (arg0
) == TRUNC_DIV_EXPR
8718 && TREE_CODE (arg1
) == INTEGER_CST
8719 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8720 && !integer_zerop (TREE_OPERAND (arg0
, 1))
8721 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
8722 && !TREE_OVERFLOW (arg1
))
8724 tem
= fold_div_compare (loc
, code
, type
, arg0
, arg1
);
8725 if (tem
!= NULL_TREE
)
8733 /* Subroutine of fold_binary. Optimize complex multiplications of the
8734 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8735 argument EXPR represents the expression "z" of type TYPE. */
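/* Worked example (added commentary, not from the original sources):
   for z = 3 + 4i,

     z * conj (z) = (3*3 + 4*4) + 0i = 25 + 0i

   i.e. realpart(z)^2 + imagpart(z)^2 with a zero imaginary part,
   which is the form this helper constructs.  */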
8738 fold_mult_zconjz (location_t loc
, tree type
, tree expr
)
8740 tree itype
= TREE_TYPE (type
);
8741 tree rpart
, ipart
, tem
;
8743 if (TREE_CODE (expr
) == COMPLEX_EXPR
)
8745 rpart
= TREE_OPERAND (expr
, 0);
8746 ipart
= TREE_OPERAND (expr
, 1);
8748 else if (TREE_CODE (expr
) == COMPLEX_CST
)
8750 rpart
= TREE_REALPART (expr
);
8751 ipart
= TREE_IMAGPART (expr
);
8755 expr
= save_expr (expr
);
8756 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, itype
, expr
);
8757 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, itype
, expr
);
8760 rpart
= save_expr (rpart
);
8761 ipart
= save_expr (ipart
);
8762 tem
= fold_build2_loc (loc
, PLUS_EXPR
, itype
,
8763 fold_build2_loc (loc
, MULT_EXPR
, itype
, rpart
, rpart
),
8764 fold_build2_loc (loc
, MULT_EXPR
, itype
, ipart
, ipart
));
8765 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, tem
,
8766 build_zero_cst (itype
));
8770 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8771 CONSTRUCTOR ARG into array ELTS and return true if successful. */
static bool
vec_cst_ctor_to_array (tree arg, tree *elts)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;

  if (TREE_CODE (arg) == VECTOR_CST)
    {
      for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
	elts[i] = VECTOR_CST_ELT (arg, i);
    }
  else if (TREE_CODE (arg) == CONSTRUCTOR)
    {
      constructor_elt *elt;

      FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
	if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
	  return false;
	else
	  elts[i] = elt->value;
    }
  else
    return false;

  /* Zero-pad any trailing elements the CONSTRUCTOR left
     unspecified.  */
  for (; i < nelts; i++)
    elts[i]
      = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);

  return true;
}
8801 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8802 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8803 NULL_TREE otherwise. */
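/* Illustrative example (added commentary, not from the original
   sources): with four-element vectors

     arg0 = {a0, a1, a2, a3},  arg1 = {b0, b1, b2, b3}

   and sel = {0, 2, 4, 6}, the folded result is {a0, a2, b0, b2}:
   selector values below NELTS pick elements of ARG0, the remainder
   pick from ARG1.  */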
static tree
fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
  tree *elts;
  bool need_ctor = false;

  gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
	      && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
  if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
      || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
    return NULL_TREE;

  elts = XALLOCAVEC (tree, nelts * 3);
  if (!vec_cst_ctor_to_array (arg0, elts)
      || !vec_cst_ctor_to_array (arg1, elts + nelts))
    return NULL_TREE;

  /* Gather the selected elements; fall back to a CONSTRUCTOR if any
     of them is not a constant.  */
  for (i = 0; i < nelts; i++)
    {
      if (!CONSTANT_CLASS_P (elts[sel[i]]))
	need_ctor = true;
      elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
    }

  if (need_ctor)
    {
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nelts);
      for (i = 0; i < nelts; i++)
	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
      return build_constructor (type, v);
    }

  return build_vector (type, &elts[2 * nelts]);
}
/* Try to fold a pointer difference of type TYPE between two address
   expressions of array references AREF0 and AREF1 using location LOC.
   Return a simplified expression for the difference or NULL_TREE.  */
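/* Worked example (added commentary, not from the original sources):
   for int a[10], the byte difference underlying

     &a[i] - &a[j]

   is folded here to (i - j) * sizeof (int) plus the difference of
   the bases, which lets the enclosing pointer subtraction simplify
   to i - j.  */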
8847 fold_addr_of_array_ref_difference (location_t loc
, tree type
,
8848 tree aref0
, tree aref1
)
8850 tree base0
= TREE_OPERAND (aref0
, 0);
8851 tree base1
= TREE_OPERAND (aref1
, 0);
8852 tree base_offset
= build_int_cst (type
, 0);
8854 /* If the bases are array references as well, recurse. If the bases
8855 are pointer indirections compute the difference of the pointers.
8856 If the bases are equal, we are set. */
8857 if ((TREE_CODE (base0
) == ARRAY_REF
8858 && TREE_CODE (base1
) == ARRAY_REF
8860 = fold_addr_of_array_ref_difference (loc
, type
, base0
, base1
)))
8861 || (INDIRECT_REF_P (base0
)
8862 && INDIRECT_REF_P (base1
)
8864 = fold_binary_loc (loc
, MINUS_EXPR
, type
,
8865 fold_convert (type
, TREE_OPERAND (base0
, 0)),
8867 TREE_OPERAND (base1
, 0)))))
8868 || operand_equal_p (base0
, base1
, OEP_ADDRESS_OF
))
8870 tree op0
= fold_convert_loc (loc
, type
, TREE_OPERAND (aref0
, 1));
8871 tree op1
= fold_convert_loc (loc
, type
, TREE_OPERAND (aref1
, 1));
8872 tree esz
= fold_convert_loc (loc
, type
, array_ref_element_size (aref0
));
8873 tree diff
= build2 (MINUS_EXPR
, type
, op0
, op1
);
8874 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
8876 fold_build2_loc (loc
, MULT_EXPR
, type
,
8882 /* If the real or vector real constant CST of type TYPE has an exact
8883 inverse, return it, else return NULL. */
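/* Worked example (added commentary, not from the original sources):
   0.25 has the exact inverse 4.0, so x / 0.25 may become x * 4.0;
   0.3 has no exactly representable reciprocal, so NULL is returned
   and the division must be kept.  */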
8886 exact_inverse (tree type
, tree cst
)
8889 tree unit_type
, *elts
;
8891 unsigned vec_nelts
, i
;
8893 switch (TREE_CODE (cst
))
8896 r
= TREE_REAL_CST (cst
);
8898 if (exact_real_inverse (TYPE_MODE (type
), &r
))
8899 return build_real (type
, r
);
8904 vec_nelts
= VECTOR_CST_NELTS (cst
);
8905 elts
= XALLOCAVEC (tree
, vec_nelts
);
8906 unit_type
= TREE_TYPE (type
);
8907 mode
= TYPE_MODE (unit_type
);
8909 for (i
= 0; i
< vec_nelts
; i
++)
8911 r
= TREE_REAL_CST (VECTOR_CST_ELT (cst
, i
));
8912 if (!exact_real_inverse (mode
, &r
))
8914 elts
[i
] = build_real (unit_type
, r
);
8917 return build_vector (type
, elts
);
8924 /* Mask out the tz least significant bits of X of type TYPE where
8925 tz is the number of trailing zeroes in Y. */
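/* Worked example (added commentary, not from the original sources):
   if Y is 24 (binary 11000) then tz = 3, and X = 0b10111 is masked
   to 0b10000; the low three bits of X are cleared, matching the fact
   that any multiple of Y has them zero.  */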
static wide_int
mask_with_tz (tree type, const wide_int &x, const wide_int &y)
{
  int tz = wi::ctz (y);
  if (tz > 0)
    return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
  return x;
}
8935 /* Return true when T is an address and is known to be nonzero.
8936 For floating point we further ensure that T is not denormal.
8937 Similar logic is present in nonzero_address in rtlanal.h.
8939 If the return value is based on the assumption that signed overflow
8940 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
8941 change *STRICT_OVERFLOW_P. */
8944 tree_expr_nonzero_warnv_p (tree t
, bool *strict_overflow_p
)
8946 tree type
= TREE_TYPE (t
);
8947 enum tree_code code
;
8949 /* Doing something useful for floating point would need more work. */
8950 if (!INTEGRAL_TYPE_P (type
) && !POINTER_TYPE_P (type
))
8953 code
= TREE_CODE (t
);
8954 switch (TREE_CODE_CLASS (code
))
8957 return tree_unary_nonzero_warnv_p (code
, type
, TREE_OPERAND (t
, 0),
8960 case tcc_comparison
:
8961 return tree_binary_nonzero_warnv_p (code
, type
,
8962 TREE_OPERAND (t
, 0),
8963 TREE_OPERAND (t
, 1),
8966 case tcc_declaration
:
8968 return tree_single_nonzero_warnv_p (t
, strict_overflow_p
);
8976 case TRUTH_NOT_EXPR
:
8977 return tree_unary_nonzero_warnv_p (code
, type
, TREE_OPERAND (t
, 0),
8980 case TRUTH_AND_EXPR
:
8982 case TRUTH_XOR_EXPR
:
8983 return tree_binary_nonzero_warnv_p (code
, type
,
8984 TREE_OPERAND (t
, 0),
8985 TREE_OPERAND (t
, 1),
8993 case WITH_SIZE_EXPR
:
8995 return tree_single_nonzero_warnv_p (t
, strict_overflow_p
);
9000 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 1),
9004 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 0),
9009 tree fndecl
= get_callee_fndecl (t
);
9010 if (!fndecl
) return false;
9011 if (flag_delete_null_pointer_checks
&& !flag_check_new
9012 && DECL_IS_OPERATOR_NEW (fndecl
)
9013 && !TREE_NOTHROW (fndecl
))
9015 if (flag_delete_null_pointer_checks
9016 && lookup_attribute ("returns_nonnull",
9017 TYPE_ATTRIBUTES (TREE_TYPE (fndecl
))))
9019 return alloca_call_p (t
);
9028 /* Return true when T is an address and is known to be nonzero.
9029 Handle warnings about undefined signed overflow. */
9032 tree_expr_nonzero_p (tree t
)
9034 bool ret
, strict_overflow_p
;
9036 strict_overflow_p
= false;
9037 ret
= tree_expr_nonzero_warnv_p (t
, &strict_overflow_p
);
9038 if (strict_overflow_p
)
9039 fold_overflow_warning (("assuming signed overflow does not occur when "
9040 "determining that expression is always "
9042 WARN_STRICT_OVERFLOW_MISC
);
9046 /* Return true if T is known not to be equal to an integer W. */
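/* Illustrative example (added commentary, not from the original
   sources): if range information shows an SSA name t lies in [1, 9],
   then expr_not_equal_to (t, 0) is true.  Likewise, if the known
   nonzero-bits mask of t is 0xfe (bit 0 always clear) and w is 1,
   t cannot equal w.  */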
9049 expr_not_equal_to (tree t
, const wide_int
&w
)
9051 wide_int min
, max
, nz
;
9052 value_range_type rtype
;
9053 switch (TREE_CODE (t
))
9056 return wi::ne_p (t
, w
);
9059 if (!INTEGRAL_TYPE_P (TREE_TYPE (t
)))
9061 rtype
= get_range_info (t
, &min
, &max
);
9062 if (rtype
== VR_RANGE
)
9064 if (wi::lt_p (max
, w
, TYPE_SIGN (TREE_TYPE (t
))))
9066 if (wi::lt_p (w
, min
, TYPE_SIGN (TREE_TYPE (t
))))
9069 else if (rtype
== VR_ANTI_RANGE
9070 && wi::le_p (min
, w
, TYPE_SIGN (TREE_TYPE (t
)))
9071 && wi::le_p (w
, max
, TYPE_SIGN (TREE_TYPE (t
))))
9073 /* If T has some known zero bits and W has any of those bits set,
9074 then T is known not to be equal to W. */
9075 if (wi::ne_p (wi::zext (wi::bit_and_not (w
, get_nonzero_bits (t
)),
9076 TYPE_PRECISION (TREE_TYPE (t
))), 0))
9085 /* Fold a binary expression of code CODE and type TYPE with operands
9086 OP0 and OP1. LOC is the location of the resulting expression.
9087 Return the folded expression if folding is successful. Otherwise,
9088 return NULL_TREE. */
9091 fold_binary_loc (location_t loc
,
9092 enum tree_code code
, tree type
, tree op0
, tree op1
)
9094 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
9095 tree arg0
, arg1
, tem
;
9096 tree t1
= NULL_TREE
;
9097 bool strict_overflow_p
;
9100 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
9101 && TREE_CODE_LENGTH (code
) == 2
9103 && op1
!= NULL_TREE
);
9108 /* Strip any conversions that don't change the mode. This is
9109 safe for every expression, except for a comparison expression
9110 because its signedness is derived from its operands. So, in
9111 the latter case, only strip conversions that don't change the
     signedness.  MIN_EXPR/MAX_EXPR also need the signedness of their
     arguments to match.
9115 Note that this is done as an internal manipulation within the
9116 constant folder, in order to find the simplest representation
9117 of the arguments so that their form can be studied. In any
9118 cases, the appropriate type conversions should be put back in
9119 the tree that will get out of the constant folder. */
9121 if (kind
== tcc_comparison
|| code
== MIN_EXPR
|| code
== MAX_EXPR
)
9123 STRIP_SIGN_NOPS (arg0
);
9124 STRIP_SIGN_NOPS (arg1
);
9132 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9133 constant but we can't do arithmetic on them. */
9134 if (CONSTANT_CLASS_P (arg0
) && CONSTANT_CLASS_P (arg1
))
9136 tem
= const_binop (code
, type
, arg0
, arg1
);
9137 if (tem
!= NULL_TREE
)
9139 if (TREE_TYPE (tem
) != type
)
9140 tem
= fold_convert_loc (loc
, type
, tem
);
9145 /* If this is a commutative operation, and ARG0 is a constant, move it
9146 to ARG1 to reduce the number of tests below. */
9147 if (commutative_tree_code (code
)
9148 && tree_swap_operands_p (arg0
, arg1
))
9149 return fold_build2_loc (loc
, code
, type
, op1
, op0
);
9151 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9152 to ARG1 to reduce the number of tests below. */
9153 if (kind
== tcc_comparison
9154 && tree_swap_operands_p (arg0
, arg1
))
9155 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
, op1
, op0
);
9157 tem
= generic_simplify (loc
, code
, type
, op0
, op1
);
9161 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9163 First check for cases where an arithmetic operation is applied to a
9164 compound, conditional, or comparison operation. Push the arithmetic
9165 operation inside the compound or conditional to see if any folding
9166 can then be done. Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.
9170 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9171 one of the operands is a comparison and the other is a comparison, a
9172 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9173 code below would make the expression more complex. Change it to a
9174 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9175 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
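  /* Illustrative examples (added commentary, not from the original
     sources) of the conversions described above:

       (a < b) & (c < d)    ->  TRUTH_AND ((a < b), (c < d))
       (a < b) != (c < d)   ->  TRUTH_XOR ((a < b), (c < d))
       (a < b) == (c < d)   ->  inversion of the TRUTH_XOR form  */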
9177 if ((code
== BIT_AND_EXPR
|| code
== BIT_IOR_EXPR
9178 || code
== EQ_EXPR
|| code
== NE_EXPR
)
9179 && TREE_CODE (type
) != VECTOR_TYPE
9180 && ((truth_value_p (TREE_CODE (arg0
))
9181 && (truth_value_p (TREE_CODE (arg1
))
9182 || (TREE_CODE (arg1
) == BIT_AND_EXPR
9183 && integer_onep (TREE_OPERAND (arg1
, 1)))))
9184 || (truth_value_p (TREE_CODE (arg1
))
9185 && (truth_value_p (TREE_CODE (arg0
))
9186 || (TREE_CODE (arg0
) == BIT_AND_EXPR
9187 && integer_onep (TREE_OPERAND (arg0
, 1)))))))
9189 tem
= fold_build2_loc (loc
, code
== BIT_AND_EXPR
? TRUTH_AND_EXPR
9190 : code
== BIT_IOR_EXPR
? TRUTH_OR_EXPR
9193 fold_convert_loc (loc
, boolean_type_node
, arg0
),
9194 fold_convert_loc (loc
, boolean_type_node
, arg1
));
9196 if (code
== EQ_EXPR
)
9197 tem
= invert_truthvalue_loc (loc
, tem
);
9199 return fold_convert_loc (loc
, type
, tem
);
9202 if (TREE_CODE_CLASS (code
) == tcc_binary
9203 || TREE_CODE_CLASS (code
) == tcc_comparison
)
9205 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
9207 tem
= fold_build2_loc (loc
, code
, type
,
9208 fold_convert_loc (loc
, TREE_TYPE (op0
),
9209 TREE_OPERAND (arg0
, 1)), op1
);
9210 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
9213 if (TREE_CODE (arg1
) == COMPOUND_EXPR
)
9215 tem
= fold_build2_loc (loc
, code
, type
, op0
,
9216 fold_convert_loc (loc
, TREE_TYPE (op1
),
9217 TREE_OPERAND (arg1
, 1)));
9218 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg1
, 0),
9222 if (TREE_CODE (arg0
) == COND_EXPR
9223 || TREE_CODE (arg0
) == VEC_COND_EXPR
9224 || COMPARISON_CLASS_P (arg0
))
9226 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
9228 /*cond_first_p=*/1);
9229 if (tem
!= NULL_TREE
)
9233 if (TREE_CODE (arg1
) == COND_EXPR
9234 || TREE_CODE (arg1
) == VEC_COND_EXPR
9235 || COMPARISON_CLASS_P (arg1
))
9237 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
9239 /*cond_first_p=*/0);
9240 if (tem
!= NULL_TREE
)
9248 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9249 if (TREE_CODE (arg0
) == ADDR_EXPR
9250 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == MEM_REF
)
9252 tree iref
= TREE_OPERAND (arg0
, 0);
9253 return fold_build2 (MEM_REF
, type
,
9254 TREE_OPERAND (iref
, 0),
9255 int_const_binop (PLUS_EXPR
, arg1
,
9256 TREE_OPERAND (iref
, 1)));
9259 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9260 if (TREE_CODE (arg0
) == ADDR_EXPR
9261 && handled_component_p (TREE_OPERAND (arg0
, 0)))
9264 HOST_WIDE_INT coffset
;
9265 base
= get_addr_base_and_unit_offset (TREE_OPERAND (arg0
, 0),
9269 return fold_build2 (MEM_REF
, type
,
9270 build_fold_addr_expr (base
),
9271 int_const_binop (PLUS_EXPR
, arg1
,
9272 size_int (coffset
)));
9277 case POINTER_PLUS_EXPR
:
9278 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9279 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
9280 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
9281 return fold_convert_loc (loc
, type
,
9282 fold_build2_loc (loc
, PLUS_EXPR
, sizetype
,
9283 fold_convert_loc (loc
, sizetype
,
9285 fold_convert_loc (loc
, sizetype
,
9291 if (INTEGRAL_TYPE_P (type
) || VECTOR_INTEGER_TYPE_P (type
))
9293 /* X + (X / CST) * -CST is X % CST. */
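	  /* Worked example (added commentary, not from the original
	     sources):

	       x + (x / 16) * -16    folds to    x % 16

	     since x - (x / 16) * 16 is precisely the truncating
	     remainder.  */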
9294 if (TREE_CODE (arg1
) == MULT_EXPR
9295 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == TRUNC_DIV_EXPR
9296 && operand_equal_p (arg0
,
9297 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0), 0))
9299 tree cst0
= TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1);
9300 tree cst1
= TREE_OPERAND (arg1
, 1);
9301 tree sum
= fold_binary_loc (loc
, PLUS_EXPR
, TREE_TYPE (cst1
),
9303 if (sum
&& integer_zerop (sum
))
9304 return fold_convert_loc (loc
, type
,
9305 fold_build2_loc (loc
, TRUNC_MOD_EXPR
,
9306 TREE_TYPE (arg0
), arg0
,
9311 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9312 one. Make sure the type is not saturating and has the signedness of
9313 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9314 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9315 if ((TREE_CODE (arg0
) == MULT_EXPR
9316 || TREE_CODE (arg1
) == MULT_EXPR
)
9317 && !TYPE_SATURATING (type
)
9318 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
9319 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
9320 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
9322 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
9327 if (! FLOAT_TYPE_P (type
))
9329 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9330 (plus (plus (mult) (mult)) (foo)) so that we can
9331 take advantage of the factoring cases below. */
9332 if (ANY_INTEGRAL_TYPE_P (type
)
9333 && TYPE_OVERFLOW_WRAPS (type
)
9334 && (((TREE_CODE (arg0
) == PLUS_EXPR
9335 || TREE_CODE (arg0
) == MINUS_EXPR
)
9336 && TREE_CODE (arg1
) == MULT_EXPR
)
9337 || ((TREE_CODE (arg1
) == PLUS_EXPR
9338 || TREE_CODE (arg1
) == MINUS_EXPR
)
9339 && TREE_CODE (arg0
) == MULT_EXPR
)))
9341 tree parg0
, parg1
, parg
, marg
;
9342 enum tree_code pcode
;
9344 if (TREE_CODE (arg1
) == MULT_EXPR
)
9345 parg
= arg0
, marg
= arg1
;
9347 parg
= arg1
, marg
= arg0
;
9348 pcode
= TREE_CODE (parg
);
9349 parg0
= TREE_OPERAND (parg
, 0);
9350 parg1
= TREE_OPERAND (parg
, 1);
9354 if (TREE_CODE (parg0
) == MULT_EXPR
9355 && TREE_CODE (parg1
) != MULT_EXPR
)
9356 return fold_build2_loc (loc
, pcode
, type
,
9357 fold_build2_loc (loc
, PLUS_EXPR
, type
,
9358 fold_convert_loc (loc
, type
,
9360 fold_convert_loc (loc
, type
,
9362 fold_convert_loc (loc
, type
, parg1
));
9363 if (TREE_CODE (parg0
) != MULT_EXPR
9364 && TREE_CODE (parg1
) == MULT_EXPR
)
9366 fold_build2_loc (loc
, PLUS_EXPR
, type
,
9367 fold_convert_loc (loc
, type
, parg0
),
9368 fold_build2_loc (loc
, pcode
, type
,
9369 fold_convert_loc (loc
, type
, marg
),
9370 fold_convert_loc (loc
, type
,
9376 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9377 to __complex__ ( x, y ). This is not the same for SNaNs or
9378 if signed zeros are involved. */
9379 if (!HONOR_SNANS (element_mode (arg0
))
9380 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
9381 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
9383 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
9384 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
9385 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
9386 bool arg0rz
= false, arg0iz
= false;
9387 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
9388 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
9390 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
9391 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
9392 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
9394 tree rp
= arg1r
? arg1r
9395 : build1 (REALPART_EXPR
, rtype
, arg1
);
9396 tree ip
= arg0i
? arg0i
9397 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
9398 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
9400 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
9402 tree rp
= arg0r
? arg0r
9403 : build1 (REALPART_EXPR
, rtype
, arg0
);
9404 tree ip
= arg1i
? arg1i
9405 : build1 (IMAGPART_EXPR
, rtype
, arg1
);
9406 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
9411 if (flag_unsafe_math_optimizations
9412 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
9413 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
9414 && (tem
= distribute_real_division (loc
, code
, type
, arg0
, arg1
)))
9417 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9418 We associate floats only if the user has specified
9419 -fassociative-math. */
9420 if (flag_associative_math
9421 && TREE_CODE (arg1
) == PLUS_EXPR
9422 && TREE_CODE (arg0
) != MULT_EXPR
)
9424 tree tree10
= TREE_OPERAND (arg1
, 0);
9425 tree tree11
= TREE_OPERAND (arg1
, 1);
9426 if (TREE_CODE (tree11
) == MULT_EXPR
9427 && TREE_CODE (tree10
) == MULT_EXPR
)
9430 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, arg0
, tree10
);
9431 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree0
, tree11
);
	  /* Convert (b*c + d*e) + a into b*c + (d*e + a).
	     We associate floats only if the user has specified
	     -fassociative-math.  */
9437 if (flag_associative_math
9438 && TREE_CODE (arg0
) == PLUS_EXPR
9439 && TREE_CODE (arg1
) != MULT_EXPR
)
9441 tree tree00
= TREE_OPERAND (arg0
, 0);
9442 tree tree01
= TREE_OPERAND (arg0
, 1);
9443 if (TREE_CODE (tree01
) == MULT_EXPR
9444 && TREE_CODE (tree00
) == MULT_EXPR
)
9447 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, tree01
, arg1
);
9448 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree00
, tree0
);
9454 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9455 is a rotate of A by C1 bits. */
9456 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9457 is a rotate of A by B bits. */
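      /* Worked example (added commentary, not from the original
	 sources): for a 32-bit unsigned x,

	   (x << 3) + (x >> 29)

	 is recognized as a left-rotate of x by 3 bits because 3 + 29
	 equals the precision of x; the variable form
	 (x << n) + (x >> (32 - n)) is matched by the MINUS_EXPR cases
	 below.  */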
9459 enum tree_code code0
, code1
;
9461 code0
= TREE_CODE (arg0
);
9462 code1
= TREE_CODE (arg1
);
9463 if (((code0
== RSHIFT_EXPR
&& code1
== LSHIFT_EXPR
)
9464 || (code1
== RSHIFT_EXPR
&& code0
== LSHIFT_EXPR
))
9465 && operand_equal_p (TREE_OPERAND (arg0
, 0),
9466 TREE_OPERAND (arg1
, 0), 0)
9467 && (rtype
= TREE_TYPE (TREE_OPERAND (arg0
, 0)),
9468 TYPE_UNSIGNED (rtype
))
9469 /* Only create rotates in complete modes. Other cases are not
9470 expanded properly. */
9471 && (element_precision (rtype
)
9472 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype
))))
9474 tree tree01
, tree11
;
9475 enum tree_code code01
, code11
;
9477 tree01
= TREE_OPERAND (arg0
, 1);
9478 tree11
= TREE_OPERAND (arg1
, 1);
9479 STRIP_NOPS (tree01
);
9480 STRIP_NOPS (tree11
);
9481 code01
= TREE_CODE (tree01
);
9482 code11
= TREE_CODE (tree11
);
9483 if (code01
== INTEGER_CST
9484 && code11
== INTEGER_CST
9485 && (wi::to_widest (tree01
) + wi::to_widest (tree11
)
9486 == element_precision (TREE_TYPE (TREE_OPERAND (arg0
, 0)))))
9488 tem
= build2_loc (loc
, LROTATE_EXPR
,
9489 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
9490 TREE_OPERAND (arg0
, 0),
9491 code0
== LSHIFT_EXPR
9492 ? TREE_OPERAND (arg0
, 1)
9493 : TREE_OPERAND (arg1
, 1));
9494 return fold_convert_loc (loc
, type
, tem
);
9496 else if (code11
== MINUS_EXPR
)
9498 tree tree110
, tree111
;
9499 tree110
= TREE_OPERAND (tree11
, 0);
9500 tree111
= TREE_OPERAND (tree11
, 1);
9501 STRIP_NOPS (tree110
);
9502 STRIP_NOPS (tree111
);
9503 if (TREE_CODE (tree110
) == INTEGER_CST
9504 && 0 == compare_tree_int (tree110
,
9506 (TREE_TYPE (TREE_OPERAND
9508 && operand_equal_p (tree01
, tree111
, 0))
9510 fold_convert_loc (loc
, type
,
9511 build2 ((code0
== LSHIFT_EXPR
9514 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
9515 TREE_OPERAND (arg0
, 0),
9516 TREE_OPERAND (arg0
, 1)));
9518 else if (code01
== MINUS_EXPR
)
9520 tree tree010
, tree011
;
9521 tree010
= TREE_OPERAND (tree01
, 0);
9522 tree011
= TREE_OPERAND (tree01
, 1);
9523 STRIP_NOPS (tree010
);
9524 STRIP_NOPS (tree011
);
9525 if (TREE_CODE (tree010
) == INTEGER_CST
9526 && 0 == compare_tree_int (tree010
,
9528 (TREE_TYPE (TREE_OPERAND
9530 && operand_equal_p (tree11
, tree011
, 0))
9531 return fold_convert_loc
9533 build2 ((code0
!= LSHIFT_EXPR
9536 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
9537 TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 1)));
      /* In most languages, we can't associate operations on floats
	 through parentheses.  Rather than remember where the
	 parentheses were, we don't associate floats at all, unless the
	 user has specified -fassociative-math.
	 And, we need to make sure the type is not saturating.  */
9549 if ((! FLOAT_TYPE_P (type
) || flag_associative_math
)
9550 && !TYPE_SATURATING (type
))
9552 tree var0
, con0
, lit0
, minus_lit0
;
9553 tree var1
, con1
, lit1
, minus_lit1
;
9557 /* Split both trees into variables, constants, and literals. Then
9558 associate each group together, the constants with literals,
9559 then the result with variables. This increases the chances of
9560 literals being recombined later and of generating relocatable
9561 expressions for the sum of a constant and literal. */
9562 var0
= split_tree (loc
, arg0
, type
, code
,
9563 &con0
, &lit0
, &minus_lit0
, 0);
9564 var1
= split_tree (loc
, arg1
, type
, code
,
9565 &con1
, &lit1
, &minus_lit1
, code
== MINUS_EXPR
);
9567 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9568 if (code
== MINUS_EXPR
)
9571 /* With undefined overflow prefer doing association in a type
9572 which wraps on overflow, if that is one of the operand types. */
9573 if ((POINTER_TYPE_P (type
) && POINTER_TYPE_OVERFLOW_UNDEFINED
)
9574 || (INTEGRAL_TYPE_P (type
) && !TYPE_OVERFLOW_WRAPS (type
)))
9576 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
9577 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
9578 atype
= TREE_TYPE (arg0
);
9579 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
9580 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1
)))
9581 atype
= TREE_TYPE (arg1
);
9582 gcc_assert (TYPE_PRECISION (atype
) == TYPE_PRECISION (type
));
9585 /* With undefined overflow we can only associate constants with one
9586 variable, and constants whose association doesn't overflow. */
9587 if ((POINTER_TYPE_P (atype
) && POINTER_TYPE_OVERFLOW_UNDEFINED
)
9588 || (INTEGRAL_TYPE_P (atype
) && !TYPE_OVERFLOW_WRAPS (atype
)))
9594 bool one_neg
= false;
9596 if (TREE_CODE (tmp0
) == NEGATE_EXPR
)
9598 tmp0
= TREE_OPERAND (tmp0
, 0);
9601 if (CONVERT_EXPR_P (tmp0
)
9602 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
9603 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0
, 0)))
9604 <= TYPE_PRECISION (atype
)))
9605 tmp0
= TREE_OPERAND (tmp0
, 0);
9606 if (TREE_CODE (tmp1
) == NEGATE_EXPR
)
9608 tmp1
= TREE_OPERAND (tmp1
, 0);
9611 if (CONVERT_EXPR_P (tmp1
)
9612 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
9613 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1
, 0)))
9614 <= TYPE_PRECISION (atype
)))
9615 tmp1
= TREE_OPERAND (tmp1
, 0);
9616 /* The only case we can still associate with two variables
9617 is if they cancel out. */
9619 || !operand_equal_p (tmp0
, tmp1
, 0))
9624 /* Only do something if we found more than two objects. Otherwise,
9625 nothing has changed and we risk infinite recursion. */
9627 && (2 < ((var0
!= 0) + (var1
!= 0)
9628 + (con0
!= 0) + (con1
!= 0)
9629 + (lit0
!= 0) + (lit1
!= 0)
9630 + (minus_lit0
!= 0) + (minus_lit1
!= 0))))
9632 bool any_overflows
= false;
9633 if (lit0
) any_overflows
|= TREE_OVERFLOW (lit0
);
9634 if (lit1
) any_overflows
|= TREE_OVERFLOW (lit1
);
9635 if (minus_lit0
) any_overflows
|= TREE_OVERFLOW (minus_lit0
);
9636 if (minus_lit1
) any_overflows
|= TREE_OVERFLOW (minus_lit1
);
9637 var0
= associate_trees (loc
, var0
, var1
, code
, atype
);
9638 con0
= associate_trees (loc
, con0
, con1
, code
, atype
);
9639 lit0
= associate_trees (loc
, lit0
, lit1
, code
, atype
);
9640 minus_lit0
= associate_trees (loc
, minus_lit0
, minus_lit1
,
	  /* Preserve the MINUS_EXPR if the negative part of the literal
	     is greater than the positive part.  Otherwise, the
	     multiplicative folding code (i.e. extract_muldiv) may be
	     fooled in case unsigned constants are subtracted, like in
	     the following example: ((X*2 + 4) - 8U)/2.  */
9648 if (minus_lit0
&& lit0
)
9650 if (TREE_CODE (lit0
) == INTEGER_CST
9651 && TREE_CODE (minus_lit0
) == INTEGER_CST
9652 && tree_int_cst_lt (lit0
, minus_lit0
))
9654 minus_lit0
= associate_trees (loc
, minus_lit0
, lit0
,
9660 lit0
= associate_trees (loc
, lit0
, minus_lit0
,
9666 /* Don't introduce overflows through reassociation. */
9668 && ((lit0
&& TREE_OVERFLOW_P (lit0
))
9669 || (minus_lit0
&& TREE_OVERFLOW_P (minus_lit0
))))
9676 fold_convert_loc (loc
, type
,
9677 associate_trees (loc
, var0
, minus_lit0
,
9678 MINUS_EXPR
, atype
));
9681 con0
= associate_trees (loc
, con0
, minus_lit0
,
9684 fold_convert_loc (loc
, type
,
9685 associate_trees (loc
, var0
, con0
,
9690 con0
= associate_trees (loc
, con0
, lit0
, code
, atype
);
9692 fold_convert_loc (loc
, type
, associate_trees (loc
, var0
, con0
,
9700 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9701 if (TREE_CODE (arg0
) == NEGATE_EXPR
9702 && negate_expr_p (op1
))
9703 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
9705 fold_convert_loc (loc
, type
,
9706 TREE_OPERAND (arg0
, 0)));
9708 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9709 __complex__ ( x, -y ). This is not the same for SNaNs or if
9710 signed zeros are involved. */
9711 if (!HONOR_SNANS (element_mode (arg0
))
9712 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
9713 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
9715 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
9716 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
9717 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
9718 bool arg0rz
= false, arg0iz
= false;
9719 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
9720 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
9722 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
9723 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
9724 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
9726 tree rp
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
9728 : build1 (REALPART_EXPR
, rtype
, arg1
));
9729 tree ip
= arg0i
? arg0i
9730 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
9731 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
9733 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
9735 tree rp
= arg0r
? arg0r
9736 : build1 (REALPART_EXPR
, rtype
, arg0
);
9737 tree ip
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
9739 : build1 (IMAGPART_EXPR
, rtype
, arg1
));
9740 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
9745 /* A - B -> A + (-B) if B is easily negatable. */
9746 if (negate_expr_p (op1
)
9747 && ! TYPE_OVERFLOW_SANITIZED (type
)
9748 && ((FLOAT_TYPE_P (type
)
9749 /* Avoid this transformation if B is a positive REAL_CST. */
9750 && (TREE_CODE (op1
) != REAL_CST
9751 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1
))))
9752 || INTEGRAL_TYPE_P (type
)))
9753 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
9754 fold_convert_loc (loc
, type
, arg0
),
9757 /* Fold &a[i] - &a[j] to i-j. */
9758 if (TREE_CODE (arg0
) == ADDR_EXPR
9759 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == ARRAY_REF
9760 && TREE_CODE (arg1
) == ADDR_EXPR
9761 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == ARRAY_REF
)
9763 tree tem
= fold_addr_of_array_ref_difference (loc
, type
,
9764 TREE_OPERAND (arg0
, 0),
9765 TREE_OPERAND (arg1
, 0));
9770 if (FLOAT_TYPE_P (type
)
9771 && flag_unsafe_math_optimizations
9772 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
9773 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
9774 && (tem
= distribute_real_division (loc
, code
, type
, arg0
, arg1
)))
9777 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9778 one. Make sure the type is not saturating and has the signedness of
9779 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9780 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9781 if ((TREE_CODE (arg0
) == MULT_EXPR
9782 || TREE_CODE (arg1
) == MULT_EXPR
)
9783 && !TYPE_SATURATING (type
)
9784 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg0
))
9785 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (TREE_TYPE (arg1
))
9786 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
9788 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
9796 if (! FLOAT_TYPE_P (type
))
9798 /* Transform x * -C into -x * C if x is easily negatable. */
9799 if (TREE_CODE (op1
) == INTEGER_CST
9800 && tree_int_cst_sgn (op1
) == -1
9801 && negate_expr_p (op0
)
9802 && (tem
= negate_expr (op1
)) != op1
9803 && ! TREE_OVERFLOW (tem
))
9804 return fold_build2_loc (loc
, MULT_EXPR
, type
,
9805 fold_convert_loc (loc
, type
,
9806 negate_expr (op0
)), tem
);
9808 strict_overflow_p
= false;
9809 if (TREE_CODE (arg1
) == INTEGER_CST
9810 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
9811 &strict_overflow_p
)))
9813 if (strict_overflow_p
)
9814 fold_overflow_warning (("assuming signed overflow does not "
9815 "occur when simplifying "
9817 WARN_STRICT_OVERFLOW_MISC
);
9818 return fold_convert_loc (loc
, type
, tem
);
9821 /* Optimize z * conj(z) for integer complex numbers. */
9822 if (TREE_CODE (arg0
) == CONJ_EXPR
9823 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
9824 return fold_mult_zconjz (loc
, type
, arg1
);
9825 if (TREE_CODE (arg1
) == CONJ_EXPR
9826 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
9827 return fold_mult_zconjz (loc
, type
, arg0
);
      /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
	 This is not the same for NaNs or if signed zeros are
	 involved.  */
9834 if (!HONOR_NANS (arg0
)
9835 && !HONOR_SIGNED_ZEROS (element_mode (arg0
))
9836 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
))
9837 && TREE_CODE (arg1
) == COMPLEX_CST
9838 && real_zerop (TREE_REALPART (arg1
)))
9840 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
9841 if (real_onep (TREE_IMAGPART (arg1
)))
9843 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
9844 negate_expr (fold_build1_loc (loc
, IMAGPART_EXPR
,
9846 fold_build1_loc (loc
, REALPART_EXPR
, rtype
, arg0
));
9847 else if (real_minus_onep (TREE_IMAGPART (arg1
)))
9849 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
9850 fold_build1_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
),
9851 negate_expr (fold_build1_loc (loc
, REALPART_EXPR
,
9855 /* Optimize z * conj(z) for floating point complex numbers.
9856 Guarded by flag_unsafe_math_optimizations as non-finite
9857 imaginary components don't produce scalar results. */
9858 if (flag_unsafe_math_optimizations
9859 && TREE_CODE (arg0
) == CONJ_EXPR
9860 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
9861 return fold_mult_zconjz (loc
, type
, arg1
);
9862 if (flag_unsafe_math_optimizations
9863 && TREE_CODE (arg1
) == CONJ_EXPR
9864 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
9865 return fold_mult_zconjz (loc
, type
, arg0
);
9870 /* Canonicalize (X & C1) | C2. */
9871 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9872 && TREE_CODE (arg1
) == INTEGER_CST
9873 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
9875 int width
= TYPE_PRECISION (type
), w
;
9876 wide_int c1
= TREE_OPERAND (arg0
, 1);
9879 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9880 if ((c1
& c2
) == c1
)
9881 return omit_one_operand_loc (loc
, type
, arg1
,
9882 TREE_OPERAND (arg0
, 0));
9884 wide_int msk
= wi::mask (width
, false,
9885 TYPE_PRECISION (TREE_TYPE (arg1
)));
9887 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9888 if (msk
.and_not (c1
| c2
) == 0)
9889 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
,
9890 TREE_OPERAND (arg0
, 0), arg1
);
9892 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
9893 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
9894 mode which allows further optimizations. */
9897 wide_int c3
= c1
.and_not (c2
);
9898 for (w
= BITS_PER_UNIT
; w
<= width
; w
<<= 1)
9900 wide_int mask
= wi::mask (w
, false,
9901 TYPE_PRECISION (type
));
9902 if (((c1
| c2
) & mask
) == mask
&& c1
.and_not (mask
) == 0)
9910 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
,
9911 fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
9912 TREE_OPERAND (arg0
, 0),
9913 wide_int_to_tree (type
,
9918 /* See if this can be simplified into a rotate first. If that
9919 is unsuccessful continue in the association code. */
9923 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9924 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9925 && INTEGRAL_TYPE_P (type
)
9926 && integer_onep (TREE_OPERAND (arg0
, 1))
9927 && integer_onep (arg1
))
9928 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
,
9929 build_zero_cst (TREE_TYPE (arg0
)));
9931 /* See if this can be simplified into a rotate first. If that
9932 is unsuccessful continue in the association code. */
9936 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9937 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
9938 && INTEGRAL_TYPE_P (type
)
9939 && integer_onep (TREE_OPERAND (arg0
, 1))
9940 && integer_onep (arg1
))
9943 tem
= TREE_OPERAND (arg0
, 0);
9944 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
9945 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
9947 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
9948 build_zero_cst (TREE_TYPE (tem
)));
9950 /* Fold ~X & 1 as (X & 1) == 0. */
9951 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9952 && INTEGRAL_TYPE_P (type
)
9953 && integer_onep (arg1
))
9956 tem
= TREE_OPERAND (arg0
, 0);
9957 tem2
= fold_convert_loc (loc
, TREE_TYPE (tem
), arg1
);
9958 tem2
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
),
9960 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem2
,
9961 build_zero_cst (TREE_TYPE (tem
)));
9963 /* Fold !X & 1 as X == 0. */
9964 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
9965 && integer_onep (arg1
))
9967 tem
= TREE_OPERAND (arg0
, 0);
9968 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem
,
9969 build_zero_cst (TREE_TYPE (tem
)));
9972 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
9973 multiple of 1 << CST. */
9974 if (TREE_CODE (arg1
) == INTEGER_CST
)
9976 wide_int cst1
= arg1
;
9977 wide_int ncst1
= -cst1
;
9978 if ((cst1
& ncst1
) == ncst1
9979 && multiple_of_p (type
, arg0
,
9980 wide_int_to_tree (TREE_TYPE (arg1
), ncst1
)))
9981 return fold_convert_loc (loc
, type
, arg0
);
      /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
	 bits from CST2.  */
9986 if (TREE_CODE (arg1
) == INTEGER_CST
9987 && TREE_CODE (arg0
) == MULT_EXPR
9988 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
9990 wide_int warg1
= arg1
;
9991 wide_int masked
= mask_with_tz (type
, warg1
, TREE_OPERAND (arg0
, 1));
9994 return omit_two_operands_loc (loc
, type
, build_zero_cst (type
),
9996 else if (masked
!= warg1
)
9998 /* Avoid the transform if arg1 is a mask of some
9999 mode which allows further optimizations. */
10000 int pop
= wi::popcount (warg1
);
10001 if (!(pop
>= BITS_PER_UNIT
10003 && wi::mask (pop
, false, warg1
.get_precision ()) == warg1
))
10004 return fold_build2_loc (loc
, code
, type
, op0
,
10005 wide_int_to_tree (type
, masked
));
10009 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10010 ((A & N) + B) & M -> (A + B) & M
10011 Similarly if (N & M) == 0,
10012 ((A | N) + B) & M -> (A + B) & M
10013 and for - instead of + (or unary - instead of +)
10014 and/or ^ instead of |.
10015 If B is constant and (B & M) == 0, fold into A & M. */
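      /* Worked example (added commentary, not from the original
	 sources): with M = 15 (so M == (1 << 4) - 1) and N = 0xf0
	 (so (N & M) == 0),

	   ((a | 0xf0) + b) & 15    folds to    (a + b) & 15

	 because carries only propagate upward, so bits of N above the
	 mask cannot affect the masked low bits of the sum.  */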
10016 if (TREE_CODE (arg1
) == INTEGER_CST
)
10018 wide_int cst1
= arg1
;
10019 if ((~cst1
!= 0) && (cst1
& (cst1
+ 1)) == 0
10020 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
10021 && (TREE_CODE (arg0
) == PLUS_EXPR
10022 || TREE_CODE (arg0
) == MINUS_EXPR
10023 || TREE_CODE (arg0
) == NEGATE_EXPR
)
10024 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
))
10025 || TREE_CODE (TREE_TYPE (arg0
)) == INTEGER_TYPE
))
10031 /* Now we know that arg0 is (C + D) or (C - D) or
10032 -C and arg1 (M) is == (1LL << cst) - 1.
10033 Store C into PMOP[0] and D into PMOP[1]. */
10034 pmop
[0] = TREE_OPERAND (arg0
, 0);
10036 if (TREE_CODE (arg0
) != NEGATE_EXPR
)
10038 pmop
[1] = TREE_OPERAND (arg0
, 1);
10042 if ((wi::max_value (TREE_TYPE (arg0
)) & cst1
) != cst1
)
10045 for (; which
>= 0; which
--)
10046 switch (TREE_CODE (pmop
[which
]))
10051 if (TREE_CODE (TREE_OPERAND (pmop
[which
], 1))
10054 cst0
= TREE_OPERAND (pmop
[which
], 1);
10056 if (TREE_CODE (pmop
[which
]) == BIT_AND_EXPR
)
10061 else if (cst0
!= 0)
10063 /* If C or D is of the form (A & N) where
10064 (N & M) == M, or of the form (A | N) or
10065 (A ^ N) where (N & M) == 0, replace it with A. */
10066 pmop
[which
] = TREE_OPERAND (pmop
[which
], 0);
	      /* If C or D is an N where (N & M) == 0, it can be
		 omitted (assumed 0).  */
10071 if ((TREE_CODE (arg0
) == PLUS_EXPR
10072 || (TREE_CODE (arg0
) == MINUS_EXPR
&& which
== 0))
10073 && (cst1
& pmop
[which
]) == 0)
10074 pmop
[which
] = NULL
;
10080 /* Only build anything new if we optimized one or both arguments
10082 if (pmop
[0] != TREE_OPERAND (arg0
, 0)
10083 || (TREE_CODE (arg0
) != NEGATE_EXPR
10084 && pmop
[1] != TREE_OPERAND (arg0
, 1)))
10086 tree utype
= TREE_TYPE (arg0
);
10087 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
10089 /* Perform the operations in a type that has defined
10090 overflow behavior. */
10091 utype
= unsigned_type_for (TREE_TYPE (arg0
));
10092 if (pmop
[0] != NULL
)
10093 pmop
[0] = fold_convert_loc (loc
, utype
, pmop
[0]);
10094 if (pmop
[1] != NULL
)
10095 pmop
[1] = fold_convert_loc (loc
, utype
, pmop
[1]);
10098 if (TREE_CODE (arg0
) == NEGATE_EXPR
)
10099 tem
= fold_build1_loc (loc
, NEGATE_EXPR
, utype
, pmop
[0]);
10100 else if (TREE_CODE (arg0
) == PLUS_EXPR
)
10102 if (pmop
[0] != NULL
&& pmop
[1] != NULL
)
10103 tem
= fold_build2_loc (loc
, PLUS_EXPR
, utype
,
10105 else if (pmop
[0] != NULL
)
10107 else if (pmop
[1] != NULL
)
10110 return build_int_cst (type
, 0);
10112 else if (pmop
[0] == NULL
)
10113 tem
= fold_build1_loc (loc
, NEGATE_EXPR
, utype
, pmop
[1]);
10115 tem
= fold_build2_loc (loc
, MINUS_EXPR
, utype
,
10117 /* TEM is now the new binary +, - or unary - replacement. */
10118 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, utype
, tem
,
10119 fold_convert_loc (loc
, utype
, arg1
));
10120 return fold_convert_loc (loc
, type
, tem
);
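      /* Worked example (editorial): with M == 0x0f (so M + 1 is a power
         of two) and N == 0xff, (N & M) == M, hence
         ((A & 0xff) + B) & 0x0f folds to (A + B) & 0x0f: the AND with
         0xff cannot change the four low bits that survive the final
         mask, and addition propagates carries only upward.  */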
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
          && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
        {
          prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));

          wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
          if (mask == -1)
            return
              fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
        }

      goto associate;
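      /* Worked example (editorial): for unsigned char c, (int) c is in
         0..255, so ANDing it with 0377 -- a mask of all ones over c's
         8-bit precision -- changes nothing and the AND can be dropped.  */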
    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
         of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
          && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
          && real_zerop (arg1))
        return NULL_TREE;

      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2_loc (loc, RDIV_EXPR, type,
                                TREE_OPERAND (arg0, 0),
                                negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2_loc (loc, RDIV_EXPR, type,
                                negate_expr (arg0),
                                TREE_OPERAND (arg1, 0));
      return NULL_TREE;
    case TRUNC_DIV_EXPR:
      /* Fall through.  */

    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
         a power of 2, to A >> (N + log2(B)).  */
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == LSHIFT_EXPR
          && (TYPE_UNSIGNED (type)
              || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
        {
          tree sval = TREE_OPERAND (arg1, 0);
          if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
            {
              tree sh_cnt = TREE_OPERAND (arg1, 1);
              tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
                                         wi::exact_log2 (sval));

              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying A / (B << N)"),
                                       WARN_STRICT_OVERFLOW_MISC);

              sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
                                        sh_cnt, pow2);
              return fold_build2_loc (loc, RSHIFT_EXPR, type,
                                      fold_convert_loc (loc, type, arg0),
                                      sh_cnt);
            }
        }

      /* Fall through.  */
    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_zerop (arg1))
        return NULL_TREE;

      /* Convert -A / -B to A / B when the type is signed and overflow is
         undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          && TREE_CODE (arg0) == NEGATE_EXPR
          && negate_expr_p (op1))
        {
          if (INTEGRAL_TYPE_P (type))
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when distributing negation across "
                                    "division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_build2_loc (loc, code, type,
                                  fold_convert_loc (loc, type,
                                                    TREE_OPERAND (arg0, 0)),
                                  negate_expr (op1));
        }
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          && TREE_CODE (arg1) == NEGATE_EXPR
          && negate_expr_p (op0))
        {
          if (INTEGRAL_TYPE_P (type))
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when distributing negation across "
                                    "division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_build2_loc (loc, code, type,
                                  negate_expr (op0),
                                  fold_convert_loc (loc, type,
                                                    TREE_OPERAND (arg1, 0)));
        }
      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
         operation, EXACT_DIV_EXPR.

         Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
         At one time others generated faster code, but it's not clear if
         they still do after the last round of changes to the DIV code
         in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
          && multiple_of_p (type, arg0, arg1))
        return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
                                fold_convert (type, arg0),
                                fold_convert (type, arg1));
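      /* Editorial note: when the dividend is known to be an exact multiple
         of the divisor, e.g. (8 * X) ceil-divided by 4, the rounding mode
         is irrelevant, so EXACT_DIV_EXPR lets the expander pick its
         cheapest division sequence.  */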
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                         &strict_overflow_p)))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert_loc (loc, type, tem);
        }

      return NULL_TREE;
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                         &strict_overflow_p)))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying modulus"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert_loc (loc, type, tem);
        }

      return NULL_TREE;
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      /* Since negative shift count is not well-defined,
         don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
        return NULL_TREE;

      prec = element_precision (type);

      /* If we have a rotate of a bit operation with the rotate count and
         the second operand of the bit operation both constant,
         permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && (TREE_CODE (arg0) == BIT_AND_EXPR
              || TREE_CODE (arg0) == BIT_IOR_EXPR
              || TREE_CODE (arg0) == BIT_XOR_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, TREE_CODE (arg0), type,
                                  fold_build2_loc (loc, code, type,
                                                   arg00, arg1),
                                  fold_build2_loc (loc, code, type,
                                                   arg01, arg1));
        }
      /* Two consecutive rotates adding up to some integer multiple of the
         precision of the type can be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == RROTATE_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
                             prec) == 0)
        return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
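      /* Worked example (editorial): on a 32-bit type,
         (X r>> 10) r>> 22 rotates by 32 bits in total, which is a no-op,
         so the expression folds back to X.  */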
      return NULL_TREE;

    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
        return fold_convert_loc (loc, type, arg0);
      /* FALLTHRU */
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
         must be evaluated.  */
      if (integer_zerop (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
         case will be handled here.  */
      if (integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
         means A >= Y && A != MAX, but in this case we know that
         A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
          && !TREE_SIDE_EFFECTS (arg1))
        {
          tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
          if (tem && !operand_equal_p (tem, arg0, 0))
            return fold_build2_loc (loc, code, type, tem, arg1);

          tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
          if (tem && !operand_equal_p (tem, arg1, 0))
            return fold_build2_loc (loc, code, type, arg0, tem);
        }

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
          != NULL_TREE)
        return tem;

      return NULL_TREE;
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or true.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return fold_convert_loc (loc, type, arg0);
      /* FALLTHRU */
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant true, result is true, but we must
         evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
         TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      /* (X && !Y) || (!X && Y) is X ^ Y */
      if (TREE_CODE (arg0) == TRUTH_AND_EXPR
          && TREE_CODE (arg1) == TRUTH_AND_EXPR)
        {
          tree a0, a1, l0, l1, n0, n1;

          a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

          l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

          n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
          n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);

          if ((operand_equal_p (n0, a0, 0)
               && operand_equal_p (n1, a1, 0))
              || (operand_equal_p (n0, a1, 0)
                  && operand_equal_p (n1, a0, 0)))
            return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
        }
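      /* Worked example (editorial): for truth values X and Y,
         (X && !Y) || (!X && Y) is true exactly when X and Y differ,
         which is the definition of X ^ Y.  */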
      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
          != NULL_TREE)
        return tem;

      return NULL_TREE;
    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
        {
          tem = invert_truthvalue_loc (loc, arg0);
          return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
        }
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      return NULL_TREE;
    case EQ_EXPR:
    case NE_EXPR:
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
        return tem;

      /* bool_var != 1 becomes !bool_var. */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
          && code == NE_EXPR)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, TRUTH_NOT_EXPR,
                                                  TREE_TYPE (arg0), arg0));

      /* bool_var == 0 becomes !bool_var. */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
          && code == EQ_EXPR)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, TRUTH_NOT_EXPR,
                                                  TREE_TYPE (arg0), arg0));

      /* !exp != 0 becomes !exp */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
          && code == NE_EXPR)
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR
           || TREE_CODE (arg0) == POINTER_PLUS_EXPR
           || TREE_CODE (arg0) == MINUS_EXPR)
          && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
                                                                        0)),
                              arg1, 0)
          && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
              || POINTER_TYPE_P (TREE_TYPE (arg0))))
        {
          tree val = TREE_OPERAND (arg0, 1);
          val = fold_build2_loc (loc, code, type, val,
                                 build_int_cst (TREE_TYPE (val), 0));
          return omit_two_operands_loc (loc, type, val,
                                        TREE_OPERAND (arg0, 0), arg1);
        }
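      /* Worked example (editorial): (X + Y) == X becomes Y == 0; the
         omit_two_operands_loc wrapper keeps the omitted operands alive
         so any side effects in them are still evaluated.  */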
      /* Transform comparisons of the form X CMP X +- Y to Y CMP 0.  */
      if ((TREE_CODE (arg1) == PLUS_EXPR
           || TREE_CODE (arg1) == POINTER_PLUS_EXPR
           || TREE_CODE (arg1) == MINUS_EXPR)
          && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
                                                                        0)),
                              arg0, 0)
          && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
              || POINTER_TYPE_P (TREE_TYPE (arg1))))
        {
          tree val = TREE_OPERAND (arg1, 1);
          val = fold_build2_loc (loc, code, type, val,
                                 build_int_cst (TREE_TYPE (val), 0));
          return omit_two_operands_loc (loc, type, val,
                                        TREE_OPERAND (arg1, 0), arg0);
        }
      /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
          && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
                                                                        1)),
                              arg1, 0)
          && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
        return omit_two_operands_loc (loc, type,
                                      code == NE_EXPR
                                      ? boolean_true_node : boolean_false_node,
                                      TREE_OPERAND (arg0, 1), arg1);

      /* Transform comparisons of the form X CMP C - X if C % 2 == 1.  */
      if (TREE_CODE (arg1) == MINUS_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == INTEGER_CST
          && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
                                                                        1)),
                              arg0, 0)
          && wi::extract_uhwi (TREE_OPERAND (arg1, 0), 0, 1) == 1)
        return omit_two_operands_loc (loc, type,
                                      code == NE_EXPR
                                      ? boolean_true_node : boolean_false_node,
                                      TREE_OPERAND (arg1, 1), arg0);
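      /* Worked example (editorial): with odd C, C - X == X would require
         2 * X == C, and twice any integer is even -- even under wraparound
         arithmetic -- so the equality is always false (and != always
         true).  */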
      /* If this is an EQ or NE comparison with zero and ARG0 is
         (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
         two operations, but the latter can be done in one less insn
         on machines that have only two-operand insns or on which a
         constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_zerop (arg1))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          if (TREE_CODE (arg00) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg00, 0)))
            {
              tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
                                          arg01, TREE_OPERAND (arg00, 1));
              tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
                                     build_int_cst (TREE_TYPE (arg0), 1));
              return fold_build2_loc (loc, code, type,
                                      fold_convert_loc (loc, TREE_TYPE (arg1),
                                                        tem),
                                      arg1);
            }
          else if (TREE_CODE (arg01) == LSHIFT_EXPR
                   && integer_onep (TREE_OPERAND (arg01, 0)))
            {
              tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
                                          arg00, TREE_OPERAND (arg01, 1));
              tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
                                     build_int_cst (TREE_TYPE (arg0), 1));
              return fold_build2_loc (loc, code, type,
                                      fold_convert_loc (loc, TREE_TYPE (arg1),
                                                        tem),
                                      arg1);
            }
        }
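      /* Worked example (editorial): ((1 << foo) & bar) != 0 tests bit foo
         of bar; ((bar >> foo) & 1) != 0 tests the same bit but shifts the
         variable instead of the constant 1, which some machines can
         encode in one less instruction.  */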
      /* If this is an NE or EQ comparison of zero against the result of a
         signed MOD operation whose second operand is a power of 2, make
         the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
          && !TYPE_UNSIGNED (TREE_TYPE (arg0))
          && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
              || TREE_CODE (arg0) == CEIL_MOD_EXPR
              || TREE_CODE (arg0) == FLOOR_MOD_EXPR
              || TREE_CODE (arg0) == ROUND_MOD_EXPR)
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
        {
          tree newtype = unsigned_type_for (TREE_TYPE (arg0));
          tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
                                         fold_convert_loc (loc, newtype,
                                                           TREE_OPERAND (arg0,
                                                                         0)),
                                         fold_convert_loc (loc, newtype,
                                                           TREE_OPERAND (arg0,
                                                                         1)));

          return fold_build2_loc (loc, code, type, newmod,
                                  fold_convert_loc (loc, newtype, arg1));
        }
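      /* Worked example (editorial): for 32-bit int X, X % 4 == 0 holds
         exactly when the two low bits of X are zero, and the same is true
         of (unsigned) X % 4 == 0; the unsigned form avoids the extra
         sign-handling code of signed modulus.  */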
      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
         C1 is a valid shift constant, and C2 is a power of two, i.e.
         a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
             == INTEGER_CST
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && integer_zerop (arg1))
        {
          tree itype = TREE_TYPE (arg0);
          tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
          prec = TYPE_PRECISION (itype);

          /* Check for a valid shift count.  */
          if (wi::ltu_p (arg001, prec))
            {
              tree arg01 = TREE_OPERAND (arg0, 1);
              tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
              unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
              /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
                 can be rewritten as (X & (C2 << C1)) != 0.  */
              if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
                {
                  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype,
                                         arg01, arg001);
                  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                         arg000, tem);
                  return fold_build2_loc (loc, code, type, tem,
                                          fold_convert_loc (loc, itype,
                                                            arg1));
                }
              /* Otherwise, for signed (arithmetic) shifts,
                 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
                 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
              else if (!TYPE_UNSIGNED (itype))
                return fold_build2_loc (loc,
                                        code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                                        type,
                                        arg000, build_int_cst (itype, 0));
              /* Otherwise, for unsigned (logical) shifts,
                 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
                 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
              else
                return omit_one_operand_loc (loc, type,
                                             code == EQ_EXPR
                                             ? integer_one_node
                                             : integer_zero_node,
                                             arg000);
            }
        }
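      /* Worked example (editorial): ((X >> 4) & 1) != 0 becomes
         (X & 16) != 0 because 1 << 4 fits in the precision; for a 32-bit
         signed X, ((X >> 31) & 2) != 0 can only pick up a sign-extension
         bit, so it reduces to X < 0.  */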
      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
         Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
                                       TREE_TYPE (TREE_OPERAND (arg0, 1)),
                                       TREE_OPERAND (arg0, 1));
          tree dandnotc
            = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
                               fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
                               notc);
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
          if (integer_nonzerop (dandnotc))
            return omit_one_operand_loc (loc, type, rslt, arg0);
        }
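      /* Worked example (editorial): (X & 3) == 4 can never hold, since
         4 & ~3 is nonzero -- the AND can never produce a bit outside its
         mask -- so the comparison folds to 0 (and != to 1).  */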
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if ((TREE_CODE (arg0) == COMPONENT_REF
           || TREE_CODE (arg0) == BIT_FIELD_REF)
          /* Handle the constant case even without -O
             to make sure the warnings are given.  */
          && (optimize || TREE_CODE (arg1) == INTEGER_CST))
        {
          t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
          if (t1)
            return t1;
        }
      /* Optimize comparisons of strlen vs zero to a compare of the
         first character of the string vs zero.  To wit,
                strlen(ptr) == 0   =>  *ptr == 0
                strlen(ptr) != 0   =>  *ptr != 0
         Other cases should reduce to one of these two (or a constant)
         due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
          && integer_zerop (arg1))
        {
          tree fndecl = get_callee_fndecl (arg0);

          if (fndecl
              && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
              && call_expr_nargs (arg0) == 1
              && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
                 == POINTER_TYPE)
            {
              tree iref = build_fold_indirect_ref_loc (loc,
                                                       CALL_EXPR_ARG (arg0,
                                                                      0));
              return fold_build2_loc (loc, code, type, iref,
                                      build_int_cst (TREE_TYPE (iref), 0));
            }
        }
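      /* Worked example (editorial): strlen (p) == 0 holds exactly when
         the first character is the terminating NUL, so the call is
         replaced by *p == 0, avoiding the full length scan.  */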
      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
         of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
      if (TREE_CODE (arg0) == RSHIFT_EXPR
          && integer_zerop (arg1)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree itype = TREE_TYPE (arg00);
          if (wi::eq_p (arg01, element_precision (itype) - 1))
            {
              if (TYPE_UNSIGNED (itype))
                {
                  itype = signed_type_for (itype);
                  arg00 = fold_convert_loc (loc, itype, arg00);
                }
              return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                                      type, arg00, build_zero_cst (itype));
            }
        }
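      /* Worked example (editorial): for 32-bit X, (X >> 31) != 0 keeps
         only the sign bit, so it is equivalent to X < 0 (the operand is
         first converted to the signed type if it was unsigned).  */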
      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
         (X & C) == 0 when C is a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
        {
          tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
                                 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
                                 TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
                                  type,
                                  fold_convert_loc (loc, TREE_TYPE (arg0),
                                                    tem),
                                  fold_convert_loc (loc, TREE_TYPE (arg0),
                                                    arg1));
        }
      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
         constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
                                  type,
                                  arg00,
                                  build_int_cst (TREE_TYPE (arg00), 0));
        }
      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
         when C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
        {
          tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
          tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
                                 arg000, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
                                  type,
                                  tem, build_int_cst (TREE_TYPE (tem), 0));
        }
      if (integer_zerop (arg1)
          && tree_expr_nonzero_p (arg0))
        {
          tree res = constant_boolean_node (code == NE_EXPR, type);
          return omit_one_operand_loc (loc, type, res, arg0);
        }
      /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg10 = TREE_OPERAND (arg1, 0);
          tree arg11 = TREE_OPERAND (arg1, 1);
          tree itype = TREE_TYPE (arg0);

          if (operand_equal_p (arg01, arg11, 0))
            return fold_build2_loc (loc, code, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                                     fold_build2_loc (loc,
                                                                      BIT_XOR_EXPR,
                                                                      itype,
                                                                      arg00,
                                                                      arg10),
                                                     arg01),
                                    build_zero_cst (itype));

          if (operand_equal_p (arg01, arg10, 0))
            return fold_build2_loc (loc, code, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                                     fold_build2_loc (loc,
                                                                      BIT_XOR_EXPR,
                                                                      itype,
                                                                      arg00,
                                                                      arg11),
                                                     arg01),
                                    build_zero_cst (itype));

          if (operand_equal_p (arg00, arg11, 0))
            return fold_build2_loc (loc, code, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                                     fold_build2_loc (loc,
                                                                      BIT_XOR_EXPR,
                                                                      itype,
                                                                      arg01,
                                                                      arg10),
                                                     arg00),
                                    build_zero_cst (itype));

          if (operand_equal_p (arg00, arg10, 0))
            return fold_build2_loc (loc, code, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                                     fold_build2_loc (loc,
                                                                      BIT_XOR_EXPR,
                                                                      itype,
                                                                      arg01,
                                                                      arg11),
                                                     arg00),
                                    build_zero_cst (itype));
        }
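      /* Worked example (editorial): (X & C) == (Y & C) holds exactly when
         X and Y agree on every bit of C, i.e. ((X ^ Y) & C) == 0; the
         rewritten form needs no second AND.  */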
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (arg1) == BIT_XOR_EXPR)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg10 = TREE_OPERAND (arg1, 0);
          tree arg11 = TREE_OPERAND (arg1, 1);
          tree itype = TREE_TYPE (arg0);

          /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
             operand_equal_p guarantees no side-effects so we don't need
             to use omit_one_operand on Z.  */
          if (operand_equal_p (arg01, arg11, 0))
            return fold_build2_loc (loc, code, type, arg00,
                                    fold_convert_loc (loc, TREE_TYPE (arg00),
                                                      arg10));
          if (operand_equal_p (arg01, arg10, 0))
            return fold_build2_loc (loc, code, type, arg00,
                                    fold_convert_loc (loc, TREE_TYPE (arg00),
                                                      arg11));
          if (operand_equal_p (arg00, arg11, 0))
            return fold_build2_loc (loc, code, type, arg01,
                                    fold_convert_loc (loc, TREE_TYPE (arg01),
                                                      arg10));
          if (operand_equal_p (arg00, arg10, 0))
            return fold_build2_loc (loc, code, type, arg01,
                                    fold_convert_loc (loc, TREE_TYPE (arg01),
                                                      arg11));

          /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
          if (TREE_CODE (arg01) == INTEGER_CST
              && TREE_CODE (arg11) == INTEGER_CST)
            {
              tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
                                     fold_convert_loc (loc, itype, arg11));
              tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
              return fold_build2_loc (loc, code, type, tem,
                                      fold_convert_loc (loc, itype, arg10));
            }
        }
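      /* Worked example (editorial): (X ^ 5) == (Y ^ 3) can be rewritten
         as (X ^ (5 ^ 3)) == Y, i.e. (X ^ 6) == Y, folding the two
         constants into one XOR.  */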
      /* Attempt to simplify equality/inequality comparisons of complex
         values.  Only lower the comparison if the result is known or
         can be simplified to a single scalar comparison.  */
      if ((TREE_CODE (arg0) == COMPLEX_EXPR
           || TREE_CODE (arg0) == COMPLEX_CST)
          && (TREE_CODE (arg1) == COMPLEX_EXPR
              || TREE_CODE (arg1) == COMPLEX_CST))
        {
          tree real0, imag0, real1, imag1;
          tree rcond, icond;

          if (TREE_CODE (arg0) == COMPLEX_EXPR)
            {
              real0 = TREE_OPERAND (arg0, 0);
              imag0 = TREE_OPERAND (arg0, 1);
            }
          else
            {
              real0 = TREE_REALPART (arg0);
              imag0 = TREE_IMAGPART (arg0);
            }

          if (TREE_CODE (arg1) == COMPLEX_EXPR)
            {
              real1 = TREE_OPERAND (arg1, 0);
              imag1 = TREE_OPERAND (arg1, 1);
            }
          else
            {
              real1 = TREE_REALPART (arg1);
              imag1 = TREE_IMAGPART (arg1);
            }

          rcond = fold_binary_loc (loc, code, type, real0, real1);
          if (rcond && TREE_CODE (rcond) == INTEGER_CST)
            {
              if (integer_zerop (rcond))
                {
                  if (code == EQ_EXPR)
                    return omit_two_operands_loc (loc, type,
                                                  boolean_false_node,
                                                  imag0, imag1);
                  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
                }
              else
                {
                  if (code == NE_EXPR)
                    return omit_two_operands_loc (loc, type,
                                                  boolean_true_node,
                                                  imag0, imag1);
                  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
                }
            }

          icond = fold_binary_loc (loc, code, type, imag0, imag1);
          if (icond && TREE_CODE (icond) == INTEGER_CST)
            {
              if (integer_zerop (icond))
                {
                  if (code == EQ_EXPR)
                    return omit_two_operands_loc (loc, type,
                                                  boolean_false_node,
                                                  real0, real1);
                  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
                }
              else
                {
                  if (code == NE_EXPR)
                    return omit_two_operands_loc (loc, type,
                                                  boolean_true_node,
                                                  real0, real1);
                  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
                }
            }
        }

      return NULL_TREE;
= fold_comparison (loc
, code
, type
, op0
, op1
);
10942 if (tem
!= NULL_TREE
)
10945 /* Transform comparisons of the form X +- C CMP X. */
10946 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
10947 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
10948 && ((TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
10949 && !HONOR_SNANS (arg0
))
10950 || (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
10951 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))))
10953 tree arg01
= TREE_OPERAND (arg0
, 1);
10954 enum tree_code code0
= TREE_CODE (arg0
);
10957 if (TREE_CODE (arg01
) == REAL_CST
)
10958 is_positive
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01
)) ? -1 : 1;
10960 is_positive
= tree_int_cst_sgn (arg01
);
10962 /* (X - c) > X becomes false. */
10963 if (code
== GT_EXPR
10964 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
10965 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
10967 if (TREE_CODE (arg01
) == INTEGER_CST
10968 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
10969 fold_overflow_warning (("assuming signed overflow does not "
10970 "occur when assuming that (X - c) > X "
10971 "is always false"),
10972 WARN_STRICT_OVERFLOW_ALL
);
10973 return constant_boolean_node (0, type
);
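          /* Worked example (editorial): for signed x with undefined
             overflow, x - 1 > x could only hold if x - 1 wrapped around,
             so it folds to false; the warning flags code that was
             relying on wraparound behavior.  */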
          /* Likewise (X + c) < X becomes false.  */
          if (code == LT_EXPR
              && ((code0 == PLUS_EXPR && is_positive >= 0)
                  || (code0 == MINUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(X + c) < X is always false"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (0, type);
            }

          /* Convert (X - c) <= X to true.  */
          if (!HONOR_NANS (arg1)
              && code == LE_EXPR
              && ((code0 == MINUS_EXPR && is_positive >= 0)
                  || (code0 == PLUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(X - c) <= X is always true"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (1, type);
            }

          /* Convert (X + c) >= X to true.  */
          if (!HONOR_NANS (arg1)
              && code == GE_EXPR
              && ((code0 == PLUS_EXPR && is_positive >= 0)
                  || (code0 == MINUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(X + c) >= X is always true"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (1, type);
            }

          if (TREE_CODE (arg01) == INTEGER_CST)
            {
              /* Convert X + c > X and X - c < X to true for integers.  */
              if (code == GT_EXPR
                  && ((code0 == PLUS_EXPR && is_positive > 0)
                      || (code0 == MINUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X + c) > X is always true"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (1, type);
                }

              if (code == LT_EXPR
                  && ((code0 == MINUS_EXPR && is_positive > 0)
                      || (code0 == PLUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X - c) < X is always true"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (1, type);
                }

              /* Convert X + c <= X and X - c >= X to false for integers.  */
              if (code == LE_EXPR
                  && ((code0 == PLUS_EXPR && is_positive > 0)
                      || (code0 == MINUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X + c) <= X is always false"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (0, type);
                }

              if (code == GE_EXPR
                  && ((code0 == MINUS_EXPR && is_positive > 0)
                      || (code0 == PLUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X - c) >= X is always false"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (0, type);
                }
            }
        }
      /* If we are comparing an ABS_EXPR with a constant, we can
         convert all the cases into explicit comparisons, but they may
         well not be faster than doing the ABS and one comparison.
         But ABS (X) <= C is a range comparison, which becomes a subtraction
         and a comparison, and is probably faster.  */
      if (code == LE_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == ABS_EXPR
          && ! TREE_SIDE_EFFECTS (arg0)
          && (0 != (tem = negate_expr (arg1)))
          && TREE_CODE (tem) == INTEGER_CST
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                                build2 (GE_EXPR, type,
                                        TREE_OPERAND (arg0, 0), tem),
                                build2 (LE_EXPR, type,
                                        TREE_OPERAND (arg0, 0), arg1));
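      /* Worked example (editorial): abs (x) <= 5 becomes
         x >= -5 && x <= 5, a range check that avoids computing the
         absolute value (tem above is the negated constant, -5).  */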
      /* Convert ABS_EXPR<x> >= 0 to true.  */
      strict_overflow_p = false;
      if (code == GE_EXPR
          && (integer_zerop (arg1)
              || (! HONOR_NANS (arg0)
                  && real_zerop (arg1)))
          && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying comparison of "
                                    "absolute value and zero"),
                                   WARN_STRICT_OVERFLOW_CONDITIONAL);
          return omit_one_operand_loc (loc, type,
                                       constant_boolean_node (true, type),
                                       arg0);
        }

      /* Convert ABS_EXPR<x> < 0 to false.  */
      strict_overflow_p = false;
      if (code == LT_EXPR
          && (integer_zerop (arg1) || real_zerop (arg1))
          && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying comparison of "
                                    "absolute value and zero"),
                                   WARN_STRICT_OVERFLOW_CONDITIONAL);
          return omit_one_operand_loc (loc, type,
                                       constant_boolean_node (false, type),
                                       arg0);
        }
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
         and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
          && TYPE_UNSIGNED (TREE_TYPE (arg0))
          && TREE_CODE (arg1) == LSHIFT_EXPR
          && integer_onep (TREE_OPERAND (arg1, 0)))
        return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                           build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
                                   TREE_OPERAND (arg1, 1)),
                           build_zero_cst (TREE_TYPE (arg0)));
      /* Similarly for X < (cast) (1 << Y).  But cast can't be narrowing,
         otherwise Y might be >= # of bits in X's type and thus e.g.
         (unsigned char) (1 << Y) for Y 15 might be 0.
         If the cast is widening, then 1 << Y should have unsigned type,
         otherwise if Y is number of bits in the signed shift type minus 1,
         we can't optimize this.  E.g. (unsigned long long) (1 << Y) for Y
         31 might be 0xffffffff80000000.  */
      if ((code == LT_EXPR || code == GE_EXPR)
          && TYPE_UNSIGNED (TREE_TYPE (arg0))
          && CONVERT_EXPR_P (arg1)
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
          && (element_precision (TREE_TYPE (arg1))
              >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
          && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
              || (element_precision (TREE_TYPE (arg1))
                  == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
          && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
        {
          tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
                        TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
          return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                             fold_convert_loc (loc, TREE_TYPE (arg0), tem),
                             build_zero_cst (TREE_TYPE (arg0)));
        }

      return NULL_TREE;
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
        tree targ0 = strip_float_extensions (arg0);
        tree targ1 = strip_float_extensions (arg1);
        tree newtype = TREE_TYPE (targ0);

        if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
          newtype = TREE_TYPE (targ1);

        if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
          return fold_build2_loc (loc, code, type,
                                  fold_convert_loc (loc, newtype, targ0),
                                  fold_convert_loc (loc, newtype, targ1));
      }

      return NULL_TREE;
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
         nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
        return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
                                 : fold_convert_loc (loc, type, arg1);
      return pedantic_non_lvalue_loc (loc, tem);

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
   a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
   of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;

    case GOTO_EXPR:
      *walk_subtrees = 0;
      /* FALLTHRU */

    default:
      return NULL_TREE;
    }
}

/* Return whether the sub-tree ST contains a label which is accessible from
   outside the sub-tree.  */

static bool
contains_label_p (tree st)
{
  return
   (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
}
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary_loc (location_t loc, enum tree_code code, tree type,
                  tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 3);

  /* If this is a commutative operation, and OP0 is a constant, move it
     to OP1 to reduce the number of tests below.  */
  if (commutative_ternary_tree_code (code)
      && tree_swap_operands_p (op0, op1))
    return fold_build3_loc (loc, code, type, op1, op0, op2);

  tem = generic_simplify (loc, code, type, op0, op1, op2);
  if (tem)
    return tem;

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any case,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  if (op2)
    {
      arg2 = op2;
      STRIP_NOPS (arg2);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
          && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
        {
          unsigned HOST_WIDE_INT idx;
          tree field, value;
          FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx,
                                    field, value)
            if (field == arg1)
              return value;
        }
      return NULL_TREE;
    case COND_EXPR:
    case VEC_COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an
         lvalue, so all simple results must be passed through
         pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
        {
          tree unused_op = integer_zerop (arg0) ? op1 : op2;
          tem = integer_zerop (arg0) ? op2 : op1;
          /* Only optimize constant conditions when the selected branch
             has the same type as the COND_EXPR.  This avoids optimizing
             away "c ? x : throw", where the throw has a void type.
             Avoid throwing away the operand that contains a label.  */
          if ((!TREE_SIDE_EFFECTS (unused_op)
               || !contains_label_p (unused_op))
              && (! VOID_TYPE_P (TREE_TYPE (tem))
                  || VOID_TYPE_P (type)))
            return pedantic_non_lvalue_loc (loc, tem);
          return NULL_TREE;
        }
      else if (TREE_CODE (arg0) == VECTOR_CST)
        {
          if ((TREE_CODE (arg1) == VECTOR_CST
               || TREE_CODE (arg1) == CONSTRUCTOR)
              && (TREE_CODE (arg2) == VECTOR_CST
                  || TREE_CODE (arg2) == CONSTRUCTOR))
            {
              unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
              unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
              gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
              for (i = 0; i < nelts; i++)
                {
                  tree val = VECTOR_CST_ELT (arg0, i);
                  if (integer_all_onesp (val))
                    sel[i] = i;
                  else if (integer_zerop (val))
                    sel[i] = nelts + i;
                  else /* Currently unreachable.  */
                    return NULL_TREE;
                }
              tree t = fold_vec_perm (type, arg1, arg2, sel);
              if (t != NULL_TREE)
                return t;
            }
        }
      /* If we have A op B ? A : C, we may be able to convert this to a
         simpler expression, depending on the operation and the values
         of B and C.  Signed zeros prevent all of these transformations,
         for reasons given above each one.

         Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                             arg1, TREE_OPERAND (arg0, 1))
          && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
        {
          tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
          if (tem)
            return tem;
        }

      if (COMPARISON_CLASS_P (arg0)
          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                             op2,
                                             TREE_OPERAND (arg0, 1))
          && !HONOR_SIGNED_ZEROS (element_mode (op2)))
        {
          location_t loc0 = expr_location_or (arg0, loc);
          tem = fold_invert_truthvalue (loc0, arg0);
          if (tem && COMPARISON_CLASS_P (tem))
            {
              tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
              if (tem)
                return tem;
            }
        }

      /* If the second operand is simpler than the third, swap them
         since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
          && tree_swap_operands_p (op1, op2))
        {
          location_t loc0 = expr_location_or (arg0, loc);
          /* See if this can be inverted.  If it can't, possibly because
             it was a floating-point inequality comparison, don't do
             anything.  */
          tem = fold_invert_truthvalue (loc0, arg0);
          if (tem)
            return fold_build3_loc (loc, code, type, tem, op2, op1);
        }
      /* Convert A ? 1 : 0 to simply A.  */
      if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
                                 : (integer_onep (op1)
                                    && !VECTOR_TYPE_P (type)))
          && integer_zerop (op2)
          /* If we try to convert OP0 to our type, the
             call to fold will try to move the conversion inside
             a COND, which will recurse.  In that case, the COND_EXPR
             is probably the best choice, so leave it alone.  */
          && type == TREE_TYPE (arg0))
        return pedantic_non_lvalue_loc (loc, arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
         over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
          && code == COND_EXPR
          && integer_onep (op2)
          && !VECTOR_TYPE_P (type)
          && truth_value_p (TREE_CODE (arg0)))
        return pedantic_non_lvalue_loc (loc,
                                        fold_convert_loc (loc, type,
                                                          invert_truthvalue_loc (loc,
                                                                                 arg0)));
      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
          && integer_zerop (TREE_OPERAND (arg0, 1))
          && integer_zerop (op2)
          && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
        {
          /* sign_bit_p looks through both zero and sign extensions,
             but for this optimization only sign extensions are
             usable.  */
          tree tem2 = TREE_OPERAND (arg0, 0);
          while (tem != tem2)
            {
              if (TREE_CODE (tem2) != NOP_EXPR
                  || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
                {
                  tem = NULL_TREE;
                  break;
                }
              tem2 = TREE_OPERAND (tem2, 0);
            }
          /* sign_bit_p only checks ARG1 bits within A's precision.
             If <sign bit of A> has wider type than A, bits outside
             of A's precision in <sign bit of A> need to be checked.
             If they are all 0, this optimization needs to be done
             in unsigned A's type, if they are all 1 in signed A's type,
             otherwise this can't be done.  */
          if (tem
              && TYPE_PRECISION (TREE_TYPE (tem))
                 < TYPE_PRECISION (TREE_TYPE (arg1))
              && TYPE_PRECISION (TREE_TYPE (tem))
                 < TYPE_PRECISION (type))
            {
              int inner_width, outer_width;
              tree tem_type;

              inner_width = TYPE_PRECISION (TREE_TYPE (tem));
              outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
              if (outer_width > TYPE_PRECISION (type))
                outer_width = TYPE_PRECISION (type);

              wide_int mask = wi::shifted_mask
                (inner_width, outer_width - inner_width, false,
                 TYPE_PRECISION (TREE_TYPE (arg1)));

              wide_int common = mask & arg1;
              if (common == mask)
                {
                  tem_type = signed_type_for (TREE_TYPE (tem));
                  tem = fold_convert_loc (loc, tem_type, tem);
                }
              else if (common == 0)
                {
                  tem_type = unsigned_type_for (TREE_TYPE (tem));
                  tem = fold_convert_loc (loc, tem_type, tem);
                }
              else
                tem = NULL_TREE;
            }

          if (tem)
            return
              fold_convert_loc (loc, type,
                                fold_build2_loc (loc, BIT_AND_EXPR,
                                                 TREE_TYPE (tem), tem,
                                                 fold_convert_loc (loc,
                                                                   TREE_TYPE (tem),
                                                                   arg1)));
        }
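      /* Worked example (editorial): for 32-bit int A,
         A < 0 ? 0x80000000 : 0 is exactly A & 0x80000000, since the
         condition is true precisely when the sign bit is set.  */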
      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
         already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_zerop (op2)
          && integer_pow2p (arg1))
        {
          tree tem = TREE_OPERAND (arg0, 0);
          STRIP_NOPS (tem);
          if (TREE_CODE (tem) == RSHIFT_EXPR
              && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
              && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
                 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
            return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                    TREE_OPERAND (tem, 0), arg1);
        }
      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
         is probably obsolete because the first operand should be a
         truth value (that's why we have the two cases above), but let's
         leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
          && TREE_CODE (arg0) == NE_EXPR
          && integer_zerop (TREE_OPERAND (arg0, 1))
          && integer_pow2p (arg1)
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              arg1, OEP_ONLY_CONST))
        return pedantic_non_lvalue_loc (loc,
                                        fold_convert_loc (loc, type,
                                                          TREE_OPERAND (arg0,
                                                                        0)));
      /* Disable the transformations below for vectors, since
         fold_binary_op_with_conditional_arg may undo them immediately,
         yielding an infinite loop.  */
      if (code == VEC_COND_EXPR)
        return NULL_TREE;

      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (arg1))
          && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
        return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
                                                           : TRUTH_ANDIF_EXPR,
                                type, fold_convert_loc (loc, type, arg0),
                                arg1);
      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (arg1))
          && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
        {
          location_t loc0 = expr_location_or (arg0, loc);
          /* Only perform transformation if ARG0 is easily inverted.  */
          tem = fold_invert_truthvalue (loc0, arg0);
          if (tem)
            return fold_build2_loc (loc, code == VEC_COND_EXPR
                                         ? BIT_IOR_EXPR
                                         : TRUTH_ORIF_EXPR,
                                    type, fold_convert_loc (loc, type, tem),
                                    arg1);
        }

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (op2))
          && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
        {
          location_t loc0 = expr_location_or (arg0, loc);
          /* Only perform transformation if ARG0 is easily inverted.  */
          tem = fold_invert_truthvalue (loc0, arg0);
          if (tem)
            return fold_build2_loc (loc, code == VEC_COND_EXPR
                                         ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
                                    type, fold_convert_loc (loc, type, tem),
                                    op2);
        }

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (code == VEC_COND_EXPR ? integer_all_onesp (arg1)
                                : integer_onep (arg1)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (op2))
          && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
        return fold_build2_loc (loc, code == VEC_COND_EXPR
                                     ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
                                type, fold_convert_loc (loc, type, arg0),
                                op2);
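      /* Worked example (editorial): with truth values, a ? b : 0 is
         a && b and a ? 1 : b is a || b; the VEC_COND_EXPR forms use the
         bitwise operators because vector conditions are element-wise
         masks.  */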
      return NULL_TREE;

    case CALL_EXPR:
      /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
         of fold_ternary on them.  */
      gcc_unreachable ();
    case BIT_FIELD_REF:
      if (TREE_CODE (arg0) == VECTOR_CST
          && (type == TREE_TYPE (TREE_TYPE (arg0))
              || (TREE_CODE (type) == VECTOR_TYPE
                  && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
        {
          tree eltype = TREE_TYPE (TREE_TYPE (arg0));
          unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
          unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
          unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);

          if (n != 0
              && (idx % width) == 0
              && (n % width) == 0
              && ((idx + n) / width)
                 <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
            {
              idx = idx / width;
              n = n / width;

              if (TREE_CODE (arg0) == VECTOR_CST)
                {
                  if (n == 1)
                    return VECTOR_CST_ELT (arg0, idx);

                  tree *vals = XALLOCAVEC (tree, n);
                  for (unsigned i = 0; i < n; ++i)
                    vals[i] = VECTOR_CST_ELT (arg0, idx + i);
                  return build_vector (type, vals);
                }
            }
        }
      /* On constants we can use native encode/interpret to constant
         fold (nearly) all BIT_FIELD_REFs.  */
      if (CONSTANT_CLASS_P (arg0)
          && can_native_interpret_type_p (type)
          && BITS_PER_UNIT == 8)
        {
          unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
          unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
          /* Limit us to a reasonable amount of work.  To relax the
             other limitations we need bit-shifting of the buffer
             and rounding up the size.  */
          if (bitpos % BITS_PER_UNIT == 0
              && bitsize % BITS_PER_UNIT == 0
              && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
            {
              unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
              unsigned HOST_WIDE_INT len
                = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
                                      bitpos / BITS_PER_UNIT);
              if (len > 0
                  && len * BITS_PER_UNIT >= bitsize)
                {
                  tree v = native_interpret_expr (type, b,
                                                  bitsize / BITS_PER_UNIT);
                  if (v)
                    return v;
                }
            }
        }

      return NULL_TREE;
    case FMA_EXPR:
      /* For integers we can decompose the FMA if possible.  */
      if (TREE_CODE (arg0) == INTEGER_CST
          && TREE_CODE (arg1) == INTEGER_CST)
        return fold_build2_loc (loc, PLUS_EXPR, type,
                                const_binop (MULT_EXPR, arg0, arg1), arg2);
      if (integer_zerop (arg2))
        return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);

      return fold_fma (loc, type, arg0, arg1, arg2);
:
11663 if (TREE_CODE (arg2
) == VECTOR_CST
)
11665 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
), i
, mask
, mask2
;
11666 unsigned char *sel
= XALLOCAVEC (unsigned char, 2 * nelts
);
11667 unsigned char *sel2
= sel
+ nelts
;
11668 bool need_mask_canon
= false;
11669 bool need_mask_canon2
= false;
11670 bool all_in_vec0
= true;
11671 bool all_in_vec1
= true;
11672 bool maybe_identity
= true;
11673 bool single_arg
= (op0
== op1
);
11674 bool changed
= false;
11676 mask2
= 2 * nelts
- 1;
11677 mask
= single_arg
? (nelts
- 1) : mask2
;
11678 gcc_assert (nelts
== VECTOR_CST_NELTS (arg2
));
11679 for (i
= 0; i
< nelts
; i
++)
11681 tree val
= VECTOR_CST_ELT (arg2
, i
);
11682 if (TREE_CODE (val
) != INTEGER_CST
)
11685 /* Make sure that the perm value is in an acceptable
11688 need_mask_canon
|= wi::gtu_p (t
, mask
);
11689 need_mask_canon2
|= wi::gtu_p (t
, mask2
);
11690 sel
[i
] = t
.to_uhwi () & mask
;
11691 sel2
[i
] = t
.to_uhwi () & mask2
;
11693 if (sel
[i
] < nelts
)
11694 all_in_vec1
= false;
11696 all_in_vec0
= false;
11698 if ((sel
[i
] & (nelts
-1)) != i
)
11699 maybe_identity
= false;
11702 if (maybe_identity
)
11712 else if (all_in_vec1
)
11715 for (i
= 0; i
< nelts
; i
++)
11717 need_mask_canon
= true;
11720 if ((TREE_CODE (op0
) == VECTOR_CST
11721 || TREE_CODE (op0
) == CONSTRUCTOR
)
11722 && (TREE_CODE (op1
) == VECTOR_CST
11723 || TREE_CODE (op1
) == CONSTRUCTOR
))
11725 tree t
= fold_vec_perm (type
, op0
, op1
, sel
);
11726 if (t
!= NULL_TREE
)
11730 if (op0
== op1
&& !single_arg
)
11733 /* Some targets are deficient and fail to expand a single
11734 argument permutation while still allowing an equivalent
11735 2-argument version. */
11736 if (need_mask_canon
&& arg2
== op2
11737 && !can_vec_perm_p (TYPE_MODE (type
), false, sel
)
11738 && can_vec_perm_p (TYPE_MODE (type
), false, sel2
))
11740 need_mask_canon
= need_mask_canon2
;
11744 if (need_mask_canon
&& arg2
== op2
)
11746 tree
*tsel
= XALLOCAVEC (tree
, nelts
);
11747 tree eltype
= TREE_TYPE (TREE_TYPE (arg2
));
11748 for (i
= 0; i
< nelts
; i
++)
11749 tsel
[i
] = build_int_cst (eltype
, sel
[i
]);
11750 op2
= build_vector (TREE_TYPE (arg2
), tsel
);
11755 return build3_loc (loc
, VEC_PERM_EXPR
, type
, op0
, op1
, op2
);
    case BIT_INSERT_EXPR:
      /* Perform (partial) constant folding of BIT_INSERT_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST
          && TREE_CODE (arg1) == INTEGER_CST)
        {
          unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
          unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
          wide_int tem = wi::bit_and (arg0,
                                      wi::shifted_mask (bitpos, bitsize, true,
                                                        TYPE_PRECISION (type)));
          wide_int tem2
            = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
                                    bitsize), bitpos);
          return wide_int_to_tree (type, wi::bit_or (tem, tem2));
        }
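      /* Worked example (editorial): inserting the 4-bit value 0x3 at bit
         position 4 into 0xff clears bits 4..7 with the inverted mask
         (0xff & ~0xf0 == 0x0f) and ORs in 0x3 << 4, giving 0x3f.  */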
      else if (TREE_CODE (arg0) == VECTOR_CST
               && CONSTANT_CLASS_P (arg1)
               && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
                                      TREE_TYPE (arg1)))
        {
          unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
          unsigned HOST_WIDE_INT elsize
            = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
          if (bitpos % elsize == 0)
            {
              unsigned k = bitpos / elsize;
              if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
                return arg0;
              else
                {
                  tree *elts = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
                  memcpy (elts, VECTOR_CST_ELTS (arg0),
                          sizeof (tree) * TYPE_VECTOR_SUBPARTS (type));
                  elts[k] = arg1;
                  return build_vector (type, elts);
                }
            }
        }
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}

/* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
   of an array (or vector).  */

tree
get_array_ctor_element_at_index (tree ctor, offset_int access_index)
{
  tree index_type = NULL_TREE;
  offset_int low_bound = 0;

  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    {
      tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
      if (domain_type && TYPE_MIN_VALUE (domain_type))
	{
	  /* Static constructors for variably sized objects make no sense.  */
	  gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
	  index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
	  low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
	}
    }

  if (index_type)
    access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
			    TYPE_SIGN (index_type));

  offset_int index = low_bound - 1;
  if (index_type)
    index = wi::ext (index, TYPE_PRECISION (index_type),
		     TYPE_SIGN (index_type));

  offset_int max_index;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* Array constructor might explicitly set index, or specify a range,
	 or leave index NULL meaning that it is next index after previous
	 one.  */
      if (cfield)
	{
	  if (TREE_CODE (cfield) == INTEGER_CST)
	    max_index = index = wi::to_offset (cfield);
	  else
	    {
	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
	      index = wi::to_offset (TREE_OPERAND (cfield, 0));
	      max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
	    }
	}
      else
	{
	  index += 1;
	  if (index_type)
	    index = wi::ext (index, TYPE_PRECISION (index_type),
			     TYPE_SIGN (index_type));
	  max_index = index;
	}

      /* Do we have a match?  */
      if (wi::cmpu (access_index, index) >= 0
	  && wi::cmpu (access_index, max_index) <= 0)
	return cval;
    }

  return NULL_TREE;
}
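
/* For example (illustrative initializer): given
     int a[6] = { [0] = 1, [2 ... 4] = 7, 9 };
   the CONSTRUCTOR elements are (0, 1), (RANGE_EXPR (2, 4), 7) and
   (NULL, 9), so an ACCESS_INDEX of 3 falls inside the range and
   returns 7, while 5 is the implicit "next index" after the range
   and returns 9.  */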

/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;
  location_t loc = EXPR_LOCATION (expr);

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  /* CALL_EXPR-like objects with variable numbers of operands are
     treated specially.  */
  if (kind == tcc_vl_exp)
    {
      if (code == CALL_EXPR)
	{
	  tem = fold_call_expr (loc, expr, false);
	  return tem ? tem : expr;
	}
      return expr;
    }

  if (IS_EXPR_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  op0 = TREE_OPERAND (t, 0);
	  tem = fold_unary_loc (loc, code, type, op0);
	  return tem ? tem : expr;
	case 2:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  tem = fold_binary_loc (loc, code, type, op0, op1);
	  return tem ? tem : expr;
	case 3:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  op2 = TREE_OPERAND (t, 2);
	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
	  return tem ? tem : expr;
	default:
	  break;
	}
    }

  switch (code)
    {
    case ARRAY_REF:
      {
	tree op0 = TREE_OPERAND (t, 0);
	tree op1 = TREE_OPERAND (t, 1);

	if (TREE_CODE (op1) == INTEGER_CST
	    && TREE_CODE (op0) == CONSTRUCTOR
	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
	  {
	    tree val = get_array_ctor_element_at_index (op0,
							wi::to_offset (op1));
	    if (val)
	      return val;
	  }

	return t;
      }

    case CONSTRUCTOR:
      {
	/* Return a VECTOR_CST if possible.  */
	tree type = TREE_TYPE (t);
	if (TREE_CODE (type) != VECTOR_TYPE)
	  return t;

	unsigned i;
	tree val;
	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
	  if (! CONSTANT_CLASS_P (val))
	    return t;

	return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
      }

    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
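
/* A minimal usage sketch (hypothetical operands): folding the generic
   tree for 2 + 3,

     tree sum = fold (build2 (PLUS_EXPR, integer_type_node,
			      build_int_cst (integer_type_node, 2),
			      build_int_cst (integer_type_node, 3)));

   dispatches through fold_binary_loc and yields the INTEGER_CST 5;
   trees that cannot be simplified are returned unchanged.  */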

#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (const_tree, struct md5_ctx *,
				hash_table<nofree_ptr_hash<const tree_node> > *);
static void fold_check_failed (const_tree, const_tree);
void print_fold_checksum (const_tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after the actual fold call to verify that fold did not
   accidentally change the original expr.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  hash_table<nofree_ptr_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before);
  ht.empty ();

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}

void
print_fold_checksum (const_tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  hash_table<nofree_ptr_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum);
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}

static void
fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}

static void
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
		    hash_table<nofree_ptr_hash <const tree_node> > *ht)
{
  const tree_node **slot;
  enum tree_code code;
  union tree_node buf;
  int i, len;

 recursive_label:
  if (expr == NULL)
    return;
  slot = ht->find_slot (expr, INSERT);
  if (*slot)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && HAS_DECL_ASSEMBLER_NAME_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      SET_DECL_ASSEMBLER_NAME ((tree) &buf, NULL);
      buf.decl_with_vis.symtab_node = NULL;
      expr = (tree) &buf;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr)
	       || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)
	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
	       || TYPE_NEXT_VARIANT (expr)
	       || TYPE_ALIAS_SET_KNOWN_P (expr)))
    {
      /* Allow these fields to be modified.  */
      tree tmp;
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = tmp = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
      TYPE_POINTER_TO (tmp) = NULL;
      TYPE_REFERENCE_TO (tmp) = NULL;
      TYPE_NEXT_VARIANT (tmp) = NULL;
      TYPE_ALIAS_SET (tmp) = -1;
      if (TYPE_CACHED_VALUES_P (tmp))
	{
	  TYPE_CACHED_VALUES_P (tmp) = 0;
	  TYPE_CACHED_VALUES (tmp) = NULL;
	}
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST
      && code != SSA_NAME
      && CODE_CONTAINS_STRUCT (code, TS_COMMON))
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
	    fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  expr = TREE_CHAIN (expr);
	  goto recursive_label;
	  break;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
    case tcc_vl_exp:
      len = TREE_OPERAND_LENGTH (expr);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
	{
	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
	}
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
	{
	  if (TREE_CODE (expr) == FUNCTION_DECL)
	    {
	      fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
	      fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
	    }
	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
	}
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}

/* Helper function for outputting the checksum of a tree T.  When
   debugging with gdb, you can "define mynext" to be "next" followed
   by "call debug_fold_checksum (op0)", then just trace down till the
   outputs differ.  */

DEBUG_FUNCTION void
debug_fold_checksum (const_tree t)
{
  int i;
  unsigned char checksum[16];
  struct md5_ctx ctx;
  hash_table<nofree_ptr_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (t, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum);

  for (i = 0; i < 16; i++)
    fprintf (stderr, "%d ", checksum[i]);

  fprintf (stderr, "\n");
}

#endif
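
/* An illustrative note: the digest walk above deliberately copies DECLs
   and TYPEs into a local buffer and clears the fields fold is allowed
   to touch (assembler names, cached values, variant chains, alias
   sets), so only a modification to one of the remaining fields changes
   the before/after checksums and reaches fold_check_failed.  */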

/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  LOC is the location of the resulting expression.
   Return a folded expression if successful.  Otherwise, return a tree
   expression with code CODE of type TYPE with an operand OP0.  */

tree
fold_build1_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  hash_table<nofree_ptr_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before);
  ht.empty ();
#endif

  tem = fold_unary_loc (loc, code, type, op0);
  if (!tem)
    tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}

/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting
   expression.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with operands
   OP0 and OP1.  */

tree
fold_build2_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0, tree op1
		      MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_after_op0[16],
                checksum_after_op1[16];
  struct md5_ctx ctx;
  hash_table<nofree_ptr_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();
#endif

  tem = fold_binary_loc (loc, code, type, op0, op1);
  if (!tem)
    tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op1);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}

/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
		      tree op0, tree op1, tree op2 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_before_op2[16],
                checksum_after_op0[16],
                checksum_after_op1[16],
                checksum_after_op2[16];
  struct md5_ctx ctx;
  hash_table<nofree_ptr_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  ht.empty ();
#endif

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  ht.empty ();

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op2);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}

/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
   arguments in ARGARRAY, and a null static chain.
   Return a folded expression if successful.  Otherwise, return a CALL_EXPR
   of type TYPE from the given operands as constructed by build_call_array.  */

tree
fold_build_call_array_loc (location_t loc, tree type, tree fn,
			   int nargs, tree *argarray)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_fn[16],
                checksum_before_arglist[16],
                checksum_after_fn[16],
                checksum_after_arglist[16];
  struct md5_ctx ctx;
  hash_table<nofree_ptr_hash<const tree_node> > ht (32);
  int i;

  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_fn);
  ht.empty ();

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_arglist);
  ht.empty ();
#endif

  tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
  if (!tem)
    tem = build_call_array_loc (loc, type, fn, nargs, argarray);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_fn);
  ht.empty ();

  if (memcmp (checksum_before_fn, checksum_after_fn, 16))
    fold_check_failed (fn, tem);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_arglist);

  if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
    fold_check_failed (NULL_TREE, tem);
#endif
  return tem;
}
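
/* A minimal usage sketch (hypothetical operands): a caller simplifying
   X + 0 writes

     tree sum = fold_build2_loc (loc, PLUS_EXPR, type, x,
				 build_zero_cst (type));

   and gets X back when the simplification fires; if nothing folds, a
   freshly built PLUS_EXPR is returned instead, so callers never need
   to distinguish the two outcomes.  */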

/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;

tree
fold_build1_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1_loc (loc, code, type, op);

  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2_loc (loc, code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

tree
fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
				       int nargs, tree *argarray)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
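
/* For instance (illustrative): with -ftrapping-math, fold must not
   collapse 1.0 / 0.0 in ordinary code, since the trap is observable;
   inside fold_build2_initializer_loc the flags above are temporarily
   cleared, so a static initializer such as

     double d = 1.0 / 0.0;

   can still be folded to +Inf at compile time.  */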

/* Determine if the first argument is a multiple of the second argument.
   Return 0 if it is not, or if we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

int
multiple_of_p (tree type, const_tree top, const_tree bottom)
{
  gimple *stmt;
  tree t1, op1, op2;

  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
	return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));

    case MINUS_EXPR:
      /* It is impossible to prove if op0 - op1 is multiple of bottom
	 precisely, so be conservative here checking if both op0 and op1
	 are multiple of bottom.  Note we check the second operand first
	 since it's usually simpler.  */
      return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));

    case PLUS_EXPR:
      /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
	 as op0 - 3 if the expression has unsigned type.  For example,
	 (X / 3) + 0xfffffffd is multiple of 3, but 0xfffffffd is not.  */
      op1 = TREE_OPERAND (top, 1);
      if (TYPE_UNSIGNED (type)
	  && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
	op1 = fold_build1 (NEGATE_EXPR, type, op1);
      return (multiple_of_p (type, op1, bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
	      && 0 != (t1 = fold_convert (type,
					  const_binop (LSHIFT_EXPR,
						       size_one_node,
						       op1)))
	      && !TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* FALLTHRU */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case COND_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
	  || integer_zerop (bottom)
	  || (TYPE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
				SIGNED);

    case SSA_NAME:
      if (TREE_CODE (bottom) == INTEGER_CST
	  && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
	  && gimple_code (stmt) == GIMPLE_ASSIGN)
	{
	  enum tree_code code = gimple_assign_rhs_code (stmt);

	  /* Check for special cases to see if top is defined as multiple
	     of bottom:

	       top = (X & ~(bottom - 1) ; bottom is power of 2

	     or

	       Y = X % bottom
	       top = X - Y.  */

	  if (code == BIT_AND_EXPR
	      && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
	      && TREE_CODE (op2) == INTEGER_CST
	      && integer_pow2p (bottom)
	      && wi::multiple_of_p (wi::to_widest (op2),
				    wi::to_widest (bottom), UNSIGNED))
	    return 1;

	  op1 = gimple_assign_rhs1 (stmt);
	  if (code == MINUS_EXPR
	      && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
	      && TREE_CODE (op2) == SSA_NAME
	      && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
	      && gimple_code (stmt) == GIMPLE_ASSIGN
	      && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
	      && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
	      && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
	    return 1;
	}

      /* fall through */

    default:
      return 0;
    }
}
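
/* A worked example (illustrative): for unsigned int X, the PLUS_EXPR
   case rewrites (X / 3) * 3 + 0xfffffffdU as (X / 3) * 3 - 3; both the
   MULT_EXPR operand and the negated constant are multiples of 3, so
   multiple_of_p returns 1 even though 0xfffffffd itself is not.  */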

#define tree_expr_nonnegative_warnv_p(X, Y) \
  _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0

#define RECURSE(X) \
  ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))

/* Return true if CODE or TYPE is known to be non-negative.  */

static bool
tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
{
  if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
      && truth_value_p (code))
    /* Truth values evaluate to 0 or 1, which is nonnegative unless we
       have a signed:1 type (where the value is -1 and 0).  */
    return true;

  return false;
}

/* Return true if (CODE OP0) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				bool *strict_overflow_p, int depth)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
	 ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!ANY_INTEGRAL_TYPE_P (type))
	return true;
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  *strict_overflow_p = true;
	  return true;
	}
      break;

    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      return RECURSE (op0);

    CASE_CONVERT:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return RECURSE (op0);
	    if (INTEGRAL_TYPE_P (inner_type))
	      {
		if (TYPE_UNSIGNED (inner_type))
		  return true;
		return RECURSE (op0);
	      }
	  }
	else if (INTEGRAL_TYPE_P (outer_type))
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return RECURSE (op0);
	    if (INTEGRAL_TYPE_P (inner_type))
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TYPE_UNSIGNED (inner_type);
	  }
      }
      break;

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}

/* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				 tree op1, bool *strict_overflow_p,
				 int depth)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type))
	return RECURSE (op0) && RECURSE (op1);

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == NOP_EXPR
	  && TREE_CODE (op1) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    {
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (type);
	    }
	}
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* x * x is always non-negative for floating point x
	     or without overflow.  */
	  if (operand_equal_p (op0, op1, 0)
	      || (RECURSE (op0) && RECURSE (op1)))
	    {
	      if (ANY_INTEGRAL_TYPE_P (type)
		  && TYPE_OVERFLOW_UNDEFINED (type))
		*strict_overflow_p = true;
	      return true;
	    }
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
	  && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
	{
	  tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
	    ? TREE_TYPE (TREE_OPERAND (op0, 0))
	    : TREE_TYPE (op0);
	  tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
	    ? TREE_TYPE (TREE_OPERAND (op1, 0))
	    : TREE_TYPE (op1);

	  bool unsigned0 = TYPE_UNSIGNED (inner0);
	  bool unsigned1 = TYPE_UNSIGNED (inner1);

	  if (TREE_CODE (op0) == INTEGER_CST)
	    unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;

	  if (TREE_CODE (op1) == INTEGER_CST)
	    unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;

	  if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
	      && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
	    {
	      unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
		? tree_int_cst_min_precision (op0, UNSIGNED)
		: TYPE_PRECISION (inner0);

	      unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
		? tree_int_cst_min_precision (op1, UNSIGNED)
		: TYPE_PRECISION (inner1);

	      return precision0 + precision1 < TYPE_PRECISION (type);
	    }
	}
      return false;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return RECURSE (op0) || RECURSE (op1);

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return RECURSE (op0) && RECURSE (op1);

    case TRUNC_MOD_EXPR:
      return RECURSE (op0);

    case FLOOR_MOD_EXPR:
      return RECURSE (op1);

    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}

/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));

    case SSA_NAME:
      /* Limit the depth of recursion to avoid quadratic behavior.
	 This is expected to catch almost all occurrences in practice.
	 If this code misses important cases that unbounded recursion
	 would not, passes that need this information could be revised
	 to provide it through dataflow propagation.  */
      return (!name_registered_for_update_p (t)
	      && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
	      && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
						  strict_overflow_p, depth));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
    }
}
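
/* A worked example of the PLUS_EXPR rule above (illustrative types):
   adding two values zero-extended from unsigned char into int yields at
   most 255 + 255 = 510, which needs 9 bits; since 8 + 1 = 9 < 32, the
   sum can never reach the sign bit, so it is known non-negative.  */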

/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
			       bool *strict_overflow_p, int depth)
{
  switch (fn)
    {
    CASE_CFN_ACOS:
    CASE_CFN_ACOSH:
    CASE_CFN_CABS:
    CASE_CFN_COSH:
    CASE_CFN_ERFC:
    CASE_CFN_EXP:
    CASE_CFN_EXP10:
    CASE_CFN_EXP2:
    CASE_CFN_FABS:
    CASE_CFN_FDIM:
    CASE_CFN_HYPOT:
    CASE_CFN_POW10:
    CASE_CFN_FFS:
    CASE_CFN_PARITY:
    CASE_CFN_POPCOUNT:
    CASE_CFN_CLZ:
    CASE_CFN_CLRSB:
    case CFN_BUILT_IN_BSWAP32:
    case CFN_BUILT_IN_BSWAP64:
      /* Always true.  */
      return true;

    CASE_CFN_SQRT:
      /* sqrt(-0.0) is -0.0.  */
      if (!HONOR_SIGNED_ZEROS (element_mode (type)))
	return true;
      return RECURSE (arg0);

    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CEIL:
    CASE_CFN_ERF:
    CASE_CFN_EXPM1:
    CASE_CFN_FLOOR:
    CASE_CFN_FMOD:
    CASE_CFN_FREXP:
    CASE_CFN_ICEIL:
    CASE_CFN_IFLOOR:
    CASE_CFN_IRINT:
    CASE_CFN_IROUND:
    CASE_CFN_LCEIL:
    CASE_CFN_LDEXP:
    CASE_CFN_LFLOOR:
    CASE_CFN_LLCEIL:
    CASE_CFN_LLFLOOR:
    CASE_CFN_LLRINT:
    CASE_CFN_LLROUND:
    CASE_CFN_LRINT:
    CASE_CFN_LROUND:
    CASE_CFN_MODF:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
    CASE_CFN_ROUND:
    CASE_CFN_SCALB:
    CASE_CFN_SCALBLN:
    CASE_CFN_SCALBN:
    CASE_CFN_SIGNBIT:
    CASE_CFN_SIGNIFICAND:
    CASE_CFN_SINH:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      /* True if the 1st argument is nonnegative.  */
      return RECURSE (arg0);

    CASE_CFN_FMAX:
      /* True if the 1st OR 2nd arguments are nonnegative.  */
      return RECURSE (arg0) || RECURSE (arg1);

    CASE_CFN_FMIN:
      /* True if the 1st AND 2nd arguments are nonnegative.  */
      return RECURSE (arg0) && RECURSE (arg1);

    CASE_CFN_COPYSIGN:
      /* True if the 2nd argument is nonnegative.  */
      return RECURSE (arg1);

    CASE_CFN_POWI:
      /* True if the 1st argument is nonnegative or the second
	 argument is an even integer.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
	return true;
      return RECURSE (arg0);

    CASE_CFN_POW:
      /* True if the 1st argument is nonnegative or the second
	 argument is an even integer valued real.  */
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  REAL_VALUE_TYPE c;
	  HOST_WIDE_INT n;

	  c = TREE_REAL_CST (arg1);
	  n = real_to_integer (&c);
	  if ((n & 1) == 0)
	    {
	      REAL_VALUE_TYPE cint;
	      real_from_integer (&cint, VOIDmode, n, SIGNED);
	      if (real_identical (&c, &cint))
		return true;
	    }
	}
      return RECURSE (arg0);

    default:
      break;
    }
  return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
}

/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
	tree temp = TARGET_EXPR_SLOT (t);
	t = TARGET_EXPR_INITIAL (t);

	/* If the initializer is non-void, then it's a normal expression
	   that will be assigned to the slot.  */
	if (!VOID_TYPE_P (t))
	  return RECURSE (t);

	/* Otherwise, the initializer sets the slot in some way.  One common
	   way is an assignment statement at the end of the initializer.  */
	while (1)
	  {
	    if (TREE_CODE (t) == BIND_EXPR)
	      t = expr_last (BIND_EXPR_BODY (t));
	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
		     || TREE_CODE (t) == TRY_CATCH_EXPR)
	      t = expr_last (TREE_OPERAND (t, 0));
	    else if (TREE_CODE (t) == STATEMENT_LIST)
	      t = expr_last (t);
	    else
	      break;
	  }
	if (TREE_CODE (t) == MODIFY_EXPR
	    && TREE_OPERAND (t, 0) == temp)
	  return RECURSE (TREE_OPERAND (t, 1));

	return false;
      }

    case CALL_EXPR:
      {
	tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
	tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;

	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
					      get_call_combined_fn (t),
					      arg0,
					      arg1,
					      strict_overflow_p, depth);
      }
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return RECURSE (TREE_OPERAND (t, 1));

    case BIND_EXPR:
      return RECURSE (expr_last (TREE_OPERAND (t, 1)));

    case SAVE_EXPR:
      return RECURSE (TREE_OPERAND (t, 0));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
    }
}

#undef tree_expr_nonnegative_warnv_p

/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
{
  enum tree_code code;

  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p, depth);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p, depth);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p, depth);
    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p, depth);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
    }
}

/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-negative"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
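
/* A minimal usage sketch (hypothetical caller): a pass deciding whether
   ABS_EXPR <x> can be simplified to x would ask

     if (tree_expr_nonnegative_p (x))
       simplify;

   and when the answer relied on signed overflow being undefined, the
   fold_overflow_warning call above has already issued the
   -Wstrict-overflow diagnostic on the caller's behalf.  */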

/* Return true when (CODE OP0) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

static bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
			    bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
		&& tree_expr_nonzero_warnv_p (op0,
					      strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    default:
      break;
    }

  return false;
}

/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

static bool
tree_binary_nonzero_warnv_p (enum tree_code code,
			     tree type,
			     tree op0,
			     tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* With the presence of negative values it is hard
	     to say something.  */
	  sub_strict_overflow_p = false;
	  if (!tree_expr_nonnegative_warnv_p (op0,
					      &sub_strict_overflow_p)
	      || !tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	    return false;
	  /* One of operands must be positive and the other non-negative.  */
	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
	     overflows, on a twos-complement machine the sum of two
	     nonnegative numbers can never be zero.  */
	  return (tree_expr_nonzero_warnv_p (op0,
					     strict_overflow_p)
		  || tree_expr_nonzero_warnv_p (op1,
						strict_overflow_p));
	}
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  if (tree_expr_nonzero_warnv_p (op0,
					 strict_overflow_p)
	      && tree_expr_nonzero_warnv_p (op1,
					    strict_overflow_p))
	    {
	      *strict_overflow_p = true;
	      return true;
	    }
	}
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (op1,
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;

	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p))
	    return true;

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_warnv_p (op0,
						strict_overflow_p);
	}
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
					  &sub_strict_overflow_p)
	       && tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p)
	      || tree_expr_nonzero_warnv_p (op0,
					    strict_overflow_p));

    default:
      break;
    }

  return false;
}

/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
	tree base = TREE_OPERAND (t, 0);

	if (!DECL_P (base))
	  base = get_base_address (base);

	if (base && TREE_CODE (base) == TARGET_EXPR)
	  base = TARGET_EXPR_SLOT (base);

	if (!base)
	  return false;

	/* For objects in symbol table check if we know they are non-zero.
	   Don't do anything for variables and functions before symtab is built;
	   it is quite possible that they will be declared weak later.  */
	int nonzero_addr = maybe_nonzero_address (base);
	if (nonzero_addr >= 0)
	  return nonzero_addr;

	/* Constants are never weak.  */
	if (CONSTANT_CLASS_P (base))
	  return true;

	return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    default:
      break;
    }
  return false;
}
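
/* A worked example for the MAX_EXPR rule above (illustrative): with
   int a > 0 and int b of unknown sign, MAX (a, b) >= a > 0, so knowing
   that operand 0 is both nonzero and non-negative is already enough;
   the symmetric branch handles the case where only b is known to be
   positive.  */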

#define integer_valued_real_p(X) \
  _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0

#define RECURSE(X) \
  ((integer_valued_real_p) (X, depth + 1))

/* Return true if the floating point result of (CODE OP0) has an
   integer value.  We also allow +Inf, -Inf and NaN to be considered
   integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

bool
integer_valued_real_unary_p (tree_code code, tree op0, int depth)
{
  switch (code)
    {
    case FLOAT_EXPR:
      return true;

    case ABS_EXPR:
      return RECURSE (op0);

    CASE_CONVERT:
      {
	tree type = TREE_TYPE (op0);
	if (TREE_CODE (type) == INTEGER_TYPE)
	  return true;
	if (TREE_CODE (type) == REAL_TYPE)
	  return RECURSE (op0);
	break;
      }

    default:
      break;
    }
  return false;
}

/* Return true if the floating point result of (CODE OP0 OP1) has an
   integer value.  We also allow +Inf, -Inf and NaN to be considered
   integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

bool
integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
{
  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      return RECURSE (op0) && RECURSE (op1);

    default:
      break;
    }
  return false;
}

/* Return true if the floating point result of calling FNDECL with arguments
   ARG0 and ARG1 has an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.  If FNDECL
   takes fewer than 2 arguments, the remaining ARGn are null.

   DEPTH is the current nesting depth of the query.  */

bool
integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
{
  switch (fn)
    {
    CASE_CFN_CEIL:
    CASE_CFN_FLOOR:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
    CASE_CFN_ROUND:
    CASE_CFN_TRUNC:
      return true;

    CASE_CFN_FMIN:
    CASE_CFN_FMAX:
      return RECURSE (arg0) && RECURSE (arg1);

    default:
      break;
    }
  return false;
}

/* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
   has an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

bool
integer_valued_real_single_p (tree t, int depth)
{
  switch (TREE_CODE (t))
    {
    case REAL_CST:
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    case COND_EXPR:
      return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));

    case SSA_NAME:
      /* Limit the depth of recursion to avoid quadratic behavior.
	 This is expected to catch almost all occurrences in practice.
	 If this code misses important cases that unbounded recursion
	 would not, passes that need this information could be revised
	 to provide it through dataflow propagation.  */
      return (!name_registered_for_update_p (t)
	      && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
	      && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
						    depth));

    default:
      break;
    }
  return false;
}

/* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
   has an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
integer_valued_real_invalid_p (tree t, int depth)
{
  switch (TREE_CODE (t))
    {
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return RECURSE (TREE_OPERAND (t, 1));

    case SAVE_EXPR:
      return RECURSE (TREE_OPERAND (t, 0));

    default:
      break;
    }
  return false;
}

#undef integer_valued_real_p

/* Return true if the floating point expression T has an integer value.
   We also allow +Inf, -Inf and NaN to be considered integer values.
   Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

bool
integer_valued_real_p (tree t, int depth)
{
  if (t == error_mark_node)
    return false;

  tree_code code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
					   TREE_OPERAND (t, 1), depth);

    case tcc_unary:
      return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return integer_valued_real_single_p (t, depth);

    default:
      break;
    }

  switch (code)
    {
    case COND_EXPR:
    case SSA_NAME:
      return integer_valued_real_single_p (t, depth);

    case CALL_EXPR:
      {
	tree arg0 = (call_expr_nargs (t) > 0
		     ? CALL_EXPR_ARG (t, 0)
		     : NULL_TREE);
	tree arg1 = (call_expr_nargs (t) > 1
		     ? CALL_EXPR_ARG (t, 1)
		     : NULL_TREE);
	return integer_valued_real_call_p (get_call_combined_fn (t),
					   arg0, arg1, depth);
      }

    default:
      return integer_valued_real_invalid_p (t, depth);
    }
}

/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop_loc (loc, index,
				     fold_convert_loc (loc, sizetype,
						       low_bound));

	  string = exp1;
	}

      if (string
	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return build_int_cst_type (TREE_TYPE (exp),
				   (TREE_STRING_POINTER (string)
				    [TREE_INT_CST_LOW (index)]));
    }
  return NULL_TREE;
}
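
/* For example (illustrative): folding "abc"[1] goes through the
   ARRAY_REF path with a zero lower bound; index 1 is less than the
   string length 4 (which includes the trailing NUL), and the element
   mode is a 1-byte integer mode, so the access folds to the
   INTEGER_CST 'b'.  */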

/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	bool overflow;
	wide_int val = wi::neg (arg0, &overflow);
	t = force_fit_type (type, val, 1,
			    (overflow | TREE_OVERFLOW (arg0))
			    && !TYPE_UNSIGNED (type));
	break;
      }

    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
	FIXED_VALUE_TYPE f;
	bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
					    &(TREE_FIXED_CST (arg0)), NULL,
					    TYPE_SATURATING (type));
	t = build_fixed (type, f);
	/* Propagate overflow flags.  */
	if (overflow_p | TREE_OVERFLOW (arg0))
	  TREE_OVERFLOW (t) = 1;
	break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}

/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	/* If the value is unsigned or non-negative, then the absolute value
	   is the same as the ordinary value.  */
	if (!wi::neg_p (arg0, TYPE_SIGN (type)))
	  t = arg0;

	/* If the value is negative, then the absolute value is
	   its negation.  */
	else
	  {
	    bool overflow;
	    wide_int val = wi::neg (arg0, &overflow);
	    t = force_fit_type (type, val, -1,
				overflow | TREE_OVERFLOW (arg0));
	  }
      }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
	t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}

/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (const_tree arg0, tree type)
{
  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
}

/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
					  TREE_REALPART (op0),
					  TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
					  TREE_IMAGPART (op0),
					  TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
	return NULL_TREE;
    }

  if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
    {
      if (!VECTOR_TYPE_P (type))
	{
	  /* Have vector comparison with scalar boolean result.  */
	  gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
		      && VECTOR_CST_NELTS (op0) == VECTOR_CST_NELTS (op1));
	  for (unsigned i = 0; i < VECTOR_CST_NELTS (op0); i++)
	    {
	      tree elem0 = VECTOR_CST_ELT (op0, i);
	      tree elem1 = VECTOR_CST_ELT (op1, i);
	      tree tmp = fold_relational_const (code, type, elem0, elem1);
	      if (tmp == NULL_TREE)
		return NULL_TREE;
	      if (integer_zerop (tmp))
		return constant_boolean_node (false, type);
	    }
	  return constant_boolean_node (true, type);
	}
      unsigned count = VECTOR_CST_NELTS (op0);
      tree *elts = XALLOCAVEC (tree, count);
      gcc_assert (VECTOR_CST_NELTS (op1) == count
		  && TYPE_VECTOR_SUBPARTS (type) == count);

      for (unsigned i = 0; i < count; i++)
	{
	  tree elem_type = TREE_TYPE (type);
	  tree elem0 = VECTOR_CST_ELT (op0, i);
	  tree elem1 = VECTOR_CST_ELT (op1, i);

	  tree tem = fold_relational_const (code, elem_type,
					    elem0, elem1);

	  if (tem == NULL_TREE)
	    return NULL_TREE;

	  elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
	}

      return build_vector (type, elts);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      std::swap (op0, op1);
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     Otherwise return T.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else
	result = tree_int_cst_lt (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
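
/* A worked example of the canonicalization above (illustrative): for
   5 >= 3, GE becomes LT with invert = 1; tree_int_cst_lt (5, 3) is 0,
   and inverting gives 1, i.e. boolean_true_node.  */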

/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check the expression inside the
     return and the right hand side of the modify expression inside the
     return.  If either has no side effects, we don't need to wrap the
     expression in a cleanup point expression.  Note we don't check the
     left hand side of the modify because it should always be a return
     decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype)
               && (!in_gimple_form
                   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          if (in_gimple_form
              && TREE_CODE (min_val) != INTEGER_CST)
            return NULL_TREE;
          return build4_loc (loc, ARRAY_REF, type, op, min_val,
                             NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && type == TREE_TYPE (optype))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3_loc (loc, BIT_FIELD_REF, type, op,
                                  part_width, index);
        }
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
        {
          tree op00type;
          op00 = TREE_OPERAND (op00, 0);
          op00type = TREE_TYPE (op00);

          /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
          if (TREE_CODE (op00type) == VECTOR_TYPE
              && type == TREE_TYPE (op00type))
            {
              tree part_width = TYPE_SIZE (type);
              unsigned HOST_WIDE_INT max_offset
                = (tree_to_uhwi (part_width) / BITS_PER_UNIT
                   * TYPE_VECTOR_SUBPARTS (op00type));
              if (tree_int_cst_sign_bit (op01) == 0
                  && compare_tree_int (op01, max_offset) == -1)
                {
                  unsigned HOST_WIDE_INT offset = tree_to_uhwi (op01);
                  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
                  tree index = bitsize_int (indexi);
                  return fold_build3_loc (loc,
                                          BIT_FIELD_REF, type, op00,
                                          part_width, index);
                }
            }
          /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
          else if (TREE_CODE (op00type) == COMPLEX_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree size = TYPE_SIZE_UNIT (type);
              if (tree_int_cst_equal (size, op01))
                return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
            }
          /* ((foo *)&fooarray)[1] => fooarray[1] */
          else if (TREE_CODE (op00type) == ARRAY_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree type_domain = TYPE_DOMAIN (op00type);
              tree min_val = size_zero_node;
              if (type_domain && TYPE_MIN_VALUE (type_domain))
                min_val = TYPE_MIN_VALUE (type_domain);
              op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
                                     TYPE_SIZE_UNIT (type));
              op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
              return build4_loc (loc, ARRAY_REF, type, op00, op01,
                                 NULL_TREE, NULL_TREE);
            }
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
          || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
          && TREE_CODE (min_val) != INTEGER_CST)
        return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
                         NULL_TREE);
    }

  return NULL_TREE;
}
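/* The transformations above, written out as (hypothetical) C source
   for illustration; assume "int a[4]; _Complex float cf;
   int (*arrptr)[4];":

     *&a[1]            =>  a[1]           (the *&p => p case)
     *(int *)&a        =>  a[0]           (array case)
     *(float *)&cf     =>  __real__ cf    (complex case)
     ((float *)&cf)[1] =>  __imag__ cf    (POINTER_PLUS_EXPR case)
     *(int *)arrptr    =>  (*arrptr)[0]   (pointer-to-array case)

   Each branch fires only when TYPE matches the element type exactly,
   which is why every case checks type == TREE_TYPE (...).  */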
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}
/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
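/* Illustrative example: for a discarded expression such as
   "(void) (x + foo ());", the loop above peels away the parts without
   side effects:

     t = x + foo ();   (tcc_binary; x has no side effects)
     t = foo ();       (only the side-effecting operand remains)

   Likewise a COMPOUND_EXPR keeps only its first operand when the
   second is side-effect free, and a COND_EXPR collapses to its
   condition when both arms are side-effect free.  */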
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.  */

tree
round_up_loc (location_t loc, tree value, unsigned int divisor)
{
  tree div = NULL_TREE;

  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (pow2_or_zerop (divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          wide_int val = value;
          bool overflow_p;

          if ((val & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          val += divisor - 1;
          val &= (int) -divisor;
          if (val == 0)
            overflow_p = true;

          return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop_loc (loc, PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), - (int) divisor);
          value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (pow2_or_zerop (divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &reversep,
                                  &volatilep);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
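/* Illustrative example: given "int a[10];" and the two ADDR_EXPRs
   e1 = &a[7] and e2 = &a[2], both addresses share the core &a and
   have constant byte offsets, so ptr_difference_const returns true
   and stores (7 - 2) * sizeof (int) == 20 in *DIFF.  If either
   offset involved a variable index, it would return false.  */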
/* Return OFF converted to a pointer offset type suitable as offset for
   POINTER_PLUS_EXPR.  Use location LOC for this conversion.  */

tree
convert_to_ptrofftype_loc (location_t loc, tree off)
{
  return fold_convert_loc (loc, sizetype, off);
}
/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */

tree
fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
                          ptr, convert_to_ptrofftype_loc (loc, off));
}
/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */

tree
fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
                          ptr, size_int (off));
}
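/* Usage sketch (hypothetical caller): both helpers build a folded
   "PTR + OFF", converting the offset to sizetype as POINTER_PLUS_EXPR
   requires:

     tree p1 = fold_build_pointer_plus_hwi_loc (loc, ptr, 4);
     tree p2 = fold_build_pointer_plus_loc (loc, ptr, off_expr);  */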
/* Return a char pointer for a C string if it is a string constant
   or a sum of string constant and integer constant.  We only support
   string constants properly terminated with a '\0' character.
   If STRLEN is a valid pointer, the length (including the terminating
   character) of the returned string is stored to *STRLEN.  */

const char *
c_getstr (tree src, unsigned HOST_WIDE_INT *strlen)
{
  tree offset_node;

  if (strlen)
    *strlen = 0;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL;

  unsigned HOST_WIDE_INT offset = 0;
  if (offset_node != NULL_TREE)
    {
      if (!tree_fits_uhwi_p (offset_node))
        return NULL;
      else
        offset = tree_to_uhwi (offset_node);
    }

  unsigned HOST_WIDE_INT string_length = TREE_STRING_LENGTH (src);
  const char *string = TREE_STRING_POINTER (src);

  /* Support only properly null-terminated strings.  */
  if (string_length == 0
      || string[string_length - 1] != '\0'
      || offset >= string_length)
    return NULL;

  if (strlen)
    *strlen = string_length - offset;
  return string + offset;
}
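/* Illustrative example: for a tree denoting "hello" + 1,

     unsigned HOST_WIDE_INT len;
     const char *s = c_getstr (src, &len);

   yields s pointing at "ello" with len == 5 (including the terminating
   NUL).  Unterminated string constants and out-of-range offsets make
   the function return NULL.  */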
#if CHECKING_P

namespace selftest {

/* Helper functions for writing tests of folding trees.  */

/* Verify that the binary op (LHS CODE RHS) folds to CONSTANT.  */

static void
assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
                             tree constant)
{
  ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
}

/* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
   wrapping WRAPPED_EXPR.  */

static void
assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
                                 tree wrapped_expr)
{
  tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
  ASSERT_NE (wrapped_expr, result);
  ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
  ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
}
/* Verify that various arithmetic binary operations are folded
   correctly.  */

static void
test_arithmetic_folding ()
{
  tree type = integer_type_node;
  tree x = create_tmp_var_raw (type, "x");
  tree zero = build_zero_cst (type);
  tree one = build_int_cst (type, 1);

  /* Addition.  */
  /* 1 <-- (0 + 1) */
  assert_binop_folds_to_const (zero, PLUS_EXPR, one,
                               one);
  assert_binop_folds_to_const (one, PLUS_EXPR, zero,
                               one);

  /* (nonlvalue)x <-- (x + 0) */
  assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
                                   x);

  /* Subtraction.  */
  /* 0 <-- (x - x) */
  assert_binop_folds_to_const (x, MINUS_EXPR, x,
                               zero);
  assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
                                   x);

  /* Multiplication.  */
  /* 0 <-- (x * 0) */
  assert_binop_folds_to_const (x, MULT_EXPR, zero,
                               zero);

  /* (nonlvalue)x <-- (x * 1) */
  assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
                                   x);
}
/* Verify that various binary operations on vectors are folded
   correctly.  */

static void
test_vector_folding ()
{
  tree inner_type = integer_type_node;
  tree type = build_vector_type (inner_type, 4);
  tree zero = build_zero_cst (type);
  tree one = build_one_cst (type);

  /* Verify equality tests that return a scalar boolean result.  */
  tree res_type = boolean_type_node;
  ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
}
/* Run all of the selftests within this file.  */

void
fold_const_c_tests ()
{
  test_arithmetic_folding ();
  test_vector_folding ();
}

} // namespace selftest

#endif /* CHECKING_P */