/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
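/* A hypothetical usage sketch (not part of the original source): a
   caller in the middle end might build and fold a size expression as

     tree nbytes = size_binop (MULT_EXPR, size_int (4),
                               fold_convert (sizetype, nelts));

   where size_int wraps the integer in a sizetype constant and
   size_binop folds the multiplication immediately whenever both
   operands are constants.  `nbytes' and `nelts' are made-up names
   used for illustration only.  */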
#include "coretypes.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "diagnostic-core.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "hash-table.h"  /* Required for ENABLE_FOLD_CHECKING.  */
#include "generic-match.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
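/* Illustration (an addition to the original comment, derived from the
   encoding above): the low three bits stand for "less", "equal" and
   "greater", and bit 3 for "unordered", so

     COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ)
     COMPCODE_NE == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD)

   which is what lets combine_comparisons below implement AND and OR
   of two comparisons as bitwise '&' and '|' of their codes.  */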
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify x in place;
   if the location can and needs to be set, unshare x first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

static tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
                         SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used in an expression.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}
/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}
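/* Example (an added illustration): in a 32-bit signed type only
   INT_MIN has just the sign bit set, so this predicate returns false
   for -2147483648 and true for every other INTEGER_CST of that type,
   since -INT_MIN is not representable.  */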
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = TYPE_VECTOR_SUBPARTS (type), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
        {
          if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
            break;
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
              || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
            return true;
        }
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        int count = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts = XALLOCAVEC (tree, count);

        for (i = 0; i < count; i++)
          {
            elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          /* If overflow is undefined then we have to be careful because
             we ask whether it's ok to associate the negate with the
             division which is not ok for example for
             -((a - b) / c) where (-(a - b)) / c may invoke undefined
             overflow because of negating INT_MIN.  So do not use
             negate_expr_p here but open-code the two important cases.  */
          tem = TREE_OPERAND (t, 0);
          if ((INTEGRAL_TYPE_P (type)
               && (TREE_CODE (tem) == NEGATE_EXPR
                   || (TREE_CODE (tem) == INTEGER_CST
                       && may_negate_without_overflow_p (tem))))
              || !INTEGRAL_TYPE_P (type))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted, except if it is a
   literal, for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
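/* A worked example (not part of the original source): splitting
   IN = (x + 3) with CODE == PLUS_EXPR stores 3 in *LITP, leaves *CONP
   null and returns x; splitting IN = (x - 3) instead stores 3 in
   *MINUS_LITP, because the literal was subtracted.  */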
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
                   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
                                  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RSHIFT_EXPR)
            code = LSHIFT_EXPR;
          else
            code = RSHIFT_EXPR;
        }

      if (code == RSHIFT_EXPR)
        /* It's unclear from the C standard whether shifts can overflow.
           The following code ignores overflow; perhaps a C standard
           interpretation ruling is needed.  */
        res = wi::rshift (arg1, arg2, sign);
      else
        res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RROTATE_EXPR)
            code = LROTATE_EXPR;
          else
            code = RROTATE_EXPR;
        }

      if (code == RROTATE_EXPR)
        res = wi::rrotate (arg1, arg2);
      else
        res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
                      (((sign == SIGNED || overflowable == -1)
                        && overflow)
                       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
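/* A hypothetical illustration: with `a' and `b' INTEGER_CST trees of
   value 2 and 3 in the same signed type,

     tree sum = int_const_binop (PLUS_EXPR, a, b);

   returns an INTEGER_CST of value 5; had the addition wrapped, the
   wide-int layer would report it through OVERFLOW and force_fit_type
   would record it in TREE_OVERFLOW of the result.  */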
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          {
            wide_int w2 = arg2;
            f2.data.high = w2.elt (1);
            f2.data.low = w2.elt (0);
            f2.mode = SImode;
          }
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthrough.  */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
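          /* A numeric check of the straight algorithm (an added
             illustration): (1 + 2i) / (3 + 4i) gives t = 3*3 + 4*4 = 25,
             tr = (1*3 + 2*4) / 25 = 11/25 and ti = (2*3 - 1*4) / 25 = 2/25,
             matching (1 + 2i) * (3 - 4i) / 25 = (11 + 2i) / 25.  */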
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_LSHIFT_EXPR
          || code == VEC_RSHIFT_EXPR)
        {
          if (!tree_fits_uhwi_p (arg2))
            return NULL_TREE;

          unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
          unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
          unsigned HOST_WIDE_INT innerc
            = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
          if (shiftc >= outerc || (shiftc % innerc) != 0)
            return NULL_TREE;
          int offset = shiftc / innerc;

          /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
             For reductions, compiler emits VEC_RSHIFT_EXPR always,
             for !BYTES_BIG_ENDIAN picks first vector element, but
             for BYTES_BIG_ENDIAN last element from the vector.  */
          if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
            offset = -offset;
          tree zero = build_zero_cst (TREE_TYPE (type));
          for (i = 0; i < count; i++)
            {
              if (i + offset < 0 || i + offset >= count)
                elts[i] = zero;
              else
                elts[i] = VECTOR_CST_ELT (arg1, i + offset);
            }
        }
      else
        for (i = 0; i < count; i++)
          {
            tree elem1 = VECTOR_CST_ELT (arg1, i);

            elts[i] = const_binop (code, elem1, arg2);

            /* It is possible that const_binop cannot handle the given
               code and returns NULL_TREE.  */
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

      return build_vector (type, elts);
    }

  return NULL_TREE;
}
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
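/* Example (an added illustration): with sizetype operands ARG0 == 4 and
   ARG1 == 8, size_diffop_loc computes 8 - 4 in the signed type and
   negates, returning the ssizetype constant -4 rather than a huge
   unsigned wraparound value.  */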
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
                         !POINTER_TYPE_P (TREE_TYPE (arg1)),
                         TREE_OVERFLOW (arg1));
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = true;
          val = lt;
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = true;
              val = ut;
            }
        }
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
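/* Example of the saturating semantics above (an added illustration):
   for a 32-bit signed target type, folding (int) 1.0e30 yields
   2147483647 with TREE_OVERFLOW set, (int) -1.0e30 yields -2147483648,
   and a NaN operand yields 0, in each case recording the overflow
   rather than trapping.  */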
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  enum machine_mode mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do so by adding 1 to temp when the fractional bits are not
     all zero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
                      (temp.is_negative ()
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;
  double_int di;

  gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);

  di.low = TREE_INT_CST_ELT (arg1, 0);
  if (TREE_INT_CST_NUNITS (arg1) == 1)
    di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
  else
    di.high = TREE_INT_CST_ELT (arg1, 1);

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
          && (TYPE_ADDR_SPACE (TREE_TYPE (type))
              != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
        return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* Fall through.  */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, REALPART_EXPR,
                                                  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == FIXED_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1_loc (loc, NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
          || TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            goto fold_convert_exit;
        }

      switch (TREE_CODE (orig))
        {
        case FIXED_POINT_TYPE:
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case BOOLEAN_TYPE:
        case REAL_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
        case FIXED_POINT_TYPE:
          return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                  fold_convert_loc (loc, TREE_TYPE (type), arg),
                                  fold_convert_loc (loc, TREE_TYPE (type),
                                                    integer_zero_node));
        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 0));
                ipart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 1));
                return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
            ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
            return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      gcc_unreachable ();
    }
 fold_convert_exit:
  return protected_set_expr_location_unshare (tem, loc);
}
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
    case SSA_NAME:

    case COMPONENT_REF:
    case MEM_REF:
    case INDIRECT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
    case OBJ_TYPE_REF:

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case SAVE_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case TARGET_EXPR:
    case COND_EXPR:
    case BIND_EXPR:
      break;

    default:
      /* Assume the worst for front-end tree codes.  */
      if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
        break;
      return false;
    }

  return true;
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue_loc (location_t loc, tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue_loc (location_t loc, tree x)
{
  if (pedantic_lvalues)
    return non_lvalue_loc (loc, x);

  return protected_set_expr_location_unshare (x, loc);
}
/* Given a tree comparison code, return the code that is the logical inverse.
   It is generally not safe to do this for floating-point comparisons, except
   for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
   ERROR_MARK in this case.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
      && code != ORDERED_EXPR && code != UNORDERED_EXPR)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
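/* Example (an added illustration): with NaNs honored, the inverse of
   LT_EXPR is UNGE_EXPR, because !(x < y) must also hold when x and y
   compare unordered; a plain GE_EXPR would be wrong for NaN operands.
   When HONOR_NANS is false, LT_EXPR simply inverts to GE_EXPR.  */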
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (location_t loc,
		     enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  int compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    {
      enum tree_code tcode;

      tcode = compcode_to_comparison ((enum comparison_code) compcode);
      return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
    }
}
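
/* Worked example (illustrative): for "a < b || a == b" the codes LT_EXPR
   and EQ_EXPR are OR-ed in the bit-based encoding, which yields the
   encoding of LE, so the combined comparison built above is simply
   "a <= b".  Conversely, "a < b && a == b" ANDs to COMPCODE_FALSE and
   folds to a constant false.  */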
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */
int
operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
      || TREE_TYPE (arg0) == error_mark_node
      || TREE_TYPE (arg1) == error_mark_node)
    return 0;

  /* Similar, if either does not have a type (like a released SSA name),
     they aren't equal.  */
  if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
    return 0;

  /* Check equality of integer constants before bailing out due to
     precision differences.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    return tree_int_cst_equal (arg0, arg1);

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  As pointers
     strictly don't have a signedness, require either two pointers or
     two non-pointers as well.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
      || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* We cannot consider pointers to different address space equal.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
      && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
	  != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
    return 0;

  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
  if (element_precision (TREE_TYPE (arg0))
      != element_precision (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
	return operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 1), flags)
	       && operand_equal_p (TREE_OPERAND (arg0, 1),
				   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* NOP_EXPR and CONVERT_EXPR are considered equal.  */
      && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
    return 0;

  /* This is needed for conversions and for COMPONENT_REF.
     Might as well play it safe and always test this.  */
  if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.
     If we are taking an invariant address of two identical objects
     they are necessarily equal as well.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (flags & OEP_CONSTANT_ADDRESS_OF)
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	return tree_int_cst_equal (arg0, arg1);

      case FIXED_CST:
	return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
				       TREE_FIXED_CST (arg1));

      case REAL_CST:
	if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
				   TREE_REAL_CST (arg1)))
	  return 1;

	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
	  {
	    /* If we do not distinguish between signed and unsigned zero,
	       consider them equal.  */
	    if (real_zerop (arg0) && real_zerop (arg1))
	      return 1;
	  }
	return 0;

      case VECTOR_CST:
	{
	  unsigned i;

	  if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
	    return 0;

	  for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
	    if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
				  VECTOR_CST_ELT (arg1, i), flags))
	      return 0;

	  return 1;
	}

      case COMPLEX_CST:
	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
				 flags)
		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
				    flags));

      case STRING_CST:
	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
		&& ! memcmp (TREE_STRING_POINTER (arg0),
			     TREE_STRING_POINTER (arg1),
			     TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
				TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
				? OEP_CONSTANT_ADDRESS_OF : 0);

      default:
	break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;
/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, they both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
				    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)					\
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))		\
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
	{
	CASE_CONVERT:
	case FIX_TRUNC_EXPR:
	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
	    return 0;
	  break;
	default:
	  break;
	}

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
	return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), flags)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
	 dereferencing contain a side effect, these cannot be equal,
	 but their addresses can be.  */
      if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
	  && (TREE_SIDE_EFFECTS (arg0)
	      || TREE_SIDE_EFFECTS (arg1)))
	return 0;

      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
	  return OP_SAME (0);

	case TARGET_MEM_REF:
	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
	  /* Require equal extra operands and then fall through to MEM_REF
	     handling of the two common operands.  */
	  if (!OP_SAME_WITH_NULL (2)
	      || !OP_SAME_WITH_NULL (3)
	      || !OP_SAME_WITH_NULL (4))
	    return 0;
	  /* Fallthru.  */
	case MEM_REF:
	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
	  /* Require equal access sizes, and similar pointer types.
	     We can have incomplete types for array references of
	     variable-sized arrays from the Fortran frontend
	     though.  Also verify the types are compatible.  */
	  return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
		   || (TYPE_SIZE (TREE_TYPE (arg0))
		       && TYPE_SIZE (TREE_TYPE (arg1))
		       && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
					   TYPE_SIZE (TREE_TYPE (arg1)),
					   flags)))
		  && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
		  && alias_ptr_types_compatible_p
		       (TREE_TYPE (TREE_OPERAND (arg0, 1)),
			TREE_TYPE (TREE_OPERAND (arg1, 1)))
		  && OP_SAME (0) && OP_SAME (1));

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  /* Operands 2 and 3 may be null.
	     Compare the array index by value if it is constant first as we
	     may have different types but same value here.  */
	  if (!OP_SAME (0))
	    return 0;
	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
	  return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
				       TREE_OPERAND (arg1, 1))
		   || OP_SAME (1))
		  && OP_SAME_WITH_NULL (2)
		  && OP_SAME_WITH_NULL (3));

	case COMPONENT_REF:
	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
	     may be NULL when we're called to compare MEM_EXPRs.  */
	  if (!OP_SAME_WITH_NULL (0)
	      || !OP_SAME (1))
	    return 0;
	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
	  return OP_SAME_WITH_NULL (2);

	case BIT_FIELD_REF:
	  if (!OP_SAME (0))
	    return 0;
	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
	  return OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}

    case tcc_expression:
      switch (TREE_CODE (arg0))
	{
	case ADDR_EXPR:
	case TRUTH_NOT_EXPR:
	  return OP_SAME (0);

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  return OP_SAME (0) && OP_SAME (1);

	case FMA_EXPR:
	case WIDEN_MULT_PLUS_EXPR:
	case WIDEN_MULT_MINUS_EXPR:
	  if (!OP_SAME (2))
	    return 0;
	  /* The multiplication operands are commutative.  */
	  /* FALLTHRU */

	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  if (OP_SAME (0) && OP_SAME (1))
	    return 1;

	  /* Otherwise take into account this is a commutative operation.  */
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 1), flags)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 0), flags));

	case COND_EXPR:
	case VEC_COND_EXPR:
	case DOT_PROD_EXPR:
	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}

    case tcc_vl_exp:
      switch (TREE_CODE (arg0))
	{
	case CALL_EXPR:
	  /* If the CALL_EXPRs call different functions, then they
	     clearly can not be equal.  */
	  if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
				 flags))
	    return 0;

	  {
	    unsigned int cef = call_expr_flags (arg0);
	    if (flags & OEP_PURE_SAME)
	      cef &= ECF_CONST | ECF_PURE;
	    else
	      cef &= ECF_CONST;
	    if (!cef)
	      return 0;
	  }

	  /* Now see if all the arguments are the same.  */
	  {
	    const_call_expr_arg_iterator iter0, iter1;
	    const_tree a0, a1;
	    for (a0 = first_const_call_expr_arg (arg0, &iter0),
		   a1 = first_const_call_expr_arg (arg1, &iter1);
		 a0 && a1;
		 a0 = next_const_call_expr_arg (&iter0),
		   a1 = next_const_call_expr_arg (&iter1))
	      if (! operand_equal_p (a0, a1, flags))
		return 0;

	    /* If we get here and both argument lists are exhausted
	       then the CALL_EXPRs are equal.  */
	    return ! (a0 || a1);
	  }

	default:
	  return 0;
	}

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
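
/* Worked example (illustrative): operand_equal_p considers "a + b" and
   "b + a" equal through the commutative case above.  REAL_CST operands
   are compared bit-for-bit via REAL_VALUES_IDENTICAL, so -0.0 and 0.0
   are not considered equal when signed zeros are honored, even though
   -0.0 == 0.0 at run time.  */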
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
	 to match the longer operand.  */
      primarg1 = fold_convert (signed_or_unsigned_type_for
			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
	return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    tclass = tcc_binary;

  else if (tclass == tcc_expression && code == SAVE_EXPR
	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      tclass = tcc_unary;
      *save_p = 1;
    }

  switch (tclass)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;
      else
	return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;
      else
	return 0;

      return 1;

    default:
      return 0;
    }
}
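
/* For example, for ARG "a < b && b != a" this returns 1 with *CVAL1 == a
   and *CVAL2 == b: both comparisons mention only those two values, and no
   other variables occur in the expression.  */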
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (location_t loc, tree arg, tree old0, tree new0,
	    tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    tclass = tcc_binary;

  switch (tclass)
    {
    case tcc_unary:
      return fold_build1_loc (loc, code, type,
			      eval_subst (loc, TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2_loc (loc, code, type,
			      eval_subst (loc, TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (loc, TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
	{
	case SAVE_EXPR:
	  return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
			     old1, new1);

	case COMPOUND_EXPR:
	  return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
			     old1, new1);

	case COND_EXPR:
	  return fold_build3_loc (loc, code, type,
				  eval_subst (loc, TREE_OPERAND (arg, 0),
					      old0, new0, old1, new1),
				  eval_subst (loc, TREE_OPERAND (arg, 1),
					      old0, new0, old1, new1),
				  eval_subst (loc, TREE_OPERAND (arg, 2),
					      old0, new0, old1, new1));
	default:
	  break;
	}
      /* Fall through - ???  */

    case tcc_comparison:
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold_build2_loc (loc, code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}
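
/* For example, eval_subst on "x < y" with OLD0 = x, NEW0 = 1, OLD1 = y,
   NEW1 = 0 rebuilds the comparison as "1 < 0"; fold can then test what a
   two-valued expression evaluates to under each assignment of its
   comparison operands.  */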
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement cast to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
		       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
		       fold_ignored_result (omitted), t);

  return non_lvalue_loc (loc, t);
}
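
/* For example, folding "f () * 0" produces RESULT = 0 with OMITTED = f ();
   because the call has side effects, the tree returned is
   COMPOUND_EXPR <f (), 0>, so f is still invoked.  */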
/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
			       tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement cast to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
		       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
		       fold_ignored_result (omitted), t);

  return pedantic_non_lvalue_loc (loc, t);
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands_loc (location_t loc, tree type, tree result,
		       tree omitted1, tree omitted2)
{
  tree t = fold_convert_loc (loc, type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
}
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

static tree
fold_truth_not_expr (location_t loc, tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  location_t loc1, loc2;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
	  && flag_trapping_math
	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
	  && code != NE_EXPR && code != EQ_EXPR)
	return NULL_TREE;

      code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
      if (code == ERROR_MARK)
	return NULL_TREE;

      return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
			 TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_OR_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_AND_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
			   TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	return build2_loc (loc, TRUTH_XOR_EXPR, type,
			   invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
			   TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ORIF_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
	tree arg1 = TREE_OPERAND (arg, 1);
	tree arg2 = TREE_OPERAND (arg, 2);

	loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
	loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);

	/* A COND_EXPR may have a throw as one operand, which
	   then has void type.  Just leave void operands
	   as they are.  */
	return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
			   VOID_TYPE_P (TREE_TYPE (arg1))
			   ? arg1 : invert_truthvalue_loc (loc1, arg1),
			   VOID_TYPE_P (TREE_TYPE (arg2))
			   ? arg2 : invert_truthvalue_loc (loc2, arg2));
      }

    case COMPOUND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, COMPOUND_EXPR, type,
			 TREE_OPERAND (arg, 0),
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));

    CASE_CONVERT:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
	return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

      /* ... fall through ...  */

    case FLOAT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, TREE_CODE (arg), type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	return NULL_TREE;
      return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, CLEANUP_POINT_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    default:
      return NULL_TREE;
    }
}
/* Fold the truth-negation of ARG.  This never alters ARG itself.  We
   assume that ARG is an operation that returns a truth value (0 or 1
   for scalars, 0 or -1 for vectors).  Return the folded expression if
   folding is successful.  Otherwise, return NULL_TREE.  */

static tree
fold_invert_truthvalue (location_t loc, tree arg)
{
  tree type = TREE_TYPE (arg);
  return fold_unary_loc (loc, VECTOR_TYPE_P (type)
			      ? BIT_NOT_EXPR
			      : TRUTH_NOT_EXPR,
			 type, arg);
}
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1 for scalars, 0 or -1 for vectors).  */

tree
invert_truthvalue_loc (location_t loc, tree arg)
{
  if (TREE_CODE (arg) == ERROR_MARK)
    return arg;

  tree type = TREE_TYPE (arg);
  return fold_build1_loc (loc, VECTOR_TYPE_P (type)
			       ? BIT_NOT_EXPR
			       : TRUTH_NOT_EXPR,
			  type, arg);
}
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
	(A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (location_t loc, enum tree_code code, tree type,
		     tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
	  && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  common = fold_convert_loc (loc, type, common);
  left = fold_convert_loc (loc, type, left);
  right = fold_convert_loc (loc, type, right);
  return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
			  fold_build2_loc (loc, code, type, left, right));
}
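
/* For example, with constants involved, "(x | 3) & (x | 5)" distributes
   to "x | (3 & 5)", which constant folding immediately reduces to
   "x | 1".  */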
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */

static tree
distribute_real_division (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
			  TREE_OPERAND (arg1, 1), 0))
    return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
			    fold_build2_loc (loc, code, type,
					     TREE_OPERAND (arg0, 0),
					     TREE_OPERAND (arg1, 0)),
			    TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
		       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
	real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
	real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2_loc (loc, MULT_EXPR, type,
			      TREE_OPERAND (arg0, 0),
			      build_real (type, r0));
    }

  return NULL_TREE;
}
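
/* For example, with unsafe math optimizations enabled, "a / 2.0 + a / 4.0"
   hits the second pattern: 1/2.0 + 1/4.0 is computed at compile time and
   the expression becomes "a * 0.75".  */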
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (location_t loc, tree inner, tree type,
		    HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
{
  tree result, bftype;

  if (bitpos == 0)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
	   || POINTER_TYPE_P (TREE_TYPE (inner)))
	  && tree_fits_shwi_p (size)
	  && tree_to_shwi (size) == bitsize)
	return fold_convert_loc (loc, type, inner);
    }

  bftype = type;
  if (TYPE_PRECISION (bftype) != bitsize
      || TYPE_UNSIGNED (bftype) == !unsignedp)
    bftype = build_nonstandard_integer_type (bitsize, 0);

  result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
		       size_int (bitsize), bitsize_int (bitpos));

  if (bftype != type)
    result = fold_convert_loc (loc, type, result);

  return result;
}
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (location_t loc, enum tree_code code,
			    tree compare_type, tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
				&lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
	 sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
				    &runsignedp, &rvolatilep, false);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
	  || lunsignedp != runsignedp || offset != 0
	  || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
	return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
			 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
			 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
				TYPE_ALIGN (TREE_TYPE (rinner))),
			 word_mode, false);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst_type (unsigned_type, -1);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (nbitsize - lbitsize - lbitpos));

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return fold_build2_loc (loc, code, compare_type,
			    fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
					     make_bit_field_ref (loc, linner,
								 unsigned_type,
								 nbitsize,
								 nbitpos, 1),
					     mask),
			    fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
					     make_bit_field_ref (loc, rinner,
								 unsigned_type,
								 nbitsize,
								 nbitpos, 1),
					     mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (wi::lrshift (rhs, lbitsize) != 0)
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }
  else
    {
      wide_int tem = wi::arshift (rhs, lbitsize - 1);
      if (tem != 0 && tem != -1)
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);

  rhs = const_binop (BIT_AND_EXPR,
		     const_binop (LSHIFT_EXPR,
				  fold_convert_loc (loc, unsigned_type, rhs),
				  size_int (lbitpos)),
		     mask);

  lhs = build2_loc (loc, code, compare_type,
		    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
  return lhs;
}
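
/* Worked example (illustrative): given "struct { unsigned f : 3; } s;"
   the test "s.f == 2" can be rewritten to mask the containing word and
   compare against the constant shifted into the field's position,
   conceptually (WORD & MASK) == (2 << SHIFT), where WORD, MASK and SHIFT
   stand for the values computed above; no extraction shifts remain.  */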
/* Subroutine for fold_truth_andor_1: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
			HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
			int *punsignedp, int *pvolatilep,
			tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (CONVERT_EXPR_P (exp)
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
			       punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst_type (unsigned_type, -1);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
			    fold_convert_loc (loc, unsigned_type, and_mask),
			    mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bit positions and MASK is SIGNED.  */

static bool
all_ones_mask_p (const_tree mask, unsigned int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);

  /* If this function returns true when the type of the mask is
     UNSIGNED, then there will be errors.  In particular see
     gcc.c-torture/execute/990326-1.c.  There does not appear to be
     any documentation paper trail as to why this is so.  But the pre
     wide-int code worked with that restriction and it has been preserved
     here.  */
  if (size > precision || TYPE_SIGN (type) == UNSIGNED)
    return false;

  return wi::mask (size, false, precision) == mask;
}
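
/* For example, with a signed 32-bit mask type, the constant 0x7f is seven
   low-order ones, so all_ones_mask_p (mask, 7) holds while
   all_ones_mask_p (mask, 8) does not.  */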
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, const_tree val)
{
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (wi::only_sign_bit_p (val, width))
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
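
/* For example, if EXP has type signed char, only VAL == -128 (bit pattern
   0x80) is its sign bit, so sign_bit_p returns EXP for that value and
   NULL_TREE for any other.  */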
/* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static bool
simple_operand_p (const_tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
	  || TREE_CODE (exp) == SSA_NAME
	  || (DECL_P (exp)
	      && ! TREE_ADDRESSABLE (exp)
	      && ! TREE_THIS_VOLATILE (exp)
	      && ! DECL_NONLOCAL (exp)
	      /* Don't regard global variables as simple.  They may be
		 allocated in ways unknown to the compiler (shared memory,
		 #pragma weak, etc).  */
	      && ! TREE_PUBLIC (exp)
	      && ! DECL_EXTERNAL (exp)
	      /* Weakrefs are not safe to be read, since they can be NULL.
		 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
		 have DECL_WEAK flag set.  */
	      && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
	      /* Loading a static variable is unduly expensive, but global
		 registers aren't expensive.  */
	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}
/* Subroutine for fold_truth_andor: determine if an operand is simple enough
   to be evaluated unconditionally.
   In addition to simple_operand_p, we assume that comparisons, conversions,
   and logic-not operations are simple, if their operands are simple, too.  */

static bool
simple_operand_p_2 (tree exp)
{
  enum tree_code code;

  if (TREE_SIDE_EFFECTS (exp)
      || tree_could_trap_p (exp))
    return false;

  while (CONVERT_EXPR_P (exp))
    exp = TREE_OPERAND (exp, 0);

  code = TREE_CODE (exp);

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return (simple_operand_p (TREE_OPERAND (exp, 0))
	    && simple_operand_p (TREE_OPERAND (exp, 1)));

  if (code == TRUTH_NOT_EXPR)
    return simple_operand_p_2 (TREE_OPERAND (exp, 0));

  return simple_operand_p (exp);
}
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
	X == 2 || X == 3 || X == 4 || X == 5
   and
	X >= 2 && X <= 5
   are converted to
	(unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */

/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */
static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
	     tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
/* Helper routine for make_range.  Perform one step for it, return
   new expression if the loop should continue or NULL_TREE if it should
   stop.  */

tree
make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
		 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
		 bool *strict_overflow_p)
{
  tree arg0_type = TREE_TYPE (arg0);
  tree n_low, n_high, low = *p_low, high = *p_high;
  int in_p = *p_in_p, n_in_p;

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      /* We can only do something if the range is testing for zero.  */
      if (low == NULL_TREE || high == NULL_TREE
	  || ! integer_zerop (low) || ! integer_zerop (high))
	return NULL_TREE;
      *p_in_p = ! in_p;
      return arg0;

    case EQ_EXPR: case NE_EXPR:
    case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
      /* We can only do something if the range is testing for zero
	 and if the second operand is an integer constant.  Note that
	 saying something is "in" the range we make is done by
	 complementing IN_P since it will set in the initial case of
	 being not equal to zero; "out" is leaving it alone.  */
      if (low == NULL_TREE || high == NULL_TREE
	  || ! integer_zerop (low) || ! integer_zerop (high)
	  || TREE_CODE (arg1) != INTEGER_CST)
	return NULL_TREE;

      switch (code)
	{
	case NE_EXPR:  /* - [c, c]  */
	  low = high = arg1;
	  break;
	case EQ_EXPR:  /* + [c, c]  */
	  in_p = ! in_p, low = high = arg1;
	  break;
	case GT_EXPR:  /* - [-, c] */
	  low = 0, high = arg1;
	  break;
	case GE_EXPR:  /* + [c, -] */
	  in_p = ! in_p, low = arg1, high = 0;
	  break;
	case LT_EXPR:  /* - [c, -] */
	  low = arg1, high = 0;
	  break;
	case LE_EXPR:  /* + [-, c] */
	  in_p = ! in_p, low = 0, high = arg1;
	  break;
	default:
	  gcc_unreachable ();
	}

      /* If this is an unsigned comparison, we also know that EXP is
	 greater than or equal to zero.  We base the range tests we make
	 on that fact, so we record it here so we can parse existing
	 range tests.  We test arg0_type since often the return type
	 of, e.g. EQ_EXPR, is boolean.  */
      if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
	{
	  if (! merge_ranges (&n_in_p, &n_low, &n_high,
			      in_p, low, high, 1,
			      build_int_cst (arg0_type, 0),
			      NULL_TREE))
	    return NULL_TREE;

	  in_p = n_in_p, low = n_low, high = n_high;

	  /* If the high bound is missing, but we have a nonzero low
	     bound, reverse the range so it goes from zero to the low bound
	     minus 1.  */
	  if (high == 0 && low && ! integer_zerop (low))
	    {
	      in_p = ! in_p;
	      high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				  build_int_cst (TREE_TYPE (low), 1), 0);
	      low = build_int_cst (arg0_type, 0);
	    }
	}

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;

    case NEGATE_EXPR:
      /* If flag_wrapv and ARG0_TYPE is signed, make sure
	 low and high are non-NULL, then normalize will DTRT.  */
      if (!TYPE_UNSIGNED (arg0_type)
	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
	{
	  if (low == NULL_TREE)
	    low = TYPE_MIN_VALUE (arg0_type);
	  if (high == NULL_TREE)
	    high = TYPE_MAX_VALUE (arg0_type);
	}

      /* (-x) IN [a,b] -> x in [-b, -a]  */
      n_low = range_binop (MINUS_EXPR, exp_type,
			   build_int_cst (exp_type, 0),
			   0, high, 1);
      n_high = range_binop (MINUS_EXPR, exp_type,
			    build_int_cst (exp_type, 0),
			    0, low, 0);
      if (n_high != 0 && TREE_OVERFLOW (n_high))
	return NULL_TREE;
      goto normalize;

    case BIT_NOT_EXPR:
      /* ~ X -> -X - 1  */
      return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
			 build_int_cst (exp_type, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
      if (TREE_CODE (arg1) != INTEGER_CST)
	return NULL_TREE;

      /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
	 move a constant to the other side.  */
      if (!TYPE_UNSIGNED (arg0_type)
	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
	return NULL_TREE;

      /* If EXP is signed, any overflow in the computation is undefined,
	 so we don't worry about it so long as our computations on
	 the bounds don't overflow.  For unsigned, overflow is defined
	 and this is exactly the right thing.  */
      n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			   arg0_type, low, 0, arg1, 0);
      n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			    arg0_type, high, 1, arg1, 0);
      if ((n_low != 0 && TREE_OVERFLOW (n_low))
	  || (n_high != 0 && TREE_OVERFLOW (n_high)))
	return NULL_TREE;

      if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
	*strict_overflow_p = true;

      normalize:
      /* Check for an unsigned range which has wrapped around the maximum
	 value thus making n_high < n_low, and normalize it.  */
      if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
	{
	  low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
			     build_int_cst (TREE_TYPE (n_high), 1), 0);
	  high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
			      build_int_cst (TREE_TYPE (n_low), 1), 0);

	  /* If the range is of the form +/- [ x+1, x ], we won't
	     be able to normalize it.  But then, it represents the
	     whole range or the empty set, so make it
	     +/- [ -, - ].  */
	  if (tree_int_cst_equal (n_low, low)
	      && tree_int_cst_equal (n_high, high))
	    low = high = 0;
	  else
	    in_p = ! in_p;
	}
      else
	low = n_low, high = n_high;

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;

    CASE_CONVERT:
    case NON_LVALUE_EXPR:
      if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
	return NULL_TREE;

      if (! INTEGRAL_TYPE_P (arg0_type)
	  || (low != 0 && ! int_fits_type_p (low, arg0_type))
	  || (high != 0 && ! int_fits_type_p (high, arg0_type)))
	return NULL_TREE;

      n_low = low, n_high = high;

      if (n_low != 0)
	n_low = fold_convert_loc (loc, arg0_type, n_low);

      if (n_high != 0)
	n_high = fold_convert_loc (loc, arg0_type, n_high);

      /* If we're converting arg0 from an unsigned type, to exp,
	 a signed type, we will be doing the comparison as unsigned.
	 The tests above have already verified that LOW and HIGH
	 are both positive.

	 So we have to ensure that we will handle large unsigned
	 values the same way that the current signed bounds treat
	 negative values.  */

      if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
	{
	  tree high_positive;
	  tree equiv_type;
	  /* For fixed-point modes, we need to pass the saturating flag
	     as the 2nd parameter.  */
	  if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
	    equiv_type
	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
						TYPE_SATURATING (arg0_type));
	  else
	    equiv_type
	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);

	  /* A range without an upper bound is, naturally, unbounded.
	     Since convert would have cropped a very large value, use
	     the max value for the destination type.  */
	  high_positive
	    = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
	      : TYPE_MAX_VALUE (arg0_type);

	  if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
	    high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
					     fold_convert_loc (loc, arg0_type,
							       high_positive),
					     build_int_cst (arg0_type, 1));

	  /* If the low bound is specified, "and" the range with the
	     range for which the original unsigned value will be
	     positive.  */
	  if (low != 0)
	    {
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
				  1, fold_convert_loc (loc, arg0_type,
						       integer_zero_node),
				  high_positive))
		return NULL_TREE;

	      in_p = (n_in_p == in_p);
	    }
	  else
	    {
	      /* Otherwise, "or" the range with the range of the input
		 that will be interpreted as negative.  */
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
				  1, fold_convert_loc (loc, arg0_type,
						       integer_zero_node),
				  high_positive))
		return NULL_TREE;

	      in_p = (in_p != n_in_p);
	    }
	}

      *p_low = n_low;
      *p_high = n_high;
      *p_in_p = in_p;
      return arg0;

    default:
      return NULL_TREE;
    }
}
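
/* For example, one step over "x < 5" (code LT_EXPR, ARG1 == 5), starting
   from the initial range "- [0, 0]" built by make_range, produces the
   range "- [5, -]": x lies outside [5, +inf), which is exactly x < 5.  */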
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */

tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
	    bool *strict_overflow_p)
{
  enum tree_code code;
  tree arg0, arg1 = NULL_TREE;
  tree exp_type, nexp;
  int in_p;
  tree low, high;
  location_t loc = EXPR_LOCATION (exp);

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);
      arg0 = NULL_TREE;

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  if (TREE_OPERAND_LENGTH (exp) > 0)
	    arg0 = TREE_OPERAND (exp, 0);
	  if (TREE_CODE_CLASS (code) == tcc_binary
	      || TREE_CODE_CLASS (code) == tcc_comparison
	      || (TREE_CODE_CLASS (code) == tcc_expression
		  && TREE_OPERAND_LENGTH (exp) > 1))
	    arg1 = TREE_OPERAND (exp, 1);
	}
      if (arg0 == NULL_TREE)
	break;

      nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
			      &high, &in_p, strict_overflow_p);
      if (nexp == NULL_TREE)
	break;
      exp = nexp;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

tree
build_range_check (location_t loc, tree type, tree exp, int in_p,
		   tree low, tree high)
{
  tree etype = TREE_TYPE (exp), value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (! in_p)
    {
      value = build_range_check (loc, type, exp, 1, low, high);
      if (value != 0)
	return invert_truthvalue_loc (loc, value);

      return 0;
    }

  if (low == 0 && high == 0)
    return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);

  if (low == 0)
    return fold_build2_loc (loc, LE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, high));

  if (high == 0)
    return fold_build2_loc (loc, GE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2_loc (loc, EQ_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
	{
	  etype = unsigned_type_for (etype);
	  high = fold_convert_loc (loc, etype, high);
	  exp = fold_convert_loc (loc, etype, exp);
	}
      return build_range_check (loc, type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      int prec = TYPE_PRECISION (etype);

      if (wi::mask (prec - 1, false, prec) == high)
	{
	  if (TYPE_UNSIGNED (etype))
	    {
	      tree signed_etype = signed_type_for (etype);
	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
		etype
		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
	      else
		etype = signed_etype;
	      exp = fold_convert_loc (loc, etype, exp);
	    }
	  return fold_build2_loc (loc, GT_EXPR, type, exp,
				  build_int_cst (etype, 0));
	}
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetics for the type of the expression.
     First make sure that arithmetics in this type is valid, then make sure
     that it wraps around.  */
  if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
    etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
					    TYPE_UNSIGNED (etype));

  if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
	 for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
			  build_int_cst (TREE_TYPE (maxv), 1), 1);
      minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
				      minv, 1, maxv, 1)))
	etype = utype;
      else
	return 0;
    }

  high = fold_convert_loc (loc, etype, high);
  low = fold_convert_loc (loc, etype, low);
  exp = fold_convert_loc (loc, etype, exp);

  value = const_binop (MINUS_EXPR, high, low);

  if (POINTER_TYPE_P (etype))
    {
      if (value != 0 && !TREE_OVERFLOW (value))
	{
	  low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
	  return build_range_check (loc, type,
				    fold_build_pointer_plus_loc (loc, exp, low),
				    1, build_int_cst (etype, 0), value);
	}
      return 0;
    }

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (loc, type,
			      fold_build2_loc (loc, MINUS_EXPR, etype,
					       exp, low),
			      1, build_int_cst (etype, 0), value);

  return 0;
}
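
/* Editorial sketch (not part of GCC): the unsigned wrap-around trick
   that build_range_check uses above, in plain C.  Assuming low <= high
   and unsigned arithmetic (which wraps by definition),

     low <= x && x <= high

   is equivalent to the single comparison below.  */

static inline int
range_check_example (unsigned int x, unsigned int low, unsigned int high)
{
  /* If x < low, x - low wraps around to a huge value and the test
     fails; otherwise it succeeds exactly when x - low <= high - low.  */
  return x - low <= high - low;
}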
/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
			build_int_cst (TREE_TYPE (val), 1), 0);
}
/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
			build_int_cst (TREE_TYPE (val), 1), 0);
}
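
/* Editorial example (not part of GCC): range_successor of the
   INTEGER_CST 41 is 42, while range_successor of TYPE_MAX_VALUE
   (e.g. 255 for unsigned char) is 0, signalling "no successor" so
   that callers such as merge_ranges below can punt.  */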
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

bool
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the second range to the end of the first.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  low = range_successor (high1);
	  high = high0;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* We are in the weird situation where high0 > high1 but
		 high1 has no successor.  Punt.  */
	      return 0;
	    }
	}
      else if (! subset || highequal)
	{
	  low = low0;
	  high = range_predecessor (low1);
	  in_p = 1;
	  if (high == 0)
	    {
	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
	      return 0;
	    }
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  low = range_successor (high0);
	  high = high1;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* high1 > high0 but high0 has no successor.  Punt.  */
	      return 0;
	    }
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_successor (high0),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    {
	      /* Canonicalize - [min, x] into - [-, x].  */
	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (low0)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (low0))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (low0,
					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
		      low0 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
			&& integer_zerop (low0))
		      low0 = 0;
		    break;
		  default:
		    break;
		  }

	      /* Canonicalize - [x, max] into - [x, -].  */
	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (high1)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (high1))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (high1,
					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
		      high1 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
						       high1, 1,
						       build_int_cst (TREE_TYPE (high1), 1),
						       1)))
		      high1 = 0;
		    break;
		  default:
		    break;
		  }

	      /* The ranges might be also adjacent between the maximum and
		 minimum values of the given type.  For
		 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
		 return + [x + 1, y - 1].  */
	      if (low0 == 0 && high1 == 0)
		{
		  low = range_successor (high0);
		  high = range_predecessor (low1);
		  if (low == 0 || high == 0)
		    return 0;

		  in_p = 1;
		}
	      else
		return 0;
	    }
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
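
/* Editorial example (not part of GCC): the two "in" ranges produced
   for `ch >= '0'' and `ch <= '9'' are ['0', +inf] and [-inf, '9'].
   With in0_p = in1_p = 1 they overlap, neither is a subset of the
   other, so the first case above yields the single range ['0', '9']
   with *PIN_P = 1.  */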
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is being used also to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (location_t loc, tree type,
				tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && (FLOAT_TYPE_P (TREE_TYPE (arg01))
	  ? real_zerop (arg01)
	  : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	     /* In the case that A is of the form X-Y, '-A' (arg2) may
		have already been folded to Y-X, check for that. */
	  || (TREE_CODE (arg1) == MINUS_EXPR
	      && TREE_CODE (arg2) == MINUS_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0),
				  TREE_OPERAND (arg2, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg1, 1),
				  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	tem = fold_convert_loc (loc, arg1_type, arg1);
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert_loc (loc, signed_type_for
				   (TREE_TYPE (arg1)), arg1);
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert_loc (loc, signed_type_for
				   (TREE_TYPE (arg1)), arg1);
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return negate_expr (fold_convert_loc (loc, type, tem));
      default:
	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      else if (comp_code == EQ_EXPR)
	return build_zero_cst (type);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
	  || VECTOR_TYPE_P (type)
	  || (strcmp (lang_hooks.name, "GNU C++") != 0
	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
	  || ! maybe_lvalue_p (arg1)
	  || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
	{
	  comp_type = type;
	  comp_op0 = arg1;
	  comp_op1 = arg2;
	}

      switch (comp_code)
	{
	case EQ_EXPR:
	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
	case NE_EXPR:
	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
	case LE_EXPR:
	case LT_EXPR:
	case UNLE_EXPR:
	case UNLT_EXPR:
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
		    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					      fold_convert_loc (loc, type, tem));
	    }
	  break;
	case GE_EXPR:
	case GT_EXPR:
	case UNGE_EXPR:
	case UNGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
		    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					      fold_convert_loc (loc, type, tem));
	    }
	  break;
	case UNEQ_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, arg2));
	  break;
	case LTGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, arg1));
	  break;
	default:
	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	  break;
	}
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
	if (TREE_CODE (arg1) == INTEGER_CST)
	  break;
	/* We can replace A with C1 in this case.  */
	arg1 = fold_convert_loc (loc, type, arg01);
	return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
	/* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
	   MIN_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));
	  }
	break;

      case LE_EXPR:
	/* If C1 is C2 - 1, this is min(A, C2), with the same care
	   as above.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));
	  }
	break;

      case GT_EXPR:
	/* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
	   MAX_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	  }
	break;

      case GE_EXPR:
	/* If C1 is C2 + 1, this is max(A, C2), with the same care as above.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	  }
	break;

      case NE_EXPR:
	break;

      default:
	gcc_unreachable ();
      }

  return NULL_TREE;
}
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
		false) >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
	       || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree tem, lhs, rhs;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when simplifying range test");

  if (!INTEGRAL_TYPE_P (type))
    return 0;

  lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (loc, type,
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue_loc (loc, tem) : tem;
    }

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
	   && lhs != 0 && rhs != 0
	   && (code == TRUTH_ANDIF_EXPR
	       || code == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build2_loc (loc, code == TRUTH_ANDIF_EXPR
			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			   type, op0, op1);

      else if (!lang_hooks.decls.global_bindings_p ()
	       && !CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (loc, type, common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (loc, type, common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (warnmsg,
				       WARN_STRICT_OVERFLOW_COMPARISON);
	      return build2_loc (loc, code == TRUTH_ANDIF_EXPR
				 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
				 type, lhs, rhs);
	    }
	}
    }

  return 0;
}
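
/* Editorial sketch (not part of GCC): end to end, `ch >= '0' &&
   ch <= '9'' makes the two make_range calls above return the same
   variable CH with ranges ['0', +inf] and [-inf, '9']; merge_ranges
   combines them into ['0', '9'] and build_range_check emits one
   unsigned comparison, which in plain C reads roughly:  */

static inline int
is_digit_example (int ch)
{
  /* What `ch >= '0' && ch <= '9'' folds to, expressed in plain C.  */
  return (unsigned int) (ch - '0') <= 9u;
}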
/* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is signed-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask));
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
}
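
/* Editorial example (not part of GCC): with P = 8 in a 32-bit type and
   C = 0xffffff80 (the 8-bit value 0x80 already sign-extended), the XOR
   mask built by the shifts above is 0xffffff00, so the result is 0x80:
   the extra bits come out zero exactly because C was sign-extended.
   For the non-extended C = 0x80 the same mask yields 0xffffff80, whose
   extra bits are nonzero.  */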
/* For an expression that has the form
     (A && B) || ~B
   or
     (A || B) && ~B,
   we can drop one of the inner expressions and simplify to
     A || ~B
   or
     A && ~B.
   LOC is the location of the resulting expression.  OP is the inner
   logical operation; the left-hand side in the examples above, while CMPOP
   is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
   removing a condition that guards another, as in
   (A != NULL && A->...) || A == NULL
   which we must not transform.  If RHS_ONLY is true, only eliminate the
   right-most operand of the inner logical operation.  */

static tree
merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
				 bool rhs_only)
{
  tree type = TREE_TYPE (cmpop);
  enum tree_code code = TREE_CODE (cmpop);
  enum tree_code truthop_code = TREE_CODE (op);
  tree lhs = TREE_OPERAND (op, 0);
  tree rhs = TREE_OPERAND (op, 1);
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code rhs_code = TREE_CODE (rhs);
  enum tree_code lhs_code = TREE_CODE (lhs);
  enum tree_code inv_code;

  if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
    return NULL_TREE;

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return NULL_TREE;

  if (rhs_code == truthop_code)
    {
      tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
      if (newrhs != NULL_TREE)
	{
	  rhs = newrhs;
	  rhs_code = TREE_CODE (rhs);
	}
    }
  if (lhs_code == truthop_code && !rhs_only)
    {
      tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
      if (newlhs != NULL_TREE)
	{
	  lhs = newlhs;
	  lhs_code = TREE_CODE (lhs);
	}
    }

  inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
  if (inv_code == rhs_code
      && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return lhs;
  if (!rhs_only && inv_code == lhs_code
      && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return rhs;
  if (rhs != orig_rhs || lhs != orig_lhs)
    return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
			    lhs, rhs);
  return NULL_TREE;
}
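
/* Editorial example (not part of GCC): with OP = `x == 1 && y == 2'
   and CMPOP = `x != 1', the inverted comparison of CMPOP matches the
   left arm of OP, so the function above returns `y == 2', giving

     (x == 1 && y == 2) || x != 1  -->  y == 2 || x != 1

   The RHS_ONLY flag keeps guards such as
   (A != NULL && A->f) || A == NULL intact.  */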
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with the a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made this by
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
		    tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  HOST_WIDE_INT first_bit, end_bit;
  int volatilep;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
		    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
		    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
	  && operand_equal_p (lr_arg, rr_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
	       && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  */

  if (BRANCH_COST (optimize_function_for_speed_p (cfun),
		   false) >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, NE_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, EQ_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (loc, ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask,
				     &ll_and_mask);
  lr_inner = decode_field_reference (loc, lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask,
				     &lr_and_mask);
  rl_inner = decode_field_reference (loc, rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask,
				     &rl_and_mask);
  rr_inner = decode_field_reference (loc, rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask,
				     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
			 size_int (xll_bitpos));
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
			 size_int (xrl_bitpos));

  if (l_const)
    {
      l_const = fold_convert_loc (loc, lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, ll_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  if (r_const)
    {
      r_const = fold_convert_loc (loc, lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, rl_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, lr_mask),
			     size_int (xlr_bitpos));
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, rr_mask),
			     size_int (xrr_bitpos));

      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
	{
	  lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	{
	  tree type;

	  lhs = make_bit_field_ref (loc, ll_inner, lntype,
				    ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
	  rhs = make_bit_field_ref (loc, lr_inner, rntype,
				    lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)));
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)));

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = fold_convert_loc (loc, rntype, lhs);
		  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = fold_convert_loc (loc, lntype, rhs);
		  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
			   const_binop (BIT_AND_EXPR, result, r_const)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);
	}
      else
	{
	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);

  return build2_loc (loc, wanted_code, truth_type, result,
		     const_binop (BIT_IOR_EXPR, l_const, r_const));
}
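
/* Editorial example (not part of GCC): given

     struct s { unsigned a : 4; unsigned b : 4; } *p;

   the test `p->a == 2 && p->b == 4' loads two adjacent bit-fields.
   The code above rewrites it as a single byte-sized load compared
   against the merged constant, conceptually

     *(unsigned char *) p == (2 | 4 << 4)

   (bit numbering here assumes little-endian bit allocation;
   BYTES_BIG_ENDIAN targets shift the masks the other way).  */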
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
			    tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
	tree tem
	  = optimize_minmax_comparison (loc,
					invert_tree_comparison (code, false),
					type, op0, op1);
	if (tem)
	  return invert_truthvalue_loc (loc, tem);
	return NULL_TREE;
      }

    case GE_EXPR:
      return
	fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
			 optimize_minmax_comparison
			 (loc, EQ_EXPR, type, arg0, comp_const),
			 optimize_minmax_comparison
			 (loc, GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5   */
	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand_loc (loc, type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

    default:
      return NULL_TREE;
    }
}
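
/* Editorial example (not part of GCC): for `MAX (x, 0) > 5' the
   constants satisfy consts_lt (0 < 5), so the GT_EXPR case above
   returns `x > 5'; for `MAX (x, 0) > -1' it returns constant true,
   since the maximum can never be below zero.  */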
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.

   We set *STRICT_OVERFLOW_P to true if the return values depends on
   signed overflow being undefined.  Otherwise we do not change
   *STRICT_OVERFLOW_P.  */

static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
		bool *strict_overflow_p)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
  depth--;

  return ret;
}

static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
		  bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;
  bool sub_strict_overflow_p;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
	return const_binop (code, fold_convert (ctype, t),
			    fold_convert (ctype, c));
      break;

    CASE_CONVERT: case NON_LVALUE_EXPR:
      /* If op0 is an expression ... */
      if ((COMPARISON_CLASS_P (op0)
	   || UNARY_CLASS_P (op0)
	   || BINARY_CLASS_P (op0)
	   || VL_EXP_CLASS_P (op0)
	   || EXPRESSION_CLASS_P (op0))
	  /* ... and has wrapping overflow, and its type is smaller
	     than ctype, then we cannot pass through as widening.  */
	  && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
	       && (TYPE_PRECISION (ctype)
		   > TYPE_PRECISION (TREE_TYPE (op0))))
	      /* ... or this is a truncation (t is narrower than op0),
		 then we cannot pass through this narrowing.  */
	      || (TYPE_PRECISION (type)
		  < TYPE_PRECISION (TREE_TYPE (op0)))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TYPE_UNSIGNED (ctype)
		      != TYPE_UNSIGNED (TREE_TYPE (op0))))
	      /* ... or has undefined overflow while the converted to
		 type has not, we cannot do the operation in the inner type
		 as that would introduce undefined overflow.  */
	      || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
		  && !TYPE_OVERFLOW_UNDEFINED (type))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && !TREE_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 code == MULT_EXPR
					 ? ctype : NULL_TREE,
					 strict_overflow_p))))
	return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
	 must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
	{
	  tree cstype = (*signed_type_for) (ctype);
	  if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
	      != 0)
	    {
	      t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
	      return fold_convert (ctype, t1);
	    }
	  break;
	}
      /* If the constant is negative, we cannot simplify this.  */
      if (tree_int_cst_sgn (c) == -1)
	break;
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      /* For division and modulus, type can't be unsigned, as e.g.
	 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
	 For signed types, even with wrapping overflow, this is fine.  */
      if (code != MULT_EXPR && TYPE_UNSIGNED (type))
	break;
      if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
	  != 0)
	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      sub_strict_overflow_p = false;
      if ((t1 = extract_muldiv (op0, c, code, wide_type,
				&sub_strict_overflow_p)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type,
				   &sub_strict_overflow_p)) != 0)
	{
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  Signed
	 left-shift overflow is implementation-defined rather than
	 undefined in C90, so do not convert signed left shift into
	 multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
	  && 0 != (t1 = fold_convert (ctype,
				      const_binop (LSHIFT_EXPR,
						   size_one_node,
						   op1)))
	  && !TREE_OVERFLOW (t1))
	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
				       ? MULT_EXPR : FLOOR_DIV_EXPR,
				       ctype,
				       fold_convert (ctype, op0),
				       t1),
			       c, code, wide_type, strict_overflow_p);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      sub_strict_overflow_p = false;
      t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
      t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	{
	  tcode = PLUS_EXPR, op1 = negate_expr (op1);
	  /* If OP1 was not easily negatable, the constant may be OP0.  */
	  if (TREE_CODE (op0) == INTEGER_CST)
	    {
	      tree tem = op0;
	      op0 = op1;
	      op1 = tem;
	      tem = t1;
	      t1 = t2;
	      t2 = tem;
	    }
	}

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c));
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
	    break;
	}
      else
	break;

      /* If we have an unsigned type, we cannot widen the operation since it
	 will change the result if the original computation overflowed.  */
      if (TYPE_UNSIGNED (ctype) && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow
	 and overflow is defined.  With undefined overflow
	 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.  */
      if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
	return fold_build2 (tcode, ctype,
			    fold_build2 (code, ctype,
					 fold_convert (ctype, op0),
					 fold_convert (ctype, c)),
			    op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  /* If the multiplication can overflow we cannot optimize this.  */
	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
	{
	  *strict_overflow_p = true;
	  return omit_one_operand (type, integer_zero_node, op0);
	}

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type,
				   strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type,
					strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
			    fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code)
	{
	  bool overflow_p = false;
	  bool overflow_mul_p;
	  signop sign = TYPE_SIGN (ctype);
	  wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
	  overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
	  if (overflow_mul_p
	      && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
	    overflow_p = true;
	  if (!overflow_p)
	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				wide_int_to_tree (ctype, mul));
	}

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with either an
	 operation or CODE or TCODE.

	 If we have an unsigned type, we cannot do this since it will change
	 the result if the original computation overflowed.  */
      if (TYPE_OVERFLOW_UNDEFINED (ctype)
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
		  && code != MULT_EXPR)))
	{
	  if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     op1, c)));
	    }
	  else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     c, op1)));
	    }
	}
      break;

    default:
      break;
    }

  return 0;
}
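
/* Editorial example (not part of GCC): dividing `x * 8 + y * 16' by 4
   lets extract_muldiv fold the division into each term, giving
   `x * 2 + y * 4'.  This is only valid because either the original
   arithmetic cannot overflow or overflow is undefined for the type,
   which is what *STRICT_OVERFLOW_P records for the caller.  */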
/* Return a node which has the indicated constant VALUE (either 0 or
   1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
   and is of the indicated TYPE.  */

tree
constant_boolean_node (bool value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else if (TREE_CODE (type) == VECTOR_TYPE)
    return build_vector_from_val (type,
				  build_int_cst (TREE_TYPE (type),
						 value ? -1 : 0));
  else
    return fold_convert (type, value ? integer_one_node : integer_zero_node);
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (location_t loc,
				     enum tree_code code,
				     tree type, tree op0, tree op1,
				     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;
  enum tree_code cond_code = COND_EXPR;

  if (TREE_CODE (cond) == COND_EXPR
      || TREE_CODE (cond) == VEC_COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an expression, then it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
    cond_code = VEC_COND_EXPR;

  /* This transformation is only worthwhile if we don't have to wrap ARG
     in a SAVE_EXPR and the operation can be simplified without recursing
     on at least one of the branches once its pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg)
      && (TREE_SIDE_EFFECTS (arg)
	  || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
	  || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
    return NULL_TREE;

  arg = fold_convert_loc (loc, arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert_loc (loc, cond_type, true_value);
      if (cond_first_p)
	lhs = fold_build2_loc (loc, code, type, true_value, arg);
      else
	lhs = fold_build2_loc (loc, code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert_loc (loc, cond_type, false_value);
      if (cond_first_p)
	rhs = fold_build2_loc (loc, code, type, false_value, arg);
      else
	rhs = fold_build2_loc (loc, code, type, arg, false_value);
    }

  /* Check that we have simplified at least one of the branches.  */
  if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
    return NULL_TREE;

  return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
}
6119 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6121 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6122 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6123 ADDEND is the same as X.
6125 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6126 and finite. The problematic cases are when X is zero, and its mode
6127 has signed zeros. In the case of rounding towards -infinity,
6128 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6129 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6132 fold_real_zero_addition_p (const_tree type
, const_tree addend
, int negate
)
6134 if (!real_zerop (addend
))
6137 /* Don't allow the fold with -fsignaling-nans. */
6138 if (HONOR_SNANS (TYPE_MODE (type
)))
6141 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6142 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
)))
6145 /* In a vector or complex, we would need to check the sign of all zeros. */
6146 if (TREE_CODE (addend
) != REAL_CST
)
6149 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6150 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend
)))
6153 /* The mode has signed zeros, and we have to honor their sign.
6154 In this situation, there is only one case we can return true for.
6155 X - 0 is the same as X unless rounding towards -infinity is
6157 return negate
&& !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
));
6160 /* Subroutine of fold() that checks comparisons of built-in math
6161 functions against real constants.
6163 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6164 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6165 is the type of the result and ARG0 and ARG1 are the operands of the
6166 comparison. ARG1 must be a TREE_REAL_CST.
6168 The function returns the constant folded tree if a simplification
6169 can be made, and NULL_TREE otherwise. */
6172 fold_mathfn_compare (location_t loc
,
6173 enum built_in_function fcode
, enum tree_code code
,
6174 tree type
, tree arg0
, tree arg1
)
6178 if (BUILTIN_SQRT_P (fcode
))
6180 tree arg
= CALL_EXPR_ARG (arg0
, 0);
6181 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (arg0
));
6183 c
= TREE_REAL_CST (arg1
);
6184 if (REAL_VALUE_NEGATIVE (c
))
6186 /* sqrt(x) < y is always false, if y is negative. */
6187 if (code
== EQ_EXPR
|| code
== LT_EXPR
|| code
== LE_EXPR
)
6188 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
6190 /* sqrt(x) > y is always true, if y is negative and we
6191 don't care about NaNs, i.e. negative values of x. */
6192 if (code
== NE_EXPR
|| !HONOR_NANS (mode
))
6193 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
6195 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6196 return fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6197 build_real (TREE_TYPE (arg
), dconst0
));
6199 else if (code
== GT_EXPR
|| code
== GE_EXPR
)
6203 REAL_ARITHMETIC (c2
, MULT_EXPR
, c
, c
);
6204 real_convert (&c2
, mode
, &c2
);
6206 if (REAL_VALUE_ISINF (c2
))
6208 /* sqrt(x) > y is x == +Inf, when y is very large. */
6209 if (HONOR_INFINITIES (mode
))
6210 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg
,
6211 build_real (TREE_TYPE (arg
), c2
));
6213 /* sqrt(x) > y is always false, when y is very large
6214 and we don't care about infinities. */
6215 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
6218 /* sqrt(x) > c is the same as x > c*c. */
6219 return fold_build2_loc (loc
, code
, type
, arg
,
6220 build_real (TREE_TYPE (arg
), c2
));
6222 else if (code
== LT_EXPR
|| code
== LE_EXPR
)
6226 REAL_ARITHMETIC (c2
, MULT_EXPR
, c
, c
);
6227 real_convert (&c2
, mode
, &c2
);
6229 if (REAL_VALUE_ISINF (c2
))
6231 /* sqrt(x) < y is always true, when y is a very large
6232 value and we don't care about NaNs or Infinities. */
6233 if (! HONOR_NANS (mode
) && ! HONOR_INFINITIES (mode
))
6234 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
6236 /* sqrt(x) < y is x != +Inf when y is very large and we
6237 don't care about NaNs. */
6238 if (! HONOR_NANS (mode
))
6239 return fold_build2_loc (loc
, NE_EXPR
, type
, arg
,
6240 build_real (TREE_TYPE (arg
), c2
));
6242 /* sqrt(x) < y is x >= 0 when y is very large and we
6243 don't care about Infinities. */
6244 if (! HONOR_INFINITIES (mode
))
6245 return fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6246 build_real (TREE_TYPE (arg
), dconst0
));
6248 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6249 arg
= save_expr (arg
);
6250 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
6251 fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6252 build_real (TREE_TYPE (arg
),
6254 fold_build2_loc (loc
, NE_EXPR
, type
, arg
,
6255 build_real (TREE_TYPE (arg
),
6259 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6260 if (! HONOR_NANS (mode
))
6261 return fold_build2_loc (loc
, code
, type
, arg
,
6262 build_real (TREE_TYPE (arg
), c2
));
6264 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6265 arg
= save_expr (arg
);
6266 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
6267 fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6268 build_real (TREE_TYPE (arg
),
6270 fold_build2_loc (loc
, code
, type
, arg
,
6271 build_real (TREE_TYPE (arg
),
6279 /* Subroutine of fold() that optimizes comparisons against Infinities,
6280 either +Inf or -Inf.
6282 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6283 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6284 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6286 The function returns the constant folded tree if a simplification
6287 can be made, and NULL_TREE otherwise. */
6290 fold_inf_compare (location_t loc
, enum tree_code code
, tree type
,
6291 tree arg0
, tree arg1
)
6293 enum machine_mode mode
;
6294 REAL_VALUE_TYPE max
;
6298 mode
= TYPE_MODE (TREE_TYPE (arg0
));
6300 /* For negative infinity swap the sense of the comparison. */
6301 neg
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
));
6303 code
= swap_tree_comparison (code
);
6308 /* x > +Inf is always false, if with ignore sNANs. */
6309 if (HONOR_SNANS (mode
))
6311 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6314 /* x <= +Inf is always true, if we don't case about NaNs. */
6315 if (! HONOR_NANS (mode
))
6316 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6318 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6319 arg0
= save_expr (arg0
);
6320 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
, arg0
);
6324 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6325 real_maxval (&max
, neg
, mode
);
6326 return fold_build2_loc (loc
, neg
? LT_EXPR
: GT_EXPR
, type
,
6327 arg0
, build_real (TREE_TYPE (arg0
), max
));
6330 /* x < +Inf is always equal to x <= DBL_MAX. */
6331 real_maxval (&max
, neg
, mode
);
6332 return fold_build2_loc (loc
, neg
? GE_EXPR
: LE_EXPR
, type
,
6333 arg0
, build_real (TREE_TYPE (arg0
), max
));
6336 /* x != +Inf is always equal to !(x > DBL_MAX). */
6337 real_maxval (&max
, neg
, mode
);
6338 if (! HONOR_NANS (mode
))
6339 return fold_build2_loc (loc
, neg
? GE_EXPR
: LE_EXPR
, type
,
6340 arg0
, build_real (TREE_TYPE (arg0
), max
));
6342 temp
= fold_build2_loc (loc
, neg
? LT_EXPR
: GT_EXPR
, type
,
6343 arg0
, build_real (TREE_TYPE (arg0
), max
));
6344 return fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, temp
);
6353 /* Subroutine of fold() that optimizes comparisons of a division by
6354 a nonzero integer constant against an integer constant, i.e.
6357 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6358 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6359 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6361 The function returns the constant folded tree if a simplification
6362 can be made, and NULL_TREE otherwise. */
6365 fold_div_compare (location_t loc
,
6366 enum tree_code code
, tree type
, tree arg0
, tree arg1
)
6368 tree prod
, tmp
, hi
, lo
;
6369 tree arg00
= TREE_OPERAND (arg0
, 0);
6370 tree arg01
= TREE_OPERAND (arg0
, 1);
6371 signop sign
= TYPE_SIGN (TREE_TYPE (arg0
));
6372 bool neg_overflow
= false;
6375 /* We have to do this the hard way to detect unsigned overflow.
6376 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6377 wide_int val
= wi::mul (arg01
, arg1
, sign
, &overflow
);
6378 prod
= force_fit_type (TREE_TYPE (arg00
), val
, -1, overflow
);
6379 neg_overflow
= false;
6381 if (sign
== UNSIGNED
)
6383 tmp
= int_const_binop (MINUS_EXPR
, arg01
,
6384 build_int_cst (TREE_TYPE (arg01
), 1));
6387 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6388 val
= wi::add (prod
, tmp
, sign
, &overflow
);
6389 hi
= force_fit_type (TREE_TYPE (arg00
), val
,
6390 -1, overflow
| TREE_OVERFLOW (prod
));
6392 else if (tree_int_cst_sgn (arg01
) >= 0)
6394 tmp
= int_const_binop (MINUS_EXPR
, arg01
,
6395 build_int_cst (TREE_TYPE (arg01
), 1));
6396 switch (tree_int_cst_sgn (arg1
))
6399 neg_overflow
= true;
6400 lo
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
6405 lo
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
6410 hi
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
6420 /* A negative divisor reverses the relational operators. */
6421 code
= swap_tree_comparison (code
);
6423 tmp
= int_const_binop (PLUS_EXPR
, arg01
,
6424 build_int_cst (TREE_TYPE (arg01
), 1));
6425 switch (tree_int_cst_sgn (arg1
))
6428 hi
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
6433 hi
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
6438 neg_overflow
= true;
6439 lo
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
6451 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6452 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg00
);
6453 if (TREE_OVERFLOW (hi
))
6454 return fold_build2_loc (loc
, GE_EXPR
, type
, arg00
, lo
);
6455 if (TREE_OVERFLOW (lo
))
6456 return fold_build2_loc (loc
, LE_EXPR
, type
, arg00
, hi
);
6457 return build_range_check (loc
, type
, arg00
, 1, lo
, hi
);
6460 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6461 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg00
);
6462 if (TREE_OVERFLOW (hi
))
6463 return fold_build2_loc (loc
, LT_EXPR
, type
, arg00
, lo
);
6464 if (TREE_OVERFLOW (lo
))
6465 return fold_build2_loc (loc
, GT_EXPR
, type
, arg00
, hi
);
6466 return build_range_check (loc
, type
, arg00
, 0, lo
, hi
);
6469 if (TREE_OVERFLOW (lo
))
6471 tmp
= neg_overflow
? integer_zero_node
: integer_one_node
;
6472 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6474 return fold_build2_loc (loc
, LT_EXPR
, type
, arg00
, lo
);
6477 if (TREE_OVERFLOW (hi
))
6479 tmp
= neg_overflow
? integer_zero_node
: integer_one_node
;
6480 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6482 return fold_build2_loc (loc
, LE_EXPR
, type
, arg00
, hi
);
6485 if (TREE_OVERFLOW (hi
))
6487 tmp
= neg_overflow
? integer_one_node
: integer_zero_node
;
6488 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6490 return fold_build2_loc (loc
, GT_EXPR
, type
, arg00
, hi
);
6493 if (TREE_OVERFLOW (lo
))
6495 tmp
= neg_overflow
? integer_one_node
: integer_zero_node
;
6496 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6498 return fold_build2_loc (loc
, GE_EXPR
, type
, arg00
, lo
);
6508 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6509 equality/inequality test, then return a simplified form of the test
6510 using a sign testing. Otherwise return NULL. TYPE is the desired
6514 fold_single_bit_test_into_sign_test (location_t loc
,
6515 enum tree_code code
, tree arg0
, tree arg1
,
6518 /* If this is testing a single bit, we can optimize the test. */
6519 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6520 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6521 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6523 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6524 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6525 tree arg00
= sign_bit_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg0
, 1));
6527 if (arg00
!= NULL_TREE
6528 /* This is only a win if casting to a signed type is cheap,
6529 i.e. when arg00's type is not a partial mode. */
6530 && TYPE_PRECISION (TREE_TYPE (arg00
))
6531 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00
))))
6533 tree stype
= signed_type_for (TREE_TYPE (arg00
));
6534 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
6536 fold_convert_loc (loc
, stype
, arg00
),
6537 build_int_cst (stype
, 0));
6544 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6545 equality/inequality test, then return a simplified form of
6546 the test using shifts and logical operations. Otherwise return
6547 NULL. TYPE is the desired result type. */
6550 fold_single_bit_test (location_t loc
, enum tree_code code
,
6551 tree arg0
, tree arg1
, tree result_type
)
6553 /* If this is testing a single bit, we can optimize the test. */
6554 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6555 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6556 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6558 tree inner
= TREE_OPERAND (arg0
, 0);
6559 tree type
= TREE_TYPE (arg0
);
6560 int bitnum
= tree_log2 (TREE_OPERAND (arg0
, 1));
6561 enum machine_mode operand_mode
= TYPE_MODE (type
);
6563 tree signed_type
, unsigned_type
, intermediate_type
;
6566 /* First, see if we can fold the single bit test into a sign-bit
6568 tem
= fold_single_bit_test_into_sign_test (loc
, code
, arg0
, arg1
,
6573 /* Otherwise we have (A & C) != 0 where C is a single bit,
6574 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6575 Similarly for (A & C) == 0. */
6577 /* If INNER is a right shift of a constant and it plus BITNUM does
6578 not overflow, adjust BITNUM and INNER. */
6579 if (TREE_CODE (inner
) == RSHIFT_EXPR
6580 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
6581 && bitnum
< TYPE_PRECISION (type
)
6582 && wi::ltu_p (TREE_OPERAND (inner
, 1),
6583 TYPE_PRECISION (type
) - bitnum
))
6585 bitnum
+= tree_to_uhwi (TREE_OPERAND (inner
, 1));
6586 inner
= TREE_OPERAND (inner
, 0);
6589 /* If we are going to be able to omit the AND below, we must do our
6590 operations as unsigned. If we must use the AND, we have a choice.
6591 Normally unsigned is faster, but for some machines signed is. */
6592 #ifdef LOAD_EXTEND_OP
6593 ops_unsigned
= (LOAD_EXTEND_OP (operand_mode
) == SIGN_EXTEND
6594 && !flag_syntax_only
) ? 0 : 1;
6599 signed_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 0);
6600 unsigned_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 1);
6601 intermediate_type
= ops_unsigned
? unsigned_type
: signed_type
;
6602 inner
= fold_convert_loc (loc
, intermediate_type
, inner
);
6605 inner
= build2 (RSHIFT_EXPR
, intermediate_type
,
6606 inner
, size_int (bitnum
));
6608 one
= build_int_cst (intermediate_type
, 1);
6610 if (code
== EQ_EXPR
)
6611 inner
= fold_build2_loc (loc
, BIT_XOR_EXPR
, intermediate_type
, inner
, one
);
6613 /* Put the AND last so it can combine with more things. */
6614 inner
= build2 (BIT_AND_EXPR
, intermediate_type
, inner
, one
);
6616 /* Make sure to return the proper type. */
6617 inner
= fold_convert_loc (loc
, result_type
, inner
);
6624 /* Check whether we are allowed to reorder operands arg0 and arg1,
6625 such that the evaluation of arg1 occurs before arg0. */
6628 reorder_operands_p (const_tree arg0
, const_tree arg1
)
6630 if (! flag_evaluation_order
)
6632 if (TREE_CONSTANT (arg0
) || TREE_CONSTANT (arg1
))
6634 return ! TREE_SIDE_EFFECTS (arg0
)
6635 && ! TREE_SIDE_EFFECTS (arg1
);
6638 /* Test whether it is preferable two swap two operands, ARG0 and
6639 ARG1, for example because ARG0 is an integer constant and ARG1
6640 isn't. If REORDER is true, only recommend swapping if we can
6641 evaluate the operands in reverse order. */
6644 tree_swap_operands_p (const_tree arg0
, const_tree arg1
, bool reorder
)
6646 if (CONSTANT_CLASS_P (arg1
))
6648 if (CONSTANT_CLASS_P (arg0
))
6651 STRIP_SIGN_NOPS (arg0
);
6652 STRIP_SIGN_NOPS (arg1
);
6654 if (TREE_CONSTANT (arg1
))
6656 if (TREE_CONSTANT (arg0
))
6659 if (reorder
&& flag_evaluation_order
6660 && (TREE_SIDE_EFFECTS (arg0
) || TREE_SIDE_EFFECTS (arg1
)))
6663 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6664 for commutative and comparison operators. Ensuring a canonical
6665 form allows the optimizers to find additional redundancies without
6666 having to explicitly check for both orderings. */
6667 if (TREE_CODE (arg0
) == SSA_NAME
6668 && TREE_CODE (arg1
) == SSA_NAME
6669 && SSA_NAME_VERSION (arg0
) > SSA_NAME_VERSION (arg1
))
6672 /* Put SSA_NAMEs last. */
6673 if (TREE_CODE (arg1
) == SSA_NAME
)
6675 if (TREE_CODE (arg0
) == SSA_NAME
)
6678 /* Put variables last. */
6687 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6688 ARG0 is extended to a wider type. */
6691 fold_widened_comparison (location_t loc
, enum tree_code code
,
6692 tree type
, tree arg0
, tree arg1
)
6694 tree arg0_unw
= get_unwidened (arg0
, NULL_TREE
);
6696 tree shorter_type
, outer_type
;
6700 if (arg0_unw
== arg0
)
6702 shorter_type
= TREE_TYPE (arg0_unw
);
6704 #ifdef HAVE_canonicalize_funcptr_for_compare
6705 /* Disable this optimization if we're casting a function pointer
6706 type on targets that require function pointer canonicalization. */
6707 if (HAVE_canonicalize_funcptr_for_compare
6708 && TREE_CODE (shorter_type
) == POINTER_TYPE
6709 && TREE_CODE (TREE_TYPE (shorter_type
)) == FUNCTION_TYPE
)
6713 if (TYPE_PRECISION (TREE_TYPE (arg0
)) <= TYPE_PRECISION (shorter_type
))
6716 arg1_unw
= get_unwidened (arg1
, NULL_TREE
);
6718 /* If possible, express the comparison in the shorter mode. */
6719 if ((code
== EQ_EXPR
|| code
== NE_EXPR
6720 || TYPE_UNSIGNED (TREE_TYPE (arg0
)) == TYPE_UNSIGNED (shorter_type
))
6721 && (TREE_TYPE (arg1_unw
) == shorter_type
6722 || ((TYPE_PRECISION (shorter_type
)
6723 >= TYPE_PRECISION (TREE_TYPE (arg1_unw
)))
6724 && (TYPE_UNSIGNED (shorter_type
)
6725 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw
))))
6726 || (TREE_CODE (arg1_unw
) == INTEGER_CST
6727 && (TREE_CODE (shorter_type
) == INTEGER_TYPE
6728 || TREE_CODE (shorter_type
) == BOOLEAN_TYPE
)
6729 && int_fits_type_p (arg1_unw
, shorter_type
))))
6730 return fold_build2_loc (loc
, code
, type
, arg0_unw
,
6731 fold_convert_loc (loc
, shorter_type
, arg1_unw
));
6733 if (TREE_CODE (arg1_unw
) != INTEGER_CST
6734 || TREE_CODE (shorter_type
) != INTEGER_TYPE
6735 || !int_fits_type_p (arg1_unw
, shorter_type
))
6738 /* If we are comparing with the integer that does not fit into the range
6739 of the shorter type, the result is known. */
6740 outer_type
= TREE_TYPE (arg1_unw
);
6741 min
= lower_bound_in_type (outer_type
, shorter_type
);
6742 max
= upper_bound_in_type (outer_type
, shorter_type
);
6744 above
= integer_nonzerop (fold_relational_const (LT_EXPR
, type
,
6746 below
= integer_nonzerop (fold_relational_const (LT_EXPR
, type
,
6753 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6758 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6764 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6766 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6771 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6773 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6782 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6783 ARG0 just the signedness is changed. */
6786 fold_sign_changed_comparison (location_t loc
, enum tree_code code
, tree type
,
6787 tree arg0
, tree arg1
)
6790 tree inner_type
, outer_type
;
6792 if (!CONVERT_EXPR_P (arg0
))
6795 outer_type
= TREE_TYPE (arg0
);
6796 arg0_inner
= TREE_OPERAND (arg0
, 0);
6797 inner_type
= TREE_TYPE (arg0_inner
);
6799 #ifdef HAVE_canonicalize_funcptr_for_compare
6800 /* Disable this optimization if we're casting a function pointer
6801 type on targets that require function pointer canonicalization. */
6802 if (HAVE_canonicalize_funcptr_for_compare
6803 && TREE_CODE (inner_type
) == POINTER_TYPE
6804 && TREE_CODE (TREE_TYPE (inner_type
)) == FUNCTION_TYPE
)
6808 if (TYPE_PRECISION (inner_type
) != TYPE_PRECISION (outer_type
))
6811 if (TREE_CODE (arg1
) != INTEGER_CST
6812 && !(CONVERT_EXPR_P (arg1
)
6813 && TREE_TYPE (TREE_OPERAND (arg1
, 0)) == inner_type
))
6816 if (TYPE_UNSIGNED (inner_type
) != TYPE_UNSIGNED (outer_type
)
6821 if (POINTER_TYPE_P (inner_type
) != POINTER_TYPE_P (outer_type
))
6824 if (TREE_CODE (arg1
) == INTEGER_CST
)
6825 arg1
= force_fit_type (inner_type
, wi::to_widest (arg1
), 0,
6826 TREE_OVERFLOW (arg1
));
6828 arg1
= fold_convert_loc (loc
, inner_type
, arg1
);
6830 return fold_build2_loc (loc
, code
, type
, arg0_inner
, arg1
);
6834 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6835 means A >= Y && A != MAX, but in this case we know that
6836 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6839 fold_to_nonsharp_ineq_using_bound (location_t loc
, tree ineq
, tree bound
)
6841 tree a
, typea
, type
= TREE_TYPE (ineq
), a1
, diff
, y
;
6843 if (TREE_CODE (bound
) == LT_EXPR
)
6844 a
= TREE_OPERAND (bound
, 0);
6845 else if (TREE_CODE (bound
) == GT_EXPR
)
6846 a
= TREE_OPERAND (bound
, 1);
6850 typea
= TREE_TYPE (a
);
6851 if (!INTEGRAL_TYPE_P (typea
)
6852 && !POINTER_TYPE_P (typea
))
6855 if (TREE_CODE (ineq
) == LT_EXPR
)
6857 a1
= TREE_OPERAND (ineq
, 1);
6858 y
= TREE_OPERAND (ineq
, 0);
6860 else if (TREE_CODE (ineq
) == GT_EXPR
)
6862 a1
= TREE_OPERAND (ineq
, 0);
6863 y
= TREE_OPERAND (ineq
, 1);
6868 if (TREE_TYPE (a1
) != typea
)
6871 if (POINTER_TYPE_P (typea
))
6873 /* Convert the pointer types into integer before taking the difference. */
6874 tree ta
= fold_convert_loc (loc
, ssizetype
, a
);
6875 tree ta1
= fold_convert_loc (loc
, ssizetype
, a1
);
6876 diff
= fold_binary_loc (loc
, MINUS_EXPR
, ssizetype
, ta1
, ta
);
6879 diff
= fold_binary_loc (loc
, MINUS_EXPR
, typea
, a1
, a
);
6881 if (!diff
|| !integer_onep (diff
))
6884 return fold_build2_loc (loc
, GE_EXPR
, type
, a
, y
);
6887 /* Fold a sum or difference of at least one multiplication.
6888 Returns the folded tree or NULL if no simplification could be made. */
6891 fold_plusminus_mult_expr (location_t loc
, enum tree_code code
, tree type
,
6892 tree arg0
, tree arg1
)
6894 tree arg00
, arg01
, arg10
, arg11
;
6895 tree alt0
= NULL_TREE
, alt1
= NULL_TREE
, same
;
6897 /* (A * C) +- (B * C) -> (A+-B) * C.
6898 (A * C) +- A -> A * (C+-1).
6899 We are most concerned about the case where C is a constant,
6900 but other combinations show up during loop reduction. Since
6901 it is not difficult, try all four possibilities. */
6903 if (TREE_CODE (arg0
) == MULT_EXPR
)
6905 arg00
= TREE_OPERAND (arg0
, 0);
6906 arg01
= TREE_OPERAND (arg0
, 1);
6908 else if (TREE_CODE (arg0
) == INTEGER_CST
)
6910 arg00
= build_one_cst (type
);
6915 /* We cannot generate constant 1 for fract. */
6916 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
6919 arg01
= build_one_cst (type
);
6921 if (TREE_CODE (arg1
) == MULT_EXPR
)
6923 arg10
= TREE_OPERAND (arg1
, 0);
6924 arg11
= TREE_OPERAND (arg1
, 1);
6926 else if (TREE_CODE (arg1
) == INTEGER_CST
)
6928 arg10
= build_one_cst (type
);
6929 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
6930 the purpose of this canonicalization. */
6931 if (wi::neg_p (arg1
, TYPE_SIGN (TREE_TYPE (arg1
)))
6932 && negate_expr_p (arg1
)
6933 && code
== PLUS_EXPR
)
6935 arg11
= negate_expr (arg1
);
6943 /* We cannot generate constant 1 for fract. */
6944 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
6947 arg11
= build_one_cst (type
);
6951 if (operand_equal_p (arg01
, arg11
, 0))
6952 same
= arg01
, alt0
= arg00
, alt1
= arg10
;
6953 else if (operand_equal_p (arg00
, arg10
, 0))
6954 same
= arg00
, alt0
= arg01
, alt1
= arg11
;
6955 else if (operand_equal_p (arg00
, arg11
, 0))
6956 same
= arg00
, alt0
= arg01
, alt1
= arg10
;
6957 else if (operand_equal_p (arg01
, arg10
, 0))
6958 same
= arg01
, alt0
= arg00
, alt1
= arg11
;
6960 /* No identical multiplicands; see if we can find a common
6961 power-of-two factor in non-power-of-two multiplies. This
6962 can help in multi-dimensional array access. */
6963 else if (tree_fits_shwi_p (arg01
)
6964 && tree_fits_shwi_p (arg11
))
6966 HOST_WIDE_INT int01
, int11
, tmp
;
6969 int01
= tree_to_shwi (arg01
);
6970 int11
= tree_to_shwi (arg11
);
6972 /* Move min of absolute values to int11. */
6973 if (absu_hwi (int01
) < absu_hwi (int11
))
6975 tmp
= int01
, int01
= int11
, int11
= tmp
;
6976 alt0
= arg00
, arg00
= arg10
, arg10
= alt0
;
6983 if (exact_log2 (absu_hwi (int11
)) > 0 && int01
% int11
== 0
6984 /* The remainder should not be a constant, otherwise we
6985 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
6986 increased the number of multiplications necessary. */
6987 && TREE_CODE (arg10
) != INTEGER_CST
)
6989 alt0
= fold_build2_loc (loc
, MULT_EXPR
, TREE_TYPE (arg00
), arg00
,
6990 build_int_cst (TREE_TYPE (arg00
),
6995 maybe_same
= alt0
, alt0
= alt1
, alt1
= maybe_same
;
7000 return fold_build2_loc (loc
, MULT_EXPR
, type
,
7001 fold_build2_loc (loc
, code
, type
,
7002 fold_convert_loc (loc
, type
, alt0
),
7003 fold_convert_loc (loc
, type
, alt1
)),
7004 fold_convert_loc (loc
, type
, same
));
7009 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7010 specified by EXPR into the buffer PTR of length LEN bytes.
7011 Return the number of bytes placed in the buffer, or zero
7015 native_encode_int (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7017 tree type
= TREE_TYPE (expr
);
7018 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7019 int byte
, offset
, word
, words
;
7020 unsigned char value
;
7022 if ((off
== -1 && total_bytes
> len
)
7023 || off
>= total_bytes
)
7027 words
= total_bytes
/ UNITS_PER_WORD
;
7029 for (byte
= 0; byte
< total_bytes
; byte
++)
7031 int bitpos
= byte
* BITS_PER_UNIT
;
7032 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7034 value
= wi::extract_uhwi (wi::to_widest (expr
), bitpos
, BITS_PER_UNIT
);
7036 if (total_bytes
> UNITS_PER_WORD
)
7038 word
= byte
/ UNITS_PER_WORD
;
7039 if (WORDS_BIG_ENDIAN
)
7040 word
= (words
- 1) - word
;
7041 offset
= word
* UNITS_PER_WORD
;
7042 if (BYTES_BIG_ENDIAN
)
7043 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7045 offset
+= byte
% UNITS_PER_WORD
;
7048 offset
= BYTES_BIG_ENDIAN
? (total_bytes
- 1) - byte
: byte
;
7050 && offset
- off
< len
)
7051 ptr
[offset
- off
] = value
;
7053 return MIN (len
, total_bytes
- off
);
7057 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7058 specified by EXPR into the buffer PTR of length LEN bytes.
7059 Return the number of bytes placed in the buffer, or zero
7063 native_encode_fixed (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7065 tree type
= TREE_TYPE (expr
);
7066 enum machine_mode mode
= TYPE_MODE (type
);
7067 int total_bytes
= GET_MODE_SIZE (mode
);
7068 FIXED_VALUE_TYPE value
;
7069 tree i_value
, i_type
;
7071 if (total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7074 i_type
= lang_hooks
.types
.type_for_size (GET_MODE_BITSIZE (mode
), 1);
7076 if (NULL_TREE
== i_type
7077 || TYPE_PRECISION (i_type
) != total_bytes
)
7080 value
= TREE_FIXED_CST (expr
);
7081 i_value
= double_int_to_tree (i_type
, value
.data
);
7083 return native_encode_int (i_value
, ptr
, len
, off
);
7087 /* Subroutine of native_encode_expr. Encode the REAL_CST
7088 specified by EXPR into the buffer PTR of length LEN bytes.
7089 Return the number of bytes placed in the buffer, or zero
7093 native_encode_real (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7095 tree type
= TREE_TYPE (expr
);
7096 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7097 int byte
, offset
, word
, words
, bitpos
;
7098 unsigned char value
;
7100 /* There are always 32 bits in each long, no matter the size of
7101 the hosts long. We handle floating point representations with
7105 if ((off
== -1 && total_bytes
> len
)
7106 || off
>= total_bytes
)
7110 words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
7112 real_to_target (tmp
, TREE_REAL_CST_PTR (expr
), TYPE_MODE (type
));
7114 for (bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7115 bitpos
+= BITS_PER_UNIT
)
7117 byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7118 value
= (unsigned char) (tmp
[bitpos
/ 32] >> (bitpos
& 31));
7120 if (UNITS_PER_WORD
< 4)
7122 word
= byte
/ UNITS_PER_WORD
;
7123 if (WORDS_BIG_ENDIAN
)
7124 word
= (words
- 1) - word
;
7125 offset
= word
* UNITS_PER_WORD
;
7126 if (BYTES_BIG_ENDIAN
)
7127 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7129 offset
+= byte
% UNITS_PER_WORD
;
7132 offset
= BYTES_BIG_ENDIAN
? 3 - byte
: byte
;
7133 offset
= offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3);
7135 && offset
- off
< len
)
7136 ptr
[offset
- off
] = value
;
7138 return MIN (len
, total_bytes
- off
);
7141 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7142 specified by EXPR into the buffer PTR of length LEN bytes.
7143 Return the number of bytes placed in the buffer, or zero
7147 native_encode_complex (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7152 part
= TREE_REALPART (expr
);
7153 rsize
= native_encode_expr (part
, ptr
, len
, off
);
7157 part
= TREE_IMAGPART (expr
);
7159 off
= MAX (0, off
- GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part
))));
7160 isize
= native_encode_expr (part
, ptr
+rsize
, len
-rsize
, off
);
7164 return rsize
+ isize
;
7168 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7169 specified by EXPR into the buffer PTR of length LEN bytes.
7170 Return the number of bytes placed in the buffer, or zero
7174 native_encode_vector (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7181 count
= VECTOR_CST_NELTS (expr
);
7182 itype
= TREE_TYPE (TREE_TYPE (expr
));
7183 size
= GET_MODE_SIZE (TYPE_MODE (itype
));
7184 for (i
= 0; i
< count
; i
++)
7191 elem
= VECTOR_CST_ELT (expr
, i
);
7192 int res
= native_encode_expr (elem
, ptr
+offset
, len
-offset
, off
);
7193 if ((off
== -1 && res
!= size
)
7206 /* Subroutine of native_encode_expr. Encode the STRING_CST
7207 specified by EXPR into the buffer PTR of length LEN bytes.
7208 Return the number of bytes placed in the buffer, or zero
7212 native_encode_string (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7214 tree type
= TREE_TYPE (expr
);
7215 HOST_WIDE_INT total_bytes
;
7217 if (TREE_CODE (type
) != ARRAY_TYPE
7218 || TREE_CODE (TREE_TYPE (type
)) != INTEGER_TYPE
7219 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))) != BITS_PER_UNIT
7220 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type
)))
7222 total_bytes
= tree_to_shwi (TYPE_SIZE_UNIT (type
));
7223 if ((off
== -1 && total_bytes
> len
)
7224 || off
>= total_bytes
)
7228 if (TREE_STRING_LENGTH (expr
) - off
< MIN (total_bytes
, len
))
7231 if (off
< TREE_STRING_LENGTH (expr
))
7233 written
= MIN (len
, TREE_STRING_LENGTH (expr
) - off
);
7234 memcpy (ptr
, TREE_STRING_POINTER (expr
) + off
, written
);
7236 memset (ptr
+ written
, 0,
7237 MIN (total_bytes
- written
, len
- written
));
7240 memcpy (ptr
, TREE_STRING_POINTER (expr
) + off
, MIN (total_bytes
, len
));
7241 return MIN (total_bytes
- off
, len
);
7245 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7246 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7247 buffer PTR of length LEN bytes. If OFF is not -1 then start
7248 the encoding at byte offset OFF and encode at most LEN bytes.
7249 Return the number of bytes placed in the buffer, or zero upon failure. */
7252 native_encode_expr (const_tree expr
, unsigned char *ptr
, int len
, int off
)
7254 switch (TREE_CODE (expr
))
7257 return native_encode_int (expr
, ptr
, len
, off
);
7260 return native_encode_real (expr
, ptr
, len
, off
);
7263 return native_encode_fixed (expr
, ptr
, len
, off
);
7266 return native_encode_complex (expr
, ptr
, len
, off
);
7269 return native_encode_vector (expr
, ptr
, len
, off
);
7272 return native_encode_string (expr
, ptr
, len
, off
);
7280 /* Subroutine of native_interpret_expr. Interpret the contents of
7281 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7282 If the buffer cannot be interpreted, return NULL_TREE. */
7285 native_interpret_int (tree type
, const unsigned char *ptr
, int len
)
7287 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7289 if (total_bytes
> len
7290 || total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7293 wide_int result
= wi::from_buffer (ptr
, total_bytes
);
7295 return wide_int_to_tree (type
, result
);
7299 /* Subroutine of native_interpret_expr. Interpret the contents of
7300 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7301 If the buffer cannot be interpreted, return NULL_TREE. */
7304 native_interpret_fixed (tree type
, const unsigned char *ptr
, int len
)
7306 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7308 FIXED_VALUE_TYPE fixed_value
;
7310 if (total_bytes
> len
7311 || total_bytes
* BITS_PER_UNIT
> HOST_BITS_PER_DOUBLE_INT
)
7314 result
= double_int::from_buffer (ptr
, total_bytes
);
7315 fixed_value
= fixed_from_double_int (result
, TYPE_MODE (type
));
7317 return build_fixed (type
, fixed_value
);
7321 /* Subroutine of native_interpret_expr. Interpret the contents of
7322 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7323 If the buffer cannot be interpreted, return NULL_TREE. */
7326 native_interpret_real (tree type
, const unsigned char *ptr
, int len
)
7328 enum machine_mode mode
= TYPE_MODE (type
);
7329 int total_bytes
= GET_MODE_SIZE (mode
);
7330 int byte
, offset
, word
, words
, bitpos
;
7331 unsigned char value
;
7332 /* There are always 32 bits in each long, no matter the size of
7333 the hosts long. We handle floating point representations with
7338 total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7339 if (total_bytes
> len
|| total_bytes
> 24)
7341 words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
7343 memset (tmp
, 0, sizeof (tmp
));
7344 for (bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7345 bitpos
+= BITS_PER_UNIT
)
7347 byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7348 if (UNITS_PER_WORD
< 4)
7350 word
= byte
/ UNITS_PER_WORD
;
7351 if (WORDS_BIG_ENDIAN
)
7352 word
= (words
- 1) - word
;
7353 offset
= word
* UNITS_PER_WORD
;
7354 if (BYTES_BIG_ENDIAN
)
7355 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7357 offset
+= byte
% UNITS_PER_WORD
;
7360 offset
= BYTES_BIG_ENDIAN
? 3 - byte
: byte
;
7361 value
= ptr
[offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3)];
7363 tmp
[bitpos
/ 32] |= (unsigned long)value
<< (bitpos
& 31);
7366 real_from_target (&r
, tmp
, mode
);
7367 return build_real (type
, r
);
7371 /* Subroutine of native_interpret_expr. Interpret the contents of
7372 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7373 If the buffer cannot be interpreted, return NULL_TREE. */
7376 native_interpret_complex (tree type
, const unsigned char *ptr
, int len
)
7378 tree etype
, rpart
, ipart
;
7381 etype
= TREE_TYPE (type
);
7382 size
= GET_MODE_SIZE (TYPE_MODE (etype
));
7385 rpart
= native_interpret_expr (etype
, ptr
, size
);
7388 ipart
= native_interpret_expr (etype
, ptr
+size
, size
);
7391 return build_complex (type
, rpart
, ipart
);
7395 /* Subroutine of native_interpret_expr. Interpret the contents of
7396 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7397 If the buffer cannot be interpreted, return NULL_TREE. */
7400 native_interpret_vector (tree type
, const unsigned char *ptr
, int len
)
7406 etype
= TREE_TYPE (type
);
7407 size
= GET_MODE_SIZE (TYPE_MODE (etype
));
7408 count
= TYPE_VECTOR_SUBPARTS (type
);
7409 if (size
* count
> len
)
7412 elements
= XALLOCAVEC (tree
, count
);
7413 for (i
= count
- 1; i
>= 0; i
--)
7415 elem
= native_interpret_expr (etype
, ptr
+(i
*size
), size
);
7420 return build_vector (type
, elements
);
7424 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7425 the buffer PTR of length LEN as a constant of type TYPE. For
7426 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7427 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7428 return NULL_TREE. */
7431 native_interpret_expr (tree type
, const unsigned char *ptr
, int len
)
7433 switch (TREE_CODE (type
))
7439 case REFERENCE_TYPE
:
7440 return native_interpret_int (type
, ptr
, len
);
7443 return native_interpret_real (type
, ptr
, len
);
7445 case FIXED_POINT_TYPE
:
7446 return native_interpret_fixed (type
, ptr
, len
);
7449 return native_interpret_complex (type
, ptr
, len
);
7452 return native_interpret_vector (type
, ptr
, len
);
7459 /* Returns true if we can interpret the contents of a native encoding
7463 can_native_interpret_type_p (tree type
)
7465 switch (TREE_CODE (type
))
7471 case REFERENCE_TYPE
:
7472 case FIXED_POINT_TYPE
:
7482 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7483 TYPE at compile-time. If we're unable to perform the conversion
7484 return NULL_TREE. */
7487 fold_view_convert_expr (tree type
, tree expr
)
7489 /* We support up to 512-bit values (for V8DFmode). */
7490 unsigned char buffer
[64];
7493 /* Check that the host and target are sane. */
7494 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8)
7497 len
= native_encode_expr (expr
, buffer
, sizeof (buffer
));
7501 return native_interpret_expr (type
, buffer
, len
);
7504 /* Build an expression for the address of T. Folds away INDIRECT_REF
7505 to avoid confusing the gimplify process. */
7508 build_fold_addr_expr_with_type_loc (location_t loc
, tree t
, tree ptrtype
)
7510 /* The size of the object is not relevant when talking about its address. */
7511 if (TREE_CODE (t
) == WITH_SIZE_EXPR
)
7512 t
= TREE_OPERAND (t
, 0);
7514 if (TREE_CODE (t
) == INDIRECT_REF
)
7516 t
= TREE_OPERAND (t
, 0);
7518 if (TREE_TYPE (t
) != ptrtype
)
7519 t
= build1_loc (loc
, NOP_EXPR
, ptrtype
, t
);
7521 else if (TREE_CODE (t
) == MEM_REF
7522 && integer_zerop (TREE_OPERAND (t
, 1)))
7523 return TREE_OPERAND (t
, 0);
7524 else if (TREE_CODE (t
) == MEM_REF
7525 && TREE_CODE (TREE_OPERAND (t
, 0)) == INTEGER_CST
)
7526 return fold_binary (POINTER_PLUS_EXPR
, ptrtype
,
7527 TREE_OPERAND (t
, 0),
7528 convert_to_ptrofftype (TREE_OPERAND (t
, 1)));
7529 else if (TREE_CODE (t
) == VIEW_CONVERT_EXPR
)
7531 t
= build_fold_addr_expr_loc (loc
, TREE_OPERAND (t
, 0));
7533 if (TREE_TYPE (t
) != ptrtype
)
7534 t
= fold_convert_loc (loc
, ptrtype
, t
);
7537 t
= build1_loc (loc
, ADDR_EXPR
, ptrtype
, t
);
7542 /* Build an expression for the address of T. */
7545 build_fold_addr_expr_loc (location_t loc
, tree t
)
7547 tree ptrtype
= build_pointer_type (TREE_TYPE (t
));
7549 return build_fold_addr_expr_with_type_loc (loc
, t
, ptrtype
);
7552 static bool vec_cst_ctor_to_array (tree
, tree
*);
7554 /* Fold a unary expression of code CODE and type TYPE with operand
7555 OP0. Return the folded expression if folding is successful.
7556 Otherwise, return NULL_TREE. */
7559 fold_unary_loc (location_t loc
, enum tree_code code
, tree type
, tree op0
)
7563 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
7565 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
7566 && TREE_CODE_LENGTH (code
) == 1);
7568 tem
= generic_simplify (loc
, code
, type
, op0
);
7575 if (CONVERT_EXPR_CODE_P (code
)
7576 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
|| code
== NEGATE_EXPR
)
7578 /* Don't use STRIP_NOPS, because signedness of argument type
7580 STRIP_SIGN_NOPS (arg0
);
7584 /* Strip any conversions that don't change the mode. This
7585 is safe for every expression, except for a comparison
7586 expression because its signedness is derived from its
7589 Note that this is done as an internal manipulation within
7590 the constant folder, in order to find the simplest
7591 representation of the arguments so that their form can be
7592 studied. In any cases, the appropriate type conversions
7593 should be put back in the tree that will get out of the
7599 if (TREE_CODE_CLASS (code
) == tcc_unary
)
7601 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
7602 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7603 fold_build1_loc (loc
, code
, type
,
7604 fold_convert_loc (loc
, TREE_TYPE (op0
),
7605 TREE_OPERAND (arg0
, 1))));
7606 else if (TREE_CODE (arg0
) == COND_EXPR
)
7608 tree arg01
= TREE_OPERAND (arg0
, 1);
7609 tree arg02
= TREE_OPERAND (arg0
, 2);
7610 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
7611 arg01
= fold_build1_loc (loc
, code
, type
,
7612 fold_convert_loc (loc
,
7613 TREE_TYPE (op0
), arg01
));
7614 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
7615 arg02
= fold_build1_loc (loc
, code
, type
,
7616 fold_convert_loc (loc
,
7617 TREE_TYPE (op0
), arg02
));
7618 tem
= fold_build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7621 /* If this was a conversion, and all we did was to move into
7622 inside the COND_EXPR, bring it back out. But leave it if
7623 it is a conversion from integer to integer and the
7624 result precision is no wider than a word since such a
7625 conversion is cheap and may be optimized away by combine,
7626 while it couldn't if it were outside the COND_EXPR. Then return
7627 so we don't get into an infinite recursion loop taking the
7628 conversion out and then back in. */
7630 if ((CONVERT_EXPR_CODE_P (code
)
7631 || code
== NON_LVALUE_EXPR
)
7632 && TREE_CODE (tem
) == COND_EXPR
7633 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
7634 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
7635 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
7636 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
7637 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
7638 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
7639 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7641 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
7642 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
7643 || flag_syntax_only
))
7644 tem
= build1_loc (loc
, code
, type
,
7646 TREE_TYPE (TREE_OPERAND
7647 (TREE_OPERAND (tem
, 1), 0)),
7648 TREE_OPERAND (tem
, 0),
7649 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
7650 TREE_OPERAND (TREE_OPERAND (tem
, 2),
7659 /* Re-association barriers around constants and other re-association
7660 barriers can be removed. */
7661 if (CONSTANT_CLASS_P (op0
)
7662 || TREE_CODE (op0
) == PAREN_EXPR
)
7663 return fold_convert_loc (loc
, type
, op0
);
7666 case NON_LVALUE_EXPR
:
7667 if (!maybe_lvalue_p (op0
))
7668 return fold_convert_loc (loc
, type
, op0
);
7673 case FIX_TRUNC_EXPR
:
7674 if (TREE_TYPE (op0
) == type
)
7677 if (COMPARISON_CLASS_P (op0
))
7679 /* If we have (type) (a CMP b) and type is an integral type, return
7680 new expression involving the new type. Canonicalize
7681 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7683 Do not fold the result as that would not simplify further, also
7684 folding again results in recursions. */
7685 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
7686 return build2_loc (loc
, TREE_CODE (op0
), type
,
7687 TREE_OPERAND (op0
, 0),
7688 TREE_OPERAND (op0
, 1));
7689 else if (!INTEGRAL_TYPE_P (type
) && !VOID_TYPE_P (type
)
7690 && TREE_CODE (type
) != VECTOR_TYPE
)
7691 return build3_loc (loc
, COND_EXPR
, type
, op0
,
7692 constant_boolean_node (true, type
),
7693 constant_boolean_node (false, type
));
7696 /* Handle cases of two conversions in a row. */
7697 if (CONVERT_EXPR_P (op0
))
7699 tree inside_type
= TREE_TYPE (TREE_OPERAND (op0
, 0));
7700 tree inter_type
= TREE_TYPE (op0
);
7701 int inside_int
= INTEGRAL_TYPE_P (inside_type
);
7702 int inside_ptr
= POINTER_TYPE_P (inside_type
);
7703 int inside_float
= FLOAT_TYPE_P (inside_type
);
7704 int inside_vec
= TREE_CODE (inside_type
) == VECTOR_TYPE
;
7705 unsigned int inside_prec
= TYPE_PRECISION (inside_type
);
7706 int inside_unsignedp
= TYPE_UNSIGNED (inside_type
);
7707 int inter_int
= INTEGRAL_TYPE_P (inter_type
);
7708 int inter_ptr
= POINTER_TYPE_P (inter_type
);
7709 int inter_float
= FLOAT_TYPE_P (inter_type
);
7710 int inter_vec
= TREE_CODE (inter_type
) == VECTOR_TYPE
;
7711 unsigned int inter_prec
= TYPE_PRECISION (inter_type
);
7712 int inter_unsignedp
= TYPE_UNSIGNED (inter_type
);
7713 int final_int
= INTEGRAL_TYPE_P (type
);
7714 int final_ptr
= POINTER_TYPE_P (type
);
7715 int final_float
= FLOAT_TYPE_P (type
);
7716 int final_vec
= TREE_CODE (type
) == VECTOR_TYPE
;
7717 unsigned int final_prec
= TYPE_PRECISION (type
);
7718 int final_unsignedp
= TYPE_UNSIGNED (type
);
7720 /* check for cases specific to UPC, involving pointer types */
7721 if (final_ptr
|| inter_ptr
|| inside_ptr
)
7723 int final_pts
= final_ptr
7724 && upc_shared_type_p (TREE_TYPE (type
));
7725 int inter_pts
= inter_ptr
7726 && upc_shared_type_p (TREE_TYPE (inter_type
));
7727 int inside_pts
= inside_ptr
7728 && upc_shared_type_p (TREE_TYPE (inside_type
));
7729 if (final_pts
|| inter_pts
|| inside_pts
)
7731 if (!((final_pts
&& inter_pts
)
7732 && TREE_TYPE (type
) == TREE_TYPE (inter_type
))
7733 || ((inter_pts
&& inside_pts
)
7734 && (TREE_TYPE (inter_type
)
7735 == TREE_TYPE (inside_type
))))
7740 /* In addition to the cases of two conversions in a row
7741 handled below, if we are converting something to its own
7742 type via an object of identical or wider precision, neither
7743 conversion is needed. */
7744 if (TYPE_MAIN_VARIANT (inside_type
) == TYPE_MAIN_VARIANT (type
)
7745 && (((inter_int
|| inter_ptr
) && final_int
)
7746 || (inter_float
&& final_float
))
7747 && inter_prec
>= final_prec
)
7748 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7750 /* Likewise, if the intermediate and initial types are either both
7751 float or both integer, we don't need the middle conversion if the
7752 former is wider than the latter and doesn't change the signedness
7753 (for integers). Avoid this if the final type is a pointer since
7754 then we sometimes need the middle conversion. Likewise if the
7755 final type has a precision not equal to the size of its mode. */
7756 if (((inter_int
&& inside_int
)
7757 || (inter_float
&& inside_float
)
7758 || (inter_vec
&& inside_vec
))
7759 && inter_prec
>= inside_prec
7760 && (inter_float
|| inter_vec
7761 || inter_unsignedp
== inside_unsignedp
)
7762 && ! (final_prec
!= GET_MODE_PRECISION (TYPE_MODE (type
))
7763 && TYPE_MODE (type
) == TYPE_MODE (inter_type
))
7765 && (! final_vec
|| inter_prec
== inside_prec
))
7766 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7768 /* If we have a sign-extension of a zero-extended value, we can
7769 replace that by a single zero-extension. Likewise if the
7770 final conversion does not change precision we can drop the
7771 intermediate conversion. */
7772 if (inside_int
&& inter_int
&& final_int
7773 && ((inside_prec
< inter_prec
&& inter_prec
< final_prec
7774 && inside_unsignedp
&& !inter_unsignedp
)
7775 || final_prec
== inter_prec
))
7776 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7778 /* Two conversions in a row are not needed unless:
7779 - some conversion is floating-point (overstrict for now), or
7780 - some conversion is a vector (overstrict for now), or
7781 - the intermediate type is narrower than both initial and
7783 - the intermediate type and innermost type differ in signedness,
7784 and the outermost type is wider than the intermediate, or
7785 - the initial type is a pointer type and the precisions of the
7786 intermediate and final types differ, or
7787 - the final type is a pointer type and the precisions of the
7788 initial and intermediate types differ. */
7789 if (! inside_float
&& ! inter_float
&& ! final_float
7790 && ! inside_vec
&& ! inter_vec
&& ! final_vec
7791 && (inter_prec
>= inside_prec
|| inter_prec
>= final_prec
)
7792 && ! (inside_int
&& inter_int
7793 && inter_unsignedp
!= inside_unsignedp
7794 && inter_prec
< final_prec
)
7795 && ((inter_unsignedp
&& inter_prec
> inside_prec
)
7796 == (final_unsignedp
&& final_prec
> inter_prec
))
7797 && ! (inside_ptr
&& inter_prec
!= final_prec
)
7798 && ! (final_ptr
&& inside_prec
!= inter_prec
)
7799 && ! (final_prec
!= GET_MODE_PRECISION (TYPE_MODE (type
))
7800 && TYPE_MODE (type
) == TYPE_MODE (inter_type
)))
7801 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7804 /* Handle (T *)&A.B.C for A being of type T and B and C
7805 living at offset zero. This occurs frequently in
7806 C++ upcasting and then accessing the base. */
7807 if (TREE_CODE (op0
) == ADDR_EXPR
7808 && POINTER_TYPE_P (type
)
7809 && handled_component_p (TREE_OPERAND (op0
, 0)))
7811 HOST_WIDE_INT bitsize
, bitpos
;
7813 enum machine_mode mode
;
7814 int unsignedp
, volatilep
;
7815 tree base
= TREE_OPERAND (op0
, 0);
7816 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
,
7817 &mode
, &unsignedp
, &volatilep
, false);
7818 /* If the reference was to a (constant) zero offset, we can use
7819 the address of the base if it has the same base type
7820 as the result type and the pointer type is unqualified. */
7821 if (! offset
&& bitpos
== 0
7822 && (TYPE_MAIN_VARIANT (TREE_TYPE (type
))
7823 == TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
7824 && TYPE_QUALS (type
) == TYPE_UNQUALIFIED
)
7825 return fold_convert_loc (loc
, type
,
7826 build_fold_addr_expr_loc (loc
, base
));
7829 if (TREE_CODE (op0
) == MODIFY_EXPR
7830 && TREE_CONSTANT (TREE_OPERAND (op0
, 1))
7831 /* Detect assigning a bitfield. */
7832 && !(TREE_CODE (TREE_OPERAND (op0
, 0)) == COMPONENT_REF
7834 (TREE_OPERAND (TREE_OPERAND (op0
, 0), 1))))
7836 /* Don't leave an assignment inside a conversion
7837 unless assigning a bitfield. */
7838 tem
= fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 1));
7839 /* First do the assignment, then return converted constant. */
7840 tem
= build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (tem
), op0
, tem
);
7841 TREE_NO_WARNING (tem
) = 1;
7842 TREE_USED (tem
) = 1;
7846 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7847 constants (if x has signed type, the sign bit cannot be set
7848 in c). This folds extension into the BIT_AND_EXPR.
7849 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7850 very likely don't have maximal range for their precision and this
7851 transformation effectively doesn't preserve non-maximal ranges. */
7852 if (TREE_CODE (type
) == INTEGER_TYPE
7853 && TREE_CODE (op0
) == BIT_AND_EXPR
7854 && TREE_CODE (TREE_OPERAND (op0
, 1)) == INTEGER_CST
)
7856 tree and_expr
= op0
;
7857 tree and0
= TREE_OPERAND (and_expr
, 0);
7858 tree and1
= TREE_OPERAND (and_expr
, 1);
7861 if (TYPE_UNSIGNED (TREE_TYPE (and_expr
))
7862 || (TYPE_PRECISION (type
)
7863 <= TYPE_PRECISION (TREE_TYPE (and_expr
))))
7865 else if (TYPE_PRECISION (TREE_TYPE (and1
))
7866 <= HOST_BITS_PER_WIDE_INT
7867 && tree_fits_uhwi_p (and1
))
7869 unsigned HOST_WIDE_INT cst
;
7871 cst
= tree_to_uhwi (and1
);
7872 cst
&= HOST_WIDE_INT_M1U
7873 << (TYPE_PRECISION (TREE_TYPE (and1
)) - 1);
7874 change
= (cst
== 0);
7875 #ifdef LOAD_EXTEND_OP
7877 && !flag_syntax_only
7878 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0
)))
7881 tree uns
= unsigned_type_for (TREE_TYPE (and0
));
7882 and0
= fold_convert_loc (loc
, uns
, and0
);
7883 and1
= fold_convert_loc (loc
, uns
, and1
);
7889 tem
= force_fit_type (type
, wi::to_widest (and1
), 0,
7890 TREE_OVERFLOW (and1
));
7891 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
7892 fold_convert_loc (loc
, type
, and0
), tem
);
7896 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7897 when one of the new casts will fold away. Conservatively we assume
7898 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7899 if (POINTER_TYPE_P (type
)
7900 && (!TYPE_RESTRICT (type
) || TYPE_RESTRICT (TREE_TYPE (arg0
)))
7901 && TREE_CODE (arg0
) == POINTER_PLUS_EXPR
7902 && !upc_shared_type_p (TREE_TYPE (type
))
7903 && !upc_shared_type_p (TREE_TYPE (
7904 TREE_TYPE (TREE_OPERAND (arg0
, 0))))
7905 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
7906 || TREE_CODE (TREE_OPERAND (arg0
, 0)) == NOP_EXPR
7907 || TREE_CODE (TREE_OPERAND (arg0
, 1)) == NOP_EXPR
))
7909 tree arg00
= TREE_OPERAND (arg0
, 0);
7910 tree arg01
= TREE_OPERAND (arg0
, 1);
7912 return fold_build_pointer_plus_loc
7913 (loc
, fold_convert_loc (loc
, type
, arg00
), arg01
);
7916 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7917 of the same precision, and X is an integer type not narrower than
7918 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7919 if (INTEGRAL_TYPE_P (type
)
7920 && TREE_CODE (op0
) == BIT_NOT_EXPR
7921 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7922 && CONVERT_EXPR_P (TREE_OPERAND (op0
, 0))
7923 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (op0
)))
7925 tem
= TREE_OPERAND (TREE_OPERAND (op0
, 0), 0);
7926 if (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7927 && TYPE_PRECISION (type
) <= TYPE_PRECISION (TREE_TYPE (tem
)))
7928 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
7929 fold_convert_loc (loc
, type
, tem
));
7932 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7933 type of X and Y (integer types only). */
7934 if (INTEGRAL_TYPE_P (type
)
7935 && TREE_CODE (op0
) == MULT_EXPR
7936 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7937 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
7939 /* Be careful not to introduce new overflows. */
7941 if (TYPE_OVERFLOW_WRAPS (type
))
7944 mult_type
= unsigned_type_for (type
);
7946 if (TYPE_PRECISION (mult_type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
7948 tem
= fold_build2_loc (loc
, MULT_EXPR
, mult_type
,
7949 fold_convert_loc (loc
, mult_type
,
7950 TREE_OPERAND (op0
, 0)),
7951 fold_convert_loc (loc
, mult_type
,
7952 TREE_OPERAND (op0
, 1)));
7953 return fold_convert_loc (loc
, type
, tem
);
7957 tem
= fold_convert_const (code
, type
, arg0
);
7958 return tem
? tem
: NULL_TREE
;
7960 case ADDR_SPACE_CONVERT_EXPR
:
7961 if (integer_zerop (arg0
))
7962 return fold_convert_const (code
, type
, arg0
);
7965 case FIXED_CONVERT_EXPR
:
7966 tem
= fold_convert_const (code
, type
, arg0
);
7967 return tem
? tem
: NULL_TREE
;
    case VIEW_CONVERT_EXPR:
      if (TREE_TYPE (op0) == type)
        return op0;
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
        return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
                                type, TREE_OPERAND (op0, 0));
      if (TREE_CODE (op0) == MEM_REF)
        return fold_build2_loc (loc, MEM_REF, type,
                                TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));

      /* For integral conversions with the same precision or pointer
         conversions use a NOP_EXPR instead.  */
      if ((INTEGRAL_TYPE_P (type)
           || (POINTER_TYPE_P (type)
               && !upc_shared_type_p (TREE_TYPE (type))))
          && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
              || (POINTER_TYPE_P (TREE_TYPE (op0))
                  && !upc_shared_type_p (TREE_TYPE (TREE_TYPE (op0)))))
          && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
        return fold_convert_loc (loc, type, op0);

      /* Strip inner integral conversions that do not change the precision.  */
      if (CONVERT_EXPR_P (op0)
          && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
              || (POINTER_TYPE_P (TREE_TYPE (op0))
                  && !upc_shared_type_p (TREE_TYPE (TREE_TYPE (op0)))))
          && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
              || (POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
                  && !upc_shared_type_p (TREE_TYPE (
                                         TREE_TYPE (TREE_OPERAND (op0, 0))))))
          && (TYPE_PRECISION (TREE_TYPE (op0))
              == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
        return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
                                type, TREE_OPERAND (op0, 0));

      return fold_view_convert_expr (type, op0);
    case NEGATE_EXPR:
      tem = fold_negate_expr (loc, arg0);
      if (tem)
        return fold_convert_loc (loc, type, tem);
      return NULL_TREE;
    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
        return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
        return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
               && TREE_CODE (type) == REAL_TYPE)
        {
          tree targ0 = strip_float_extensions (arg0);
          if (targ0 != arg0)
            return fold_convert_loc (loc, type,
                                     fold_build1_loc (loc, ABS_EXPR,
                                                      TREE_TYPE (targ0),
                                                      targ0));
        }
      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
      else if (TREE_CODE (arg0) == ABS_EXPR)
        return arg0;
      else if (tree_expr_nonnegative_p (arg0))
        return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = fold_strip_sign_ops (arg0);
          if (tem)
            return fold_build1_loc (loc, ABS_EXPR, type,
                                    fold_convert_loc (loc, type, tem));
        }
      break;
    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
        {
          tree itype = TREE_TYPE (type);
          tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
          tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
                                  negate_expr (ipart));
        }
      if (TREE_CODE (arg0) == COMPLEX_CST)
        {
          tree itype = TREE_TYPE (type);
          tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
          tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
          return build_complex (type, rpart, negate_expr (ipart));
        }
      if (TREE_CODE (arg0) == CONJ_EXPR)
        return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      break;
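      /* For example, CONJ_EXPR <COMPLEX_EXPR <a, b>> folds to
         COMPLEX_EXPR <a, -b>, and conj (conj (z)) folds back to z.  */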
    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
        return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)),
                                build_int_cst (type, 1));
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
               && ((TREE_CODE (arg0) == MINUS_EXPR
                    && integer_onep (TREE_OPERAND (arg0, 1)))
                   || (TREE_CODE (arg0) == PLUS_EXPR
                       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
        return fold_build1_loc (loc, NEGATE_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));
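      /* Both identities are two's-complement facts: ~X == -X - 1,
         hence ~(-A) == A - 1 and ~(A - 1) == -(A - 1) - 1 == -A.  */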
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
               && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
                                         fold_convert_loc (loc, type,
                                                           TREE_OPERAND (arg0, 0)))))
        return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
               && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
                                         fold_convert_loc (loc, type,
                                                           TREE_OPERAND (arg0, 1)))))
        return fold_build2_loc (loc, BIT_XOR_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)), tem);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
        {
          tree *elements;
          tree elem;
          unsigned count = VECTOR_CST_NELTS (arg0), i;

          elements = XALLOCAVEC (tree, count);
          for (i = 0; i < count; i++)
            {
              elem = VECTOR_CST_ELT (arg0, i);
              elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
              if (elem == NULL_TREE)
                break;
              elements[i] = elem;
            }
          if (i == count)
            return build_vector (type, elements);
        }
      else if (COMPARISON_CLASS_P (arg0)
               && (VECTOR_TYPE_P (type)
                   || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
        {
          tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
          enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
                                                           HONOR_NANS (TYPE_MODE (op_type)));
          if (subcode != ERROR_MARK)
            return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
                               TREE_OPERAND (arg0, 1));
        }
      break;
    case TRUTH_NOT_EXPR:
      /* Note that the operand of this must be an int
         and its values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language,
         but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (loc, arg0);
      if (!tem)
        return NULL_TREE;
      return fold_convert_loc (loc, type, tem);
    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
                                     TREE_OPERAND (arg0, 1));
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert_loc (loc, type, TREE_REALPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
                                 fold_build1_loc (loc, REALPART_EXPR, itype,
                                                  TREE_OPERAND (arg0, 0)),
                                 fold_build1_loc (loc, REALPART_EXPR, itype,
                                                  TREE_OPERAND (arg0, 1)));
          return fold_convert_loc (loc, type, tem);
        }
      if (TREE_CODE (arg0) == CONJ_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build1_loc (loc, REALPART_EXPR, itype,
                                 TREE_OPERAND (arg0, 0));
          return fold_convert_loc (loc, type, tem);
        }
      if (TREE_CODE (arg0) == CALL_EXPR)
        {
          tree fn = get_callee_fndecl (arg0);
          if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
            switch (DECL_FUNCTION_CODE (fn))
              {
              CASE_FLT_FN (BUILT_IN_CEXPI):
                fn = mathfn_built_in (type, BUILT_IN_COS);
                if (fn)
                  return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
                break;

              default:
                break;
              }
        }
      break;
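      /* The CEXPI fold above relies on cexpi (x) == cos (x) + i*sin (x),
         so its real part is exactly cos (x).  */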
    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return build_zero_cst (type);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
                                     TREE_OPERAND (arg0, 0));
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
                                 fold_build1_loc (loc, IMAGPART_EXPR, itype,
                                                  TREE_OPERAND (arg0, 0)),
                                 fold_build1_loc (loc, IMAGPART_EXPR, itype,
                                                  TREE_OPERAND (arg0, 1)));
          return fold_convert_loc (loc, type, tem);
        }
      if (TREE_CODE (arg0) == CONJ_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
          return fold_convert_loc (loc, type, negate_expr (tem));
        }
      if (TREE_CODE (arg0) == CALL_EXPR)
        {
          tree fn = get_callee_fndecl (arg0);
          if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
            switch (DECL_FUNCTION_CODE (fn))
              {
              CASE_FLT_FN (BUILT_IN_CEXPI):
                fn = mathfn_built_in (type, BUILT_IN_SIN);
                if (fn)
                  return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
                break;

              default:
                break;
              }
        }
      break;
    case INDIRECT_REF:
      /* Fold *&X to X if X is an lvalue.  */
      if (TREE_CODE (op0) == ADDR_EXPR)
        {
          tree op00 = TREE_OPERAND (op0, 0);
          if ((TREE_CODE (op00) == VAR_DECL
               || TREE_CODE (op00) == PARM_DECL
               || TREE_CODE (op00) == RESULT_DECL)
              && !TREE_READONLY (op00))
            return op00;
        }
      break;
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts;
        enum tree_code subcode;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
        if (TREE_CODE (arg0) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts * 2);
        if (!vec_cst_ctor_to_array (arg0, elts))
          return NULL_TREE;

        if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
                                   || code == VEC_UNPACK_FLOAT_LO_EXPR))
          elts += nelts;

        if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
          subcode = NOP_EXPR;
        else
          subcode = FLOAT_EXPR;

        for (i = 0; i < nelts; i++)
          {
            elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
            if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }
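      /* For example, on a little-endian target VEC_UNPACK_LO_EXPR of the
         V4HI constant { 1, 2, 3, 4 } taken as V2SI yields { 1, 2 }, each
         element widened by the NOP_EXPR (or FLOAT_EXPR) conversion.  */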
    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts;
        enum tree_code subcode;

        if (TREE_CODE (op0) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts);
        if (!vec_cst_ctor_to_array (op0, elts))
          return NULL_TREE;

        switch (code)
          {
          case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
          case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
          case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
          default: gcc_unreachable ();
          }

        for (i = 1; i < nelts; i++)
          {
            elts[0] = const_binop (subcode, elts[0], elts[i]);
            if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
              return NULL_TREE;
            elts[i] = build_zero_cst (TREE_TYPE (type));
          }

        return build_vector (type, elts);
      }
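      /* For example, REDUC_PLUS_EXPR on the constant vector { 1, 2, 3, 4 }
         accumulates into element 0 and yields { 10, 0, 0, 0 }.  */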
    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* If the operation was a conversion do _not_ mark a resulting constant
   with TREE_OVERFLOW if the original constant was not.  These conversions
   have implementation defined behavior and retaining the TREE_OVERFLOW
   flag here would confuse later passes such as VRP.  */

tree
fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
                                tree type, tree op0)
{
  tree res = fold_unary_loc (loc, code, type, op0);
  if (res
      && TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (op0) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (code))
    TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);

  return res;
}
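/* For example, folding (signed char) 300 may produce an INTEGER_CST
   carrying TREE_OVERFLOW; because the narrowing is implementation-defined
   rather than undefined, the flag is reset to that of the operand.  */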
/* Fold a binary bitwise/truth expression of code CODE and type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting expression.
   ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_truth_andor (location_t loc, enum tree_code code, tree type,
                  tree arg0, tree arg1, tree op0, tree op1)
{
  tree tem;
  /* We only do these simplifications if we are optimizing.  */
  if (!optimize)
    return NULL_TREE;

  /* Check for things like (A || B) && (A || C).  We can convert this
     to A || (B && C).  Note that either operator can be any of the four
     truth and/or operations and the transformation will still be
     valid.  Also note that we only care about order for the
     ANDIF and ORIF operators.  If B contains side effects, this
     might change the truth-value of A.  */
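  /* For example, (a || b) && (a || c) becomes a || (b && c) when B has
     no side effects; A is still evaluated first, preserving the original
     short-circuit behavior.  */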
  if (TREE_CODE (arg0) == TREE_CODE (arg1)
      && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
          || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
          || TREE_CODE (arg0) == TRUTH_AND_EXPR
          || TREE_CODE (arg0) == TRUTH_OR_EXPR)
      && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
    {
      tree a00 = TREE_OPERAND (arg0, 0);
      tree a01 = TREE_OPERAND (arg0, 1);
      tree a10 = TREE_OPERAND (arg1, 0);
      tree a11 = TREE_OPERAND (arg1, 1);
      int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
                          || TREE_CODE (arg0) == TRUTH_AND_EXPR)
                         && (code == TRUTH_AND_EXPR
                             || code == TRUTH_OR_EXPR));

      if (operand_equal_p (a00, a10, 0))
        return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
                                fold_build2_loc (loc, code, type, a01, a11));
      else if (commutative && operand_equal_p (a00, a11, 0))
        return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
                                fold_build2_loc (loc, code, type, a01, a10));
      else if (commutative && operand_equal_p (a01, a10, 0))
        return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
                                fold_build2_loc (loc, code, type, a00, a11));

      /* This case is tricky because we must either have commutative
         operators or else A10 must not have side-effects.  */

      else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
               && operand_equal_p (a01, a11, 0))
        return fold_build2_loc (loc, TREE_CODE (arg0), type,
                                fold_build2_loc (loc, code, type, a00, a10),
                                a01);
    }
  /* See if we can build a range comparison.  */
  if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
    return tem;

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
      if (tem)
        return fold_build2_loc (loc, code, type, tem, arg1);
    }

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
      if (tem)
        return fold_build2_loc (loc, code, type, arg0, tem);
    }
  /* Check for the possibility of merging component references.  If our
     lhs is another similar operation, try to merge its rhs with our
     rhs.  Then try to merge our lhs and rhs.  */
  if (TREE_CODE (arg0) == code
      && 0 != (tem = fold_truth_andor_1 (loc, code, type,
                                         TREE_OPERAND (arg0, 1), arg1)))
    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

  if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
    return tem;
  if (LOGICAL_OP_NON_SHORT_CIRCUIT
      && (code == TRUTH_AND_EXPR
          || code == TRUTH_ANDIF_EXPR
          || code == TRUTH_OR_EXPR
          || code == TRUTH_ORIF_EXPR))
    {
      enum tree_code ncode, icode;

      ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
              ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
      icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;

      /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
         or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
         We don't want to pack more than two leafs to a non-IF AND/OR
         expression.
         If the tree code of the left-hand operand isn't an AND/OR-IF code
         and isn't equal to IF-CODE, then we don't want to add the
         right-hand operand.  If the inner right-hand side of the left-hand
         operand has side-effects, or isn't simple, then we can't add to it,
         as otherwise we might destroy the if-sequence.  */
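      /* For example, (a && b) && c with simple, non-trapping b and c
         becomes a AND-IF (b AND c): the inner pair is combined without
         an extra branch.  */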
      if (TREE_CODE (arg0) == icode
          && simple_operand_p_2 (arg1)
          /* Needed for sequence points to handle trappings, and
             side-effects.  */
          && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
        {
          tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
                                 arg1);
          return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
                                  tem);
        }
      /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
         or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
      else if (TREE_CODE (arg1) == icode
               && simple_operand_p_2 (arg0)
               /* Needed for sequence points to handle trappings, and
                  side-effects.  */
               && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
        {
          tem = fold_build2_loc (loc, ncode, type,
                                 arg0, TREE_OPERAND (arg1, 0));
          return fold_build2_loc (loc, icode, type, tem,
                                  TREE_OPERAND (arg1, 1));
        }
      /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
         into (A OR B).
         For sequence point consistency, we need to check for trapping,
         and side-effects.  */
      else if (code == icode && simple_operand_p_2 (arg0)
               && simple_operand_p_2 (arg1))
        return fold_build2_loc (loc, ncode, type, arg0, arg1);
    }

  return NULL_TREE;
}
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code compl_code;

  if (code == MIN_EXPR)
    compl_code = MAX_EXPR;
  else if (code == MAX_EXPR)
    compl_code = MIN_EXPR;
  else
    gcc_unreachable ();

  /* MIN (MAX (a, b), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));

  /* MIN (MAX (b, a), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));

  /* MIN (a, MAX (a, b)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));

  /* MIN (a, MAX (b, a)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));

  return NULL_TREE;
}
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
                                 tree arg0, tree arg1,
                                 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;
  bool swap = false;
  /* Match A +- CST code arg1 and CST code arg1.  We can change the
     first form only if overflow is undefined.  */
  if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
         /* In principle pointers also have undefined overflow behavior,
            but that causes problems elsewhere.  */
         && !POINTER_TYPE_P (TREE_TYPE (arg0))
         && (code0 == MINUS_EXPR
             || code0 == PLUS_EXPR)
         && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        || code0 == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  if (code0 == INTEGER_CST)
    cst0 = arg0;
  else
    cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  if (code0 == INTEGER_CST)
    {
      /* CST <= arg1  ->  CST-1 < arg1.  */
      if (code == LE_EXPR && sgn0 == 1)
        code = LT_EXPR;
      /* -CST < arg1  ->  -CST-1 <= arg1.  */
      else if (code == LT_EXPR && sgn0 == -1)
        code = LE_EXPR;
      /* CST > arg1  ->  CST-1 >= arg1.  */
      else if (code == GT_EXPR && sgn0 == 1)
        code = GE_EXPR;
      /* -CST >= arg1  ->  -CST-1 > arg1.  */
      else if (code == GE_EXPR && sgn0 == -1)
        code = GT_EXPR;
      else
        return NULL_TREE;
      /* arg1 code' CST' might be more canonical.  */
      swap = true;
    }
  else
    {
      /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
      if (code == LT_EXPR
          && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
        code = LE_EXPR;
      /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
      else if (code == GT_EXPR
               && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
        code = GE_EXPR;
      /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
      else if (code == LE_EXPR
               && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
        code = LT_EXPR;
      /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
      else if (code == GE_EXPR
               && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
        code = GT_EXPR;
      else
        return NULL_TREE;
      *strict_overflow_p = true;
    }

  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its type's range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
      && ((sgn0 == 1
           && TYPE_MIN_VALUE (TREE_TYPE (cst0))
           && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
          || (sgn0 == -1
              && TYPE_MAX_VALUE (TREE_TYPE (cst0))
              && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
    /* We cannot swap the comparison here as that would cause us to
       endlessly recurse.  */
    return NULL_TREE;

  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
                       cst0, build_int_cst (TREE_TYPE (cst0), 1));
  if (code0 != INTEGER_CST)
    t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
  t = fold_convert (TREE_TYPE (arg1), t);

  /* If swapping might yield a more canonical form, do so.  */
  if (swap)
    return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
  else
    return fold_build2_loc (loc, code, type, t, arg1);
}
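/* For example, with undefined signed overflow "x - 5 < y" becomes
   "x - 4 <= y": the constant shrinks in magnitude while the adjusted
   comparison code keeps the predicate equivalent.  */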
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
                               tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
                                  "when reducing constant in comparison");

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
                                       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
        fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
                                       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}
/* Return whether BASE + OFFSET + BITPOS may wrap around the address
   space.  This is used to avoid issuing overflow warnings for
   expressions like &p->x which can not wrap.  */

static bool
pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
{
  if (!POINTER_TYPE_P (TREE_TYPE (base)))
    return true;

  if (bitpos < 0)
    return true;

  wide_int wi_offset;
  int precision = TYPE_PRECISION (TREE_TYPE (base));
  if (offset == NULL_TREE)
    wi_offset = wi::zero (precision);
  else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
    return true;
  else
    wi_offset = offset;

  bool overflow;
  wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
  wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
  if (overflow)
    return true;

  if (!wi::fits_uhwi_p (total))
    return true;

  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
  if (size <= 0)
    return true;

  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
     array.  */
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      HOST_WIDE_INT base_size;

      base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
      if (base_size > 0 && size < base_size)
        size = base_size;
    }

  return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
}
/* Return the HOST_WIDE_INT least significant bits of T, a sizetype
   kind INTEGER_CST.  This makes sure to properly sign-extend the
   constant.  */

static HOST_WIDE_INT
size_low_cst (const_tree t)
{
  HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
  int prec = TYPE_PRECISION (TREE_TYPE (t));
  if (prec < HOST_BITS_PER_WIDE_INT)
    return sext_hwi (w, prec);
  return w;
}
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should call fold_binary instead.  Fold a comparison with
   tree code CODE and type TYPE with operands OP0 and OP1.  Return
   the folded comparison or NULL_TREE.  */

static tree
fold_comparison (location_t loc, enum tree_code code, tree type,
                 tree op0, tree op1)
{
  const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);
  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && TREE_CODE (arg1) == INTEGER_CST
      && !TREE_OVERFLOW (arg1))
    {
      const enum tree_code
        reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
      tree variable = TREE_OPERAND (arg0, 0);
      tree new_const = int_const_binop (reverse_op, const2, const1);

      /* If the constant operation overflowed this can be
         simplified as a comparison against INT_MAX/INT_MIN.  */
      if (TREE_OVERFLOW (new_const))
        {
          int const1_sgn = tree_int_cst_sgn (const1);
          enum tree_code code2 = code;

          /* Get the sign of the constant on the lhs if the
             operation were VARIABLE + CONST1.  */
          if (TREE_CODE (arg0) == MINUS_EXPR)
            const1_sgn = -const1_sgn;

          /* The sign of the constant determines if we overflowed
             INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
             Canonicalize to the INT_MIN overflow by swapping the comparison
             if necessary.  */
          if (const1_sgn == -1)
            code2 = swap_tree_comparison (code);

          /* We now can look at the canonicalized case
               VARIABLE + 1  CODE2  INT_MIN
             and decide on the result.  */
          switch (code2)
            {
            case EQ_EXPR:
            case LT_EXPR:
            case LE_EXPR:
              return
                omit_one_operand_loc (loc, type, boolean_false_node, variable);

            case NE_EXPR:
            case GE_EXPR:
            case GT_EXPR:
              return
                omit_one_operand_loc (loc, type, boolean_true_node, variable);

            default:
              break;
            }
        }

      if (!equality_code)
        fold_overflow_warning ("assuming signed overflow does not occur "
                               "when changing X +- C1 cmp C2 to "
                               "X cmp C2 -+ C1",
                               WARN_STRICT_OVERFLOW_COMPARISON);
      return fold_build2_loc (loc, code, type, variable, new_const);
    }
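  /* For example, with undefined signed overflow "x + 10 < 20" becomes
     "x < 10".  If computing C2 -+ C1 overflows, the comparison degenerates
     to a constant, e.g. "x + 1 < INT_MIN" folds to false.  */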
  /* Transform comparisons of the form X - Y CMP 0 to X CMP Y.  */
  if (TREE_CODE (arg0) == MINUS_EXPR
      && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
      && integer_zerop (arg1))
    {
      /* ??? The transformation is valid for the other operators if overflow
         is undefined for the type, but performing it here badly interacts
         with the transformation in fold_cond_expr_with_comparison which
         attempts to synthesize ABS_EXPR.  */
      if (!equality_code)
        fold_overflow_warning ("assuming signed overflow does not occur "
                               "when changing X - Y cmp 0 to X cmp Y",
                               WARN_STRICT_OVERFLOW_COMPARISON);
      return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                              TREE_OPERAND (arg0, 1));
    }
  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
          || TREE_CODE (arg1) == ADDR_EXPR
          || TREE_CODE (arg0) == POINTER_PLUS_EXPR
          || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      enum machine_mode mode;
      int volatilep, unsignedp;
      bool indirect_base0 = false, indirect_base1 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
         get_inner_reference, but put it back by stripping INDIRECT_REF
         off the base object if possible.  indirect_baseN will be true
         if baseN is not an address but refers to the object itself.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
        {
          base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
                                       &bitsize, &bitpos0, &offset0, &mode,
                                       &unsignedp, &volatilep, false);
          if (TREE_CODE (base0) == INDIRECT_REF)
            base0 = TREE_OPERAND (base0, 0);
          else
            indirect_base0 = true;
        }
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
        {
          base0 = TREE_OPERAND (arg0, 0);
          STRIP_SIGN_NOPS (base0);
          if (TREE_CODE (base0) == ADDR_EXPR)
            {
              base0 = TREE_OPERAND (base0, 0);
              indirect_base0 = true;
            }
          offset0 = TREE_OPERAND (arg0, 1);
          if (tree_fits_shwi_p (offset0))
            {
              HOST_WIDE_INT off = size_low_cst (offset0);
              if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
                                   * BITS_PER_UNIT)
                  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
                {
                  bitpos0 = off * BITS_PER_UNIT;
                  offset0 = NULL_TREE;
                }
            }
        }

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
        {
          base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
                                       &bitsize, &bitpos1, &offset1, &mode,
                                       &unsignedp, &volatilep, false);
          if (TREE_CODE (base1) == INDIRECT_REF)
            base1 = TREE_OPERAND (base1, 0);
          else
            indirect_base1 = true;
        }
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
        {
          base1 = TREE_OPERAND (arg1, 0);
          STRIP_SIGN_NOPS (base1);
          if (TREE_CODE (base1) == ADDR_EXPR)
            {
              base1 = TREE_OPERAND (base1, 0);
              indirect_base1 = true;
            }
          offset1 = TREE_OPERAND (arg1, 1);
          if (tree_fits_shwi_p (offset1))
            {
              HOST_WIDE_INT off = size_low_cst (offset1);
              if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
                                   * BITS_PER_UNIT)
                  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
                {
                  bitpos1 = off * BITS_PER_UNIT;
                  offset1 = NULL_TREE;
                }
            }
        }
      /* A local variable can never be pointed to by
         the default SSA name of an incoming parameter.  */
      if ((TREE_CODE (arg0) == ADDR_EXPR
           && indirect_base0
           && TREE_CODE (base0) == VAR_DECL
           && auto_var_in_fn_p (base0, current_function_decl)
           && !indirect_base1
           && TREE_CODE (base1) == SSA_NAME
           && SSA_NAME_IS_DEFAULT_DEF (base1)
           && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
          || (TREE_CODE (arg1) == ADDR_EXPR
              && indirect_base1
              && TREE_CODE (base1) == VAR_DECL
              && auto_var_in_fn_p (base1, current_function_decl)
              && !indirect_base0
              && TREE_CODE (base0) == SSA_NAME
              && SSA_NAME_IS_DEFAULT_DEF (base0)
              && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
        {
          if (code == NE_EXPR)
            return constant_boolean_node (1, type);
          else if (code == EQ_EXPR)
            return constant_boolean_node (0, type);
        }
      /* If we have equivalent bases we might be able to simplify.  */
      else if (indirect_base0 == indirect_base1
               && operand_equal_p (base0, base1, 0))
        {
          /* We can fold this expression to a constant if the non-constant
             offset parts are equal.  */
          if ((offset0 == offset1
               || (offset0 && offset1
                   && operand_equal_p (offset0, offset1, 0)))
              && (equality_code
                  || (indirect_base0 && DECL_P (base0))
                  || POINTER_TYPE_OVERFLOW_UNDEFINED))
            {
              if (!equality_code
                  && bitpos0 != bitpos1
                  && (pointer_may_wrap_p (base0, offset0, bitpos0)
                      || pointer_may_wrap_p (base1, offset1, bitpos1)))
                fold_overflow_warning (("assuming pointer wraparound does not "
                                        "occur when comparing P +- C1 with "
                                        "P +- C2"),
                                       WARN_STRICT_OVERFLOW_CONDITIONAL);

              switch (code)
                {
                case EQ_EXPR:
                  return constant_boolean_node (bitpos0 == bitpos1, type);
                case NE_EXPR:
                  return constant_boolean_node (bitpos0 != bitpos1, type);
                case LT_EXPR:
                  return constant_boolean_node (bitpos0 < bitpos1, type);
                case LE_EXPR:
                  return constant_boolean_node (bitpos0 <= bitpos1, type);
                case GE_EXPR:
                  return constant_boolean_node (bitpos0 >= bitpos1, type);
                case GT_EXPR:
                  return constant_boolean_node (bitpos0 > bitpos1, type);
                default:;
                }
            }
          /* We can simplify the comparison to a comparison of the variable
             offset parts if the constant offset parts are equal.
             Be careful to use signed sizetype here because otherwise we
             mess with array offsets in the wrong way.  This is possible
             because pointer arithmetic is restricted to remain within an
             object and overflow on pointer differences is undefined as of
             6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
          else if (bitpos0 == bitpos1
                   && (equality_code
                       || (indirect_base0 && DECL_P (base0))
                       || POINTER_TYPE_OVERFLOW_UNDEFINED))
            {
              /* By converting to signed sizetype we cover middle-end pointer
                 arithmetic which operates on unsigned pointer types of size
                 type size and ARRAY_REF offsets which are properly sign or
                 zero extended from their type in case it is narrower than
                 sizetype.  */
              if (offset0 == NULL_TREE)
                offset0 = build_int_cst (ssizetype, 0);
              else
                offset0 = fold_convert_loc (loc, ssizetype, offset0);
              if (offset1 == NULL_TREE)
                offset1 = build_int_cst (ssizetype, 0);
              else
                offset1 = fold_convert_loc (loc, ssizetype, offset1);

              if (!equality_code
                  && (pointer_may_wrap_p (base0, offset0, bitpos0)
                      || pointer_may_wrap_p (base1, offset1, bitpos1)))
                fold_overflow_warning (("assuming pointer wraparound does not "
                                        "occur when comparing P +- C1 with "
                                        "P +- C2"),
                                       WARN_STRICT_OVERFLOW_COMPARISON);

              return fold_build2_loc (loc, code, type, offset0, offset1);
            }
        }
      /* For non-equal bases we can simplify if they are addresses
         of local binding decls or constants.  */
      else if (indirect_base0 && indirect_base1
               /* We know that !operand_equal_p (base0, base1, 0)
                  because the if condition was false.  But make
                  sure two decls are not the same.  */
               && base0 != base1
               && TREE_CODE (arg0) == ADDR_EXPR
               && TREE_CODE (arg1) == ADDR_EXPR
               && (((TREE_CODE (base0) == VAR_DECL
                     || TREE_CODE (base0) == PARM_DECL)
                    && (targetm.binds_local_p (base0)
                        || CONSTANT_CLASS_P (base1)))
                   || CONSTANT_CLASS_P (base0))
               && (((TREE_CODE (base1) == VAR_DECL
                     || TREE_CODE (base1) == PARM_DECL)
                    && (targetm.binds_local_p (base1)
                        || CONSTANT_CLASS_P (base0)))
                   || CONSTANT_CLASS_P (base1)))
        {
          if (code == EQ_EXPR)
            return omit_two_operands_loc (loc, type, boolean_false_node,
                                          arg0, arg1);
          else if (code == NE_EXPR)
            return omit_two_operands_loc (loc, type, boolean_true_node,
                                          arg0, arg1);
        }
      /* For equal offsets we can simplify to a comparison of the
         base addresses.  */
      else if (bitpos0 == bitpos1
               && (indirect_base0
                   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
               && (indirect_base1
                   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
               && ((offset0 == offset1)
                   || (offset0 && offset1
                       && operand_equal_p (offset0, offset1, 0))))
        {
          if (indirect_base0)
            base0 = build_fold_addr_expr_loc (loc, base0);
          if (indirect_base1)
            base1 = build_fold_addr_expr_loc (loc, base1);
          return fold_build2_loc (loc, code, type, base0, base1);
        }
    }
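  /* For example, "&s.a == &s.b" decomposes into a common base s with two
     different bit positions, so the equality folds to false.  */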
  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one and has the same sign.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
                                      "occur when combining constants around "
                                      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
         of lower absolute value and of same sign than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
                             ? MINUS_EXPR : PLUS_EXPR,
                             const2, const1);
      if (!TREE_OVERFLOW (cst)
          && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
          && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
        {
          fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
          return fold_build2_loc (loc, code, type,
                                  variable1,
                                  fold_build2_loc (loc, TREE_CODE (arg1),
                                                   TREE_TYPE (arg1),
                                                   variable2, cst));
        }

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
                             ? MINUS_EXPR : PLUS_EXPR,
                             const1, const2);
      if (!TREE_OVERFLOW (cst)
          && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
          && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
        {
          fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
          return fold_build2_loc (loc, code, type,
                                  fold_build2_loc (loc, TREE_CODE (arg0),
                                                   TREE_TYPE (arg0),
                                                   variable1, cst),
                                  variable2);
        }
    }
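  /* For example, "x + 2 < y + 1" becomes "x + 1 < y": the combined
     constant 1 is smaller in magnitude than 2 and has the same sign.  */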
  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
     signed arithmetic case.  That form is created by the compiler
     often enough for folding it to be of value.  One example is in
     computing loop trip counts after Operator Strength Reduction.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && TREE_CODE (arg0) == MULT_EXPR
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && integer_zerop (arg1))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;			/* zero */
      tree variable1 = TREE_OPERAND (arg0, 0);
      enum tree_code cmp_code = code;

      /* Handle unfolded multiplication by zero.  */
      if (integer_zerop (const1))
        return fold_build2_loc (loc, cmp_code, type, const1, const2);

      fold_overflow_warning (("assuming signed overflow does not occur when "
                              "eliminating multiplication in comparison "
                              "with zero"),
                             WARN_STRICT_OVERFLOW_COMPARISON);

      /* If const1 is negative we swap the sense of the comparison.  */
      if (tree_int_cst_sgn (const1) < 0)
        cmp_code = swap_tree_comparison (cmp_code);

      return fold_build2_loc (loc, cmp_code, type, variable1, const2);
    }
  tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
  if (tem)
    return tem;
  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
    {
      tree targ0 = strip_float_extensions (arg0);
      tree targ1 = strip_float_extensions (arg1);
      tree newtype = TREE_TYPE (targ0);

      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
        newtype = TREE_TYPE (targ1);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, newtype, targ0),
                                fold_convert_loc (loc, newtype, targ1));

      /* (-a) CMP (-b) -> b CMP a  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
                                TREE_OPERAND (arg0, 0));

      if (TREE_CODE (arg1) == REAL_CST)
        {
          REAL_VALUE_TYPE cst;
          cst = TREE_REAL_CST (arg1);

          /* (-a) CMP CST -> a swap(CMP) (-CST)  */
          if (TREE_CODE (arg0) == NEGATE_EXPR)
            return fold_build2_loc (loc, swap_tree_comparison (code), type,
                                    TREE_OPERAND (arg0, 0),
                                    build_real (TREE_TYPE (arg1),
                                                real_value_negate (&cst)));

          /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
          /* a CMP (-0) -> a CMP 0  */
          if (REAL_VALUE_MINUS_ZERO (cst))
            return fold_build2_loc (loc, code, type, arg0,
                                    build_real (TREE_TYPE (arg1), dconst0));

          /* x != NaN is always true, other ops are always false.  */
          if (REAL_VALUE_ISNAN (cst)
              && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
            {
              tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
              return omit_one_operand_loc (loc, type, tem, arg0);
            }

          /* Fold comparisons against infinity.  */
          if (REAL_VALUE_ISINF (cst)
              && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
            {
              tem = fold_inf_compare (loc, code, type, arg0, arg1);
              if (tem != NULL_TREE)
                return tem;
            }
        }

      /* If this is a comparison of a real constant with a PLUS_EXPR
         or a MINUS_EXPR of a real constant, we can convert it into a
         comparison with a revised real constant as long as no overflow
         occurs when unsafe_math_optimizations are enabled.  */
      if (flag_unsafe_math_optimizations
          && TREE_CODE (arg1) == REAL_CST
          && (TREE_CODE (arg0) == PLUS_EXPR
              || TREE_CODE (arg0) == MINUS_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
          && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
                                      ? MINUS_EXPR : PLUS_EXPR,
                                      arg1, TREE_OPERAND (arg0, 1)))
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Likewise, we can simplify a comparison of a real constant with
         a MINUS_EXPR whose first operand is also a real constant, i.e.
         (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
         floating-point types only if -fassociative-math is set.  */
      if (flag_associative_math
          && TREE_CODE (arg1) == REAL_CST
          && TREE_CODE (arg0) == MINUS_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
          && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
                                      arg1))
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, swap_tree_comparison (code), type,
                                TREE_OPERAND (arg0, 1), tem);

      /* Fold comparisons against built-in math functions.  */
      if (TREE_CODE (arg1) == REAL_CST
          && flag_unsafe_math_optimizations
          && ! flag_errno_math)
        {
          enum built_in_function fcode = builtin_mathfn_code (arg0);

          if (fcode != END_BUILTINS)
            {
              tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
              if (tem != NULL_TREE)
                return tem;
            }
        }
    }
  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
      && CONVERT_EXPR_P (arg0))
    {
      /* If we are widening one operand of an integer comparison,
         see if the other operand is similarly being widened.  Perhaps we
         can do the comparison in the narrower type.  */
      tem = fold_widened_comparison (loc, code, type, arg0, arg1);
      if (tem)
        return tem;

      /* Or if we are changing signedness.  */
      tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
      if (tem)
        return tem;
    }
  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
          || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (loc, code, type, op0, op1);
      if (tem)
        return tem;
    }
  /* Simplify comparison of something with itself.  (For IEEE
     floating-point, we can only do some of these simplifications.)  */
  if (operand_equal_p (arg0, arg1, 0))
    {
      switch (code)
        {
        case EQ_EXPR:
          if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
              || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
            return constant_boolean_node (1, type);
          break;

        case GE_EXPR:
        case LE_EXPR:
          if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
              || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
            return constant_boolean_node (1, type);
          return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);

        case NE_EXPR:
          /* For NE, we can only do this simplification if integer
             or we don't honor IEEE floating point NaNs.  */
          if (FLOAT_TYPE_P (TREE_TYPE (arg0))
              && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
            break;
          /* ... fall through ...  */
        case GT_EXPR:
        case LT_EXPR:
          return constant_boolean_node (0, type);
        default:
          gcc_unreachable ();
        }
    }
  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
          /* Don't handle degenerate cases here; they should already
             have been handled anyway.  */
          && cval1 != 0 && cval2 != 0
          && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
          && TREE_TYPE (cval1) == TREE_TYPE (cval2)
          && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
          && TYPE_MAX_VALUE (TREE_TYPE (cval1))
          && TYPE_MAX_VALUE (TREE_TYPE (cval2))
          && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
                                TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
        {
          tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
          tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

          /* We can't just pass T to eval_subst in case cval1 or cval2
             was the same as ARG1.  */

          tree high_result
            = fold_build2_loc (loc, code, type,
                               eval_subst (loc, arg0, cval1, maxval,
                                           cval2, minval),
                               arg1);
          tree equal_result
            = fold_build2_loc (loc, code, type,
                               eval_subst (loc, arg0, cval1, maxval,
                                           cval2, maxval),
                               arg1);
          tree low_result
            = fold_build2_loc (loc, code, type,
                               eval_subst (loc, arg0, cval1, minval,
                                           cval2, maxval),
                               arg1);

          /* All three of these results should be 0 or 1.  Confirm they are.
             Then use those values to select the proper code to use.  */

          if (TREE_CODE (high_result) == INTEGER_CST
              && TREE_CODE (equal_result) == INTEGER_CST
              && TREE_CODE (low_result) == INTEGER_CST)
            {
              /* Make a 3-bit mask with the high-order bit being the
                 value for `>', the next for '=', and the low for '<'.  */
              switch ((integer_onep (high_result) * 4)
                      + (integer_onep (equal_result) * 2)
                      + integer_onep (low_result))
                {
                case 0:
                  /* Always false.  */
                  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
                case 1:
                  code = LT_EXPR;
                  break;
                case 2:
                  code = EQ_EXPR;
                  break;
                case 3:
                  code = LE_EXPR;
                  break;
                case 4:
                  code = GT_EXPR;
                  break;
                case 5:
                  code = NE_EXPR;
                  break;
                case 6:
                  code = GE_EXPR;
                  break;
                case 7:
                  /* Always true.  */
                  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
                }

              if (save_p)
                {
                  tem = save_expr (build2 (code, type, cval1, cval2));
                  SET_EXPR_LOCATION (tem, loc);
                  return tem;
                }
              return fold_build2_loc (loc, code, type, cval1, cval2);
            }
        }
    }
  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (loc, code, type, arg0, arg1);
      if (tem != NULL_TREE)
        return tem;
    }
  /* Fold ~X op ~Y as Y op X.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == BIT_NOT_EXPR)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, code, type,
                              fold_convert_loc (loc, cmp_type,
                                                TREE_OPERAND (arg1, 0)),
                              TREE_OPERAND (arg0, 0));
    }

  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, swap_tree_comparison (code), type,
                              TREE_OPERAND (arg0, 0),
                              fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
                                               fold_convert_loc (loc, cmp_type,
                                                                 arg1)));
    }

  return NULL_TREE;
}
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (location_t loc, tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
      ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2_loc (loc, PLUS_EXPR, itype,
                         fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
                         fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
  return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
                          build_zero_cst (itype));
}
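/* For z = a + b*i this computes (a + b*i) * (a - b*i) = a*a + b*b, i.e.
   a real result with a zero imaginary part.  */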
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
   power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
   guarantees that P and N have the same least significant log2(M) bits.
   N is not otherwise constrained.  In particular, N is not normalized to
   0 <= N < M as is common.  In general, the precise value of P is unknown.
   M is chosen as large as possible such that constant N can be determined.

   Returns M and sets *RESIDUE to N.

   If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
   account.  This is not always possible due to PR 35705.  */

static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
                                 bool allow_func_align)
{
  enum tree_code code;

  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      unsigned int bitalign;
      get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
      *residue /= BITS_PER_UNIT;
      return bitalign / BITS_PER_UNIT;
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue,
                                                 allow_func_align);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
        {
          *residue += TREE_INT_CST_LOW (op1);
          return modulus;
        }
      else if (inner_code == MULT_EXPR)
        {
          op1 = TREE_OPERAND (op1, 1);
          if (TREE_CODE (op1) == INTEGER_CST)
            {
              unsigned HOST_WIDE_INT align;

              /* Compute the greatest power-of-2 divisor of op1.  */
              align = TREE_INT_CST_LOW (op1);
              align &= -align;

              /* If align is non-zero and less than *modulus, replace
                 *modulus with align.  If align is 0, then either op1 is 0
                 or the greatest power-of-2 divisor of op1 doesn't fit in an
                 unsigned HOST_WIDE_INT.  In either case, no additional
                 constraint is imposed.  */
              if (align)
                modulus = MIN (modulus, align);

              return modulus;
            }
        }
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
}
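/* For example, for "&a + i * 12" where &a is known to be 4-byte aligned,
   the recursion gives M = 4 for &a, and the MULT_EXPR contributes
   align = 4 (the greatest power of 2 dividing 12), so the result is
   M = 4 with *RESIDUE = 0.  */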
/* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
   CONSTRUCTOR ARG into array ELTS and return true if successful.  */

static bool
vec_cst_ctor_to_array (tree arg, tree *elts)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;

  if (TREE_CODE (arg) == VECTOR_CST)
    {
      for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
        elts[i] = VECTOR_CST_ELT (arg, i);
    }
  else if (TREE_CODE (arg) == CONSTRUCTOR)
    {
      constructor_elt *elt;

      FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
        if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
          return false;
        else
          elts[i] = elt->value;
    }
  else
    return false;

  for (; i < nelts; i++)
    elts[i]
      = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
  return true;
}
/* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
   selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
   NULL_TREE otherwise.  */

static tree
fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
  tree *elts;
  bool need_ctor = false;

  gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
              && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
  if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
      || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
    return NULL_TREE;

  elts = XALLOCAVEC (tree, nelts * 3);
  if (!vec_cst_ctor_to_array (arg0, elts)
      || !vec_cst_ctor_to_array (arg1, elts + nelts))
    return NULL_TREE;

  for (i = 0; i < nelts; i++)
    {
      if (!CONSTANT_CLASS_P (elts[sel[i]]))
        need_ctor = true;
      elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
    }

  if (need_ctor)
    {
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nelts);
      for (i = 0; i < nelts; i++)
        CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
      return build_constructor (type, v);
    }
  else
    return build_vector (type, &elts[2 * nelts]);
}
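/* For example, permuting { 0, 1, 2, 3 } and { 4, 5, 6, 7 } with
   SEL = { 0, 4, 1, 5 } indexes the 2*nelts-element concatenation of the
   two inputs and yields { 0, 4, 1, 5 }.  */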
/* Try to fold a pointer difference of type TYPE between two address
   expressions of array references AREF0 and AREF1 using location LOC.
   Return a simplified expression for the difference or NULL_TREE.  */

static tree
fold_addr_of_array_ref_difference (location_t loc, tree type,
                                   tree aref0, tree aref1)
{
  tree base0 = TREE_OPERAND (aref0, 0);
  tree base1 = TREE_OPERAND (aref1, 0);
  tree base_offset = build_int_cst (type, 0);

  /* If the bases are array references as well, recurse.  If the bases
     are pointer indirections compute the difference of the pointers.
     If the bases are equal, we are set.  */
  if ((TREE_CODE (base0) == ARRAY_REF
       && TREE_CODE (base1) == ARRAY_REF
       && (base_offset
           = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
      || (INDIRECT_REF_P (base0)
          && INDIRECT_REF_P (base1)
          && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
                                             TREE_OPERAND (base0, 0),
                                             TREE_OPERAND (base1, 0))))
      || operand_equal_p (base0, base1, 0))
    {
      tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
      tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
      tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
      tree diff = build2 (MINUS_EXPR, type, op0, op1);
      return fold_build2_loc (loc, PLUS_EXPR, type,
                              base_offset,
                              fold_build2_loc (loc, MULT_EXPR, type,
                                               diff, esz));
    }
  return NULL_TREE;
}
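/* For example, the address difference &a[i] - &a[j] folds to
   (i - j) times the element size of a, plus the difference of the
   bases, which is zero when both references share the same base.  */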
/* If the real or vector real constant CST of type TYPE has an exact
   inverse, return it, else return NULL.  */

static tree
exact_inverse (tree type, tree cst)
{
  REAL_VALUE_TYPE r;
  tree unit_type, *elts;
  enum machine_mode mode;
  unsigned vec_nelts, i;

  switch (TREE_CODE (cst))
    {
    case REAL_CST:
      r = TREE_REAL_CST (cst);

      if (exact_real_inverse (TYPE_MODE (type), &r))
        return build_real (type, r);

      return NULL_TREE;

    case VECTOR_CST:
      vec_nelts = VECTOR_CST_NELTS (cst);
      elts = XALLOCAVEC (tree, vec_nelts);
      unit_type = TREE_TYPE (type);
      mode = TYPE_MODE (unit_type);

      for (i = 0; i < vec_nelts; i++)
        {
          r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
          if (!exact_real_inverse (mode, &r))
            return NULL_TREE;
          elts[i] = build_real (unit_type, r);
        }

      return build_vector (type, elts);

    default:
      return NULL_TREE;
    }
}
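/* For example, 4.0 has the exact inverse 0.25, so "x / 4.0" may become
   "x * 0.25"; 3.0 has no exact binary inverse, so NULL_TREE is returned
   and such a division is left alone.  */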
/* Mask out the tz least significant bits of X of type TYPE where
   tz is the number of trailing zeroes in Y.  */

static wide_int
mask_with_tz (tree type, const wide_int &x, const wide_int &y)
{
  int tz = wi::ctz (y);
  if (tz > 0)
    return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
  return x;
}
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                        strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
                                        strict_overflow_p);

    case CALL_EXPR:
      {
        tree fndecl = get_callee_fndecl (t);
        if (!fndecl) return false;
        if (flag_delete_null_pointer_checks && !flag_check_new
            && DECL_IS_OPERATOR_NEW (fndecl)
            && !TREE_NOTHROW (fndecl))
          return true;
        if (flag_delete_null_pointer_checks
            && lookup_attribute ("returns_nonnull",
                                 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
          return true;
        return alloca_call_p (t);
      }

    default:
      break;
    }
  return false;
}
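/* For example, a call to a non-throwing operator new is known to return a
   nonzero pointer when -fdelete-null-pointer-checks is in effect, as is a
   call whose type carries the "returns_nonnull" attribute.  */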
/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-zero"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  LOC is the location of the resulting expression.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

tree
fold_binary_loc (location_t loc,
                 enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;
  bool strict_overflow_p;

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 2
              && op0 != NULL_TREE
              && op1 != NULL_TREE);

  arg0 = op0;
  arg1 = op1;
  /* Strip any conversions that don't change the mode.  This is
     safe for every expression, except for a comparison expression
     because its signedness is derived from its operands.  So, in
     the latter case, only strip conversions that don't change the
     signedness.  MIN_EXPR/MAX_EXPR also need the signedness of their
     arguments preserved.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     cases, the appropriate type conversions should be put back in
     the tree that will get out of the constant folder.  */

  if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
    {
      STRIP_SIGN_NOPS (arg0);
      STRIP_SIGN_NOPS (arg1);
    }
  else
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }
  /* Note that TREE_CONSTANT isn't enough: static var addresses are
     constant but we can't do arithmetic on them.  */
  if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
    {
      if (kind == tcc_binary)
        {
          /* Make sure type and arg0 have the same saturating flag.  */
          gcc_assert (TYPE_SATURATING (type)
                      == TYPE_SATURATING (TREE_TYPE (arg0)));
          tem = const_binop (code, arg0, arg1);
        }
      else if (kind == tcc_comparison)
        tem = fold_relational_const (code, type, arg0, arg1);
      else
        tem = NULL_TREE;

      if (tem != NULL_TREE)
        {
          if (TREE_TYPE (tem) != type)
            tem = fold_convert_loc (loc, type, tem);
          return tem;
        }
    }
  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, code, type, op1, op0);

  /* Likewise if this is a comparison, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (kind == tcc_comparison
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);

  tem = generic_simplify (loc, code, type, op0, op1);
  if (tem)
    return tem;
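  /* E.g. 2 + x is canonicalized to x + 2 by the swaps above, and 2 < x is
     rewritten as x > 2, so the cases below only need to test for a
     constant in ARG1.  */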
  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && TREE_CODE (type) != VECTOR_TYPE
      && ((truth_value_p (TREE_CODE (arg0))
           && (truth_value_p (TREE_CODE (arg1))
               || (TREE_CODE (arg1) == BIT_AND_EXPR
                   && integer_onep (TREE_OPERAND (arg1, 1)))))
          || (truth_value_p (TREE_CODE (arg1))
              && (truth_value_p (TREE_CODE (arg0))
                  || (TREE_CODE (arg0) == BIT_AND_EXPR
                      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
                             : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
                             : TRUTH_XOR_EXPR,
                             boolean_type_node,
                             fold_convert_loc (loc, boolean_type_node, arg0),
                             fold_convert_loc (loc, boolean_type_node, arg1));

      if (code == EQ_EXPR)
        tem = invert_truthvalue_loc (loc, tem);

      return fold_convert_loc (loc, type, tem);
    }
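  /* E.g. (a < b) & (c < d) on truth values becomes the TRUTH_AND_EXPR
     (a < b) && (c < d), and (a < b) == (c < d) becomes the inversion of
     (a < b) ^ (c < d).  */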
  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        {
          tem = fold_build2_loc (loc, code, type,
                                 fold_convert_loc (loc, TREE_TYPE (op0),
                                                   TREE_OPERAND (arg0, 1)),
                                 op1);
          return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                             tem);
        }

      if (TREE_CODE (arg1) == COMPOUND_EXPR
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_build2_loc (loc, code, type, op0,
                                 fold_convert_loc (loc, TREE_TYPE (op1),
                                                   TREE_OPERAND (arg1, 1)));
          return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
                             tem);
        }

      if (TREE_CODE (arg0) == COND_EXPR
          || TREE_CODE (arg0) == VEC_COND_EXPR
          || COMPARISON_CLASS_P (arg0))
        {
          tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
                                                     arg0, arg1,
                                                     /*cond_first_p=*/1);
          if (tem != NULL_TREE)
            return tem;
        }

      if (TREE_CODE (arg1) == COND_EXPR
          || TREE_CODE (arg1) == VEC_COND_EXPR
          || COMPARISON_CLASS_P (arg1))
        {
          tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
                                                     arg1, arg0,
                                                     /*cond_first_p=*/0);
          if (tem != NULL_TREE)
            return tem;
        }
    }

  switch (code)
    {
    case MEM_REF:
      /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
        {
          tree iref = TREE_OPERAND (arg0, 0);
          return fold_build2 (MEM_REF, type,
                              TREE_OPERAND (iref, 0),
                              int_const_binop (PLUS_EXPR, arg1,
                                               TREE_OPERAND (iref, 1)));
        }

      /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && handled_component_p (TREE_OPERAND (arg0, 0)))
        {
          tree base;
          HOST_WIDE_INT coffset;
          base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
                                                &coffset);
          if (!base)
            return NULL_TREE;
          return fold_build2 (MEM_REF, type,
                              build_fold_addr_expr (base),
                              int_const_binop (PLUS_EXPR, arg1,
                                               size_int (coffset)));
        }

      return NULL_TREE;
    case POINTER_PLUS_EXPR:
      /* 0 +p index -> (type)index */
      if (integer_zerop (arg0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

      /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
          && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
        return fold_convert_loc (loc, type,
                                 fold_build2_loc (loc, PLUS_EXPR, sizetype,
                                                  fold_convert_loc (loc, sizetype,
                                                                    arg1),
                                                  fold_convert_loc (loc, sizetype,
                                                                    arg0)));

      /* (PTR +p B) +p A -> PTR +p (B + A) */
      if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
          && !upc_shared_type_p (TREE_TYPE (type)))
        {
          tree inner;
          tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
          tree arg00 = TREE_OPERAND (arg0, 0);
          inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
                                   arg01, fold_convert_loc (loc, sizetype, arg1));
          return fold_convert_loc (loc, type,
                                   fold_build_pointer_plus_loc (loc,
                                                                arg00, inner));
        }

      /* PTR_CST +p CST -> CST1 */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
        return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
                                fold_convert_loc (loc, type, arg1));

      return NULL_TREE;
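      /* E.g. (p p+ 4) p+ 8 folds to p p+ 12 by the (PTR +p B) +p A rule
         above (for non-UPC pointer types).  */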
    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR
          && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1)
          && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                fold_convert_loc (loc, type, arg1),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));

      /* Disable further optimizations involving UPC shared pointers,
         because integers are not interoperable with shared pointers.  */
      if ((TREE_TYPE (arg0) && POINTER_TYPE_P (TREE_TYPE (arg0))
           && upc_shared_type_p (TREE_TYPE (TREE_TYPE (arg0))))
          || (TREE_TYPE (arg1) && POINTER_TYPE_P (TREE_TYPE (arg1))
              && upc_shared_type_p (TREE_TYPE (TREE_TYPE (arg1)))))
        return NULL_TREE;
      if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
        {
          /* Convert ~A + 1 to -A.  */
          if (TREE_CODE (arg0) == BIT_NOT_EXPR
              && integer_each_onep (arg1))
            return fold_build1_loc (loc, NEGATE_EXPR, type,
                                    fold_convert_loc (loc, type,
                                                      TREE_OPERAND (arg0, 0)));

          /* ~X + X is -1.  */
          if (TREE_CODE (arg0) == BIT_NOT_EXPR
              && !TYPE_OVERFLOW_TRAPS (type))
            {
              tree tem = TREE_OPERAND (arg0, 0);

              STRIP_NOPS (tem);
              if (operand_equal_p (tem, arg1, 0))
                {
                  t1 = build_all_ones_cst (type);
                  return omit_one_operand_loc (loc, type, t1, arg1);
                }
            }

          /* X + ~X is -1.  */
          if (TREE_CODE (arg1) == BIT_NOT_EXPR
              && !TYPE_OVERFLOW_TRAPS (type))
            {
              tree tem = TREE_OPERAND (arg1, 0);

              STRIP_NOPS (tem);
              if (operand_equal_p (arg0, tem, 0))
                {
                  t1 = build_all_ones_cst (type);
                  return omit_one_operand_loc (loc, type, t1, arg0);
                }
            }

          /* X + (X / CST) * -CST is X % CST.  */
          if (TREE_CODE (arg1) == MULT_EXPR
              && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
              && operand_equal_p (arg0,
                                  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
            {
              tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
              tree cst1 = TREE_OPERAND (arg1, 1);
              tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
                                          cst1, cst0);
              if (sum && integer_zerop (sum))
                return fold_convert_loc (loc, type,
                                         fold_build2_loc (loc, TRUNC_MOD_EXPR,
                                                          TREE_TYPE (arg0), arg0,
                                                          cst0));
            }
        }
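      /* The transformations above rely on two's complement identities,
         e.g. ~a + 1 == -a and a + ~a == -1 for any integer a.  */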
      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
         one.  Make sure the type is not saturating and has the signedness of
         the stripped operands, as fold_plusminus_mult_expr will re-associate.
         ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && !TYPE_SATURATING (type)
          && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
          && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
          && (!FLOAT_TYPE_P (type) || flag_associative_math))
        {
          tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
          if (tem)
            return tem;
        }
))
10205 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10206 with a constant, and the two constants have no bits in common,
10207 we should treat this as a BIT_IOR_EXPR since this may produce more
10208 simplifications. */
10209 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10210 && TREE_CODE (arg1
) == BIT_AND_EXPR
10211 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
10212 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
10213 && wi::bit_and (TREE_OPERAND (arg0
, 1),
10214 TREE_OPERAND (arg1
, 1)) == 0)
10216 code
= BIT_IOR_EXPR
;
10220 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10221 (plus (plus (mult) (mult)) (foo)) so that we can
10222 take advantage of the factoring cases below. */
10223 if (TYPE_OVERFLOW_WRAPS (type
)
10224 && (((TREE_CODE (arg0
) == PLUS_EXPR
10225 || TREE_CODE (arg0
) == MINUS_EXPR
)
10226 && TREE_CODE (arg1
) == MULT_EXPR
)
10227 || ((TREE_CODE (arg1
) == PLUS_EXPR
10228 || TREE_CODE (arg1
) == MINUS_EXPR
)
10229 && TREE_CODE (arg0
) == MULT_EXPR
)))
10231 tree parg0
, parg1
, parg
, marg
;
10232 enum tree_code pcode
;
10234 if (TREE_CODE (arg1
) == MULT_EXPR
)
10235 parg
= arg0
, marg
= arg1
;
10237 parg
= arg1
, marg
= arg0
;
10238 pcode
= TREE_CODE (parg
);
10239 parg0
= TREE_OPERAND (parg
, 0);
10240 parg1
= TREE_OPERAND (parg
, 1);
10241 STRIP_NOPS (parg0
);
10242 STRIP_NOPS (parg1
);
10244 if (TREE_CODE (parg0
) == MULT_EXPR
10245 && TREE_CODE (parg1
) != MULT_EXPR
)
10246 return fold_build2_loc (loc
, pcode
, type
,
10247 fold_build2_loc (loc
, PLUS_EXPR
, type
,
10248 fold_convert_loc (loc
, type
,
10250 fold_convert_loc (loc
, type
,
10252 fold_convert_loc (loc
, type
, parg1
));
10253 if (TREE_CODE (parg0
) != MULT_EXPR
10254 && TREE_CODE (parg1
) == MULT_EXPR
)
10256 fold_build2_loc (loc
, PLUS_EXPR
, type
,
10257 fold_convert_loc (loc
, type
, parg0
),
10258 fold_build2_loc (loc
, pcode
, type
,
10259 fold_convert_loc (loc
, type
, marg
),
10260 fold_convert_loc (loc
, type
,
          /* See if ARG1 is zero and X + ARG1 reduces to X.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* Likewise if the operands are reversed.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

          /* Convert X + -C into X - C.  */
          if (TREE_CODE (arg1) == REAL_CST
              && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
            {
              tem = fold_negate_const (arg1, type);
              if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
                return fold_build2_loc (loc, MINUS_EXPR, type,
                                        fold_convert_loc (loc, type, arg0),
                                        fold_convert_loc (loc, type, tem));
            }

          /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
             to __complex__ ( x, y ).  This is not the same for SNaNs or
             if signed zeros are involved.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
            {
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
              tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
              tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
              bool arg0rz = false, arg0iz = false;
              if ((arg0r && (arg0rz = real_zerop (arg0r)))
                  || (arg0i && (arg0iz = real_zerop (arg0i))))
                {
                  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
                  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
                  if (arg0rz && arg1i && real_zerop (arg1i))
                    {
                      tree rp = arg1r ? arg1r
                        : build1 (REALPART_EXPR, rtype, arg1);
                      tree ip = arg0i ? arg0i
                        : build1 (IMAGPART_EXPR, rtype, arg0);
                      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                    }
                  else if (arg0iz && arg1r && real_zerop (arg1r))
                    {
                      tree rp = arg0r ? arg0r
                        : build1 (REALPART_EXPR, rtype, arg0);
                      tree ip = arg1i ? arg1i
                        : build1 (IMAGPART_EXPR, rtype, arg1);
                      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                    }
                }
            }
          if (flag_unsafe_math_optimizations
              && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
              && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
              && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
            return tem;

          /* Convert x+x into x*2.0.  */
          if (operand_equal_p (arg0, arg1, 0)
              && SCALAR_FLOAT_TYPE_P (type))
            return fold_build2_loc (loc, MULT_EXPR, type, arg0,
                                    build_real (type, dconst2));

          /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
             We associate floats only if the user has specified
             -fassociative-math.  */
          if (flag_associative_math
              && TREE_CODE (arg1) == PLUS_EXPR
              && TREE_CODE (arg0) != MULT_EXPR)
            {
              tree tree10 = TREE_OPERAND (arg1, 0);
              tree tree11 = TREE_OPERAND (arg1, 1);
              if (TREE_CODE (tree11) == MULT_EXPR
                  && TREE_CODE (tree10) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
                  return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
                }
            }
          /* Convert (b*c + d*e) + a into b*c + (d*e +a).
             We associate floats only if the user has specified
             -fassociative-math.  */
          if (flag_associative_math
              && TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) != MULT_EXPR)
            {
              tree tree00 = TREE_OPERAND (arg0, 0);
              tree tree01 = TREE_OPERAND (arg0, 1);
              if (TREE_CODE (tree01) == MULT_EXPR
                  && TREE_CODE (tree00) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
                  return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
                }
            }
        }

    bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
         is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
         is a rotate of A by B bits.  */
      {
        enum tree_code code0, code1;
        tree rtype;
        code0 = TREE_CODE (arg0);
        code1 = TREE_CODE (arg1);
        if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
             || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
            && operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 0), 0)
            && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
                TYPE_UNSIGNED (rtype))
            /* Only create rotates in complete modes.  Other cases are not
               expanded properly.  */
            && (element_precision (rtype)
                == element_precision (TYPE_MODE (rtype))))
          {
            tree tree01, tree11;
            enum tree_code code01, code11;

            tree01 = TREE_OPERAND (arg0, 1);
            tree11 = TREE_OPERAND (arg1, 1);
            STRIP_NOPS (tree01);
            STRIP_NOPS (tree11);
            code01 = TREE_CODE (tree01);
            code11 = TREE_CODE (tree11);
            if (code01 == INTEGER_CST
                && code11 == INTEGER_CST
                && (wi::to_widest (tree01) + wi::to_widest (tree11)
                    == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
              {
                tem = build2_loc (loc, LROTATE_EXPR,
                                  TREE_TYPE (TREE_OPERAND (arg0, 0)),
                                  TREE_OPERAND (arg0, 0),
                                  code0 == LSHIFT_EXPR ? tree01 : tree11);
                return fold_convert_loc (loc, type, tem);
              }
            else if (code11 == MINUS_EXPR)
              {
                tree tree110, tree111;
                tree110 = TREE_OPERAND (tree11, 0);
                tree111 = TREE_OPERAND (tree11, 1);
                STRIP_NOPS (tree110);
                STRIP_NOPS (tree111);
                if (TREE_CODE (tree110) == INTEGER_CST
                    && 0 == compare_tree_int (tree110,
                                              element_precision
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree01, tree111, 0))
                  return
                    fold_convert_loc (loc, type,
                                      build2 ((code0 == LSHIFT_EXPR
                                               ? LROTATE_EXPR
                                               : RROTATE_EXPR),
                                              TREE_TYPE (TREE_OPERAND (arg0, 0)),
                                              TREE_OPERAND (arg0, 0), tree01));
              }
            else if (code01 == MINUS_EXPR)
              {
                tree tree010, tree011;
                tree010 = TREE_OPERAND (tree01, 0);
                tree011 = TREE_OPERAND (tree01, 1);
                STRIP_NOPS (tree010);
                STRIP_NOPS (tree011);
                if (TREE_CODE (tree010) == INTEGER_CST
                    && 0 == compare_tree_int (tree010,
                                              element_precision
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree11, tree011, 0))
                  return fold_convert_loc
                    (loc, type,
                     build2 ((code0 != LSHIFT_EXPR
                              ? LROTATE_EXPR
                              : RROTATE_EXPR),
                             TREE_TYPE (TREE_OPERAND (arg0, 0)),
                             TREE_OPERAND (arg0, 0), tree11));
              }
          }
      }
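      /* E.g. for unsigned 32-bit x, (x << 3) + (x >> 29) is recognized here
         as a rotate of x left by 3 bits, and (x << n) + (x >> (32 - n))
         matches the variable-count form handled by the MINUS_EXPR cases.  */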
    associate:
      /* In most languages, can't associate operations on floats through
         parentheses.  Rather than remember where the parentheses were, we
         don't associate floats at all, unless the user has specified
         -fassociative-math.
         And, we need to make sure type is not saturating.  */

      if ((! FLOAT_TYPE_P (type) || flag_associative_math)
          && !TYPE_SATURATING (type))
        {
          tree var0, con0, lit0, minus_lit0;
          tree var1, con1, lit1, minus_lit1;
          tree atype = type;
          bool ok = true;

          /* Split both trees into variables, constants, and literals.  Then
             associate each group together, the constants with literals,
             then the result with variables.  This increases the chances of
             literals being recombined later and of generating relocatable
             expressions for the sum of a constant and literal.  */
          var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
          var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
                             code == MINUS_EXPR);

          /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
          if (code == MINUS_EXPR)
            code = PLUS_EXPR;

          /* With undefined overflow prefer doing association in a type
             which wraps on overflow, if that is one of the operand types.  */
          if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
              || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
            {
              if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
                  && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
                atype = TREE_TYPE (arg0);
              else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
                       && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
                atype = TREE_TYPE (arg1);
              gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
            }

          /* With undefined overflow we can only associate constants with one
             variable, and constants whose association doesn't overflow.  */
          if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
              || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
            {
              if (var0 && var1)
                {
                  tree tmp0 = var0;
                  tree tmp1 = var1;

                  if (TREE_CODE (tmp0) == NEGATE_EXPR)
                    tmp0 = TREE_OPERAND (tmp0, 0);
                  if (CONVERT_EXPR_P (tmp0)
                      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
                      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
                          <= TYPE_PRECISION (atype)))
                    tmp0 = TREE_OPERAND (tmp0, 0);
                  if (TREE_CODE (tmp1) == NEGATE_EXPR)
                    tmp1 = TREE_OPERAND (tmp1, 0);
                  if (CONVERT_EXPR_P (tmp1)
                      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
                      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
                          <= TYPE_PRECISION (atype)))
                    tmp1 = TREE_OPERAND (tmp1, 0);
                  /* The only case we can still associate with two variables
                     is if they are the same, modulo negation and bit-pattern
                     preserving conversions.  */
                  if (!operand_equal_p (tmp0, tmp1, 0))
                    ok = false;
                }
            }

          /* Only do something if we found more than two objects.  Otherwise,
             nothing has changed and we risk infinite recursion.  */
          if (ok
              && (2 < ((var0 != 0) + (var1 != 0)
                       + (con0 != 0) + (con1 != 0)
                       + (lit0 != 0) + (lit1 != 0)
                       + (minus_lit0 != 0) + (minus_lit1 != 0))))
            {
              bool any_overflows = false;
              if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
              if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
              if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
              if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
              var0 = associate_trees (loc, var0, var1, code, atype);
              con0 = associate_trees (loc, con0, con1, code, atype);
              lit0 = associate_trees (loc, lit0, lit1, code, atype);
              minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
                                            code, atype);

              /* Preserve the MINUS_EXPR if the negative part of the literal is
                 greater than the positive part.  Otherwise, the multiplicative
                 folding code (i.e. extract_muldiv) may be fooled in case
                 unsigned constants are subtracted, like in the following
                 example: ((X*2 + 4) - 8U)/2.  */
              if (minus_lit0 && lit0)
                {
                  if (TREE_CODE (lit0) == INTEGER_CST
                      && TREE_CODE (minus_lit0) == INTEGER_CST
                      && tree_int_cst_lt (lit0, minus_lit0))
                    {
                      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
                                                    MINUS_EXPR, atype);
                      lit0 = 0;
                    }
                  else
                    {
                      lit0 = associate_trees (loc, lit0, minus_lit0,
                                              MINUS_EXPR, atype);
                      minus_lit0 = 0;
                    }
                }

              /* Don't introduce overflows through reassociation.  */
              if (!any_overflows
                  && ((lit0 && TREE_OVERFLOW (lit0))
                      || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
                return NULL_TREE;

              if (minus_lit0)
                {
                  if (con0 == 0)
                    return
                      fold_convert_loc (loc, type,
                                        associate_trees (loc, var0, minus_lit0,
                                                         MINUS_EXPR, atype));
                  else
                    {
                      con0 = associate_trees (loc, con0, minus_lit0,
                                              MINUS_EXPR, atype);
                      return
                        fold_convert_loc (loc, type,
                                          associate_trees (loc, var0, con0,
                                                           PLUS_EXPR, atype));
                    }
                }

              con0 = associate_trees (loc, con0, lit0, code, atype);
              return
                fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
                                                              code, atype));
            }
        }

      return NULL_TREE;
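      /* E.g. (x + 1) + (y + 2) splits into variables {x, y} and literals
         {1, 2}, which the code above reassociates to (x + y) + 3, subject
         to the overflow checks.  */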
    case MINUS_EXPR:
      /* Pointer simplifications for subtraction, simple reassociations.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
        {
          /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
          if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
              && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
            {
              tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
              tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
              tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
              tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
              return fold_build2_loc (loc, PLUS_EXPR, type,
                                      fold_build2_loc (loc, MINUS_EXPR, type,
                                                       arg00, arg10),
                                      fold_build2_loc (loc, MINUS_EXPR, type,
                                                       arg01, arg11));
            }
          /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1
             simplifies.  */
          else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
            {
              tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
              tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
              tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
                                          fold_convert_loc (loc, type, arg1));
              if (tmp)
                return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
            }
          /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
             simplifies.  */
          else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
            {
              tree arg10 = fold_convert_loc (loc, type,
                                             TREE_OPERAND (arg1, 0));
              tree arg11 = fold_convert_loc (loc, type,
                                             TREE_OPERAND (arg1, 1));
              tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
                                          fold_convert_loc (loc, type, arg0),
                                          arg10);
              if (tmp)
                return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
            }
        }
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2_loc (loc, PLUS_EXPR, type, op0,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));

      /* Disable further optimizations involving UPC shared pointers,
         because integers are not interoperable with shared pointers.
         (The test below also detects pointer difference between
         shared pointers, which cannot be folded.)  */

      if (TREE_TYPE (arg0) && POINTER_TYPE_P (TREE_TYPE (arg0))
          && upc_shared_type_p (TREE_TYPE (TREE_TYPE (arg0))))
        return NULL_TREE;

      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && negate_expr_p (arg1)
          && reorder_operands_p (arg0, arg1))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));
      /* Convert -A - 1 to ~A.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && integer_each_onep (arg1)
          && !TYPE_OVERFLOW_TRAPS (type))
        return fold_build1_loc (loc, BIT_NOT_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));

      /* Convert -1 - A to ~A.  */
      if (TREE_CODE (type) != COMPLEX_TYPE
          && integer_all_onesp (arg0))
        return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
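      /* Both conversions above are two's complement identities:
         -a - 1 == ~a and -1 - a == ~a for any integer a.  */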
      /* X - (X / Y) * Y is X % Y.  */
      if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
          && TREE_CODE (arg1) == MULT_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
          && operand_equal_p (arg0,
                              TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
                              TREE_OPERAND (arg1, 1), 0))
        return
          fold_convert_loc (loc, type,
                            fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
                                             arg0, TREE_OPERAND (arg1, 1)));

      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg0))
            return negate_expr (fold_convert_loc (loc, type, arg1));

          /* Fold A - (A & B) into ~B & A.  */
          if (!TREE_SIDE_EFFECTS (arg0)
              && TREE_CODE (arg1) == BIT_AND_EXPR)
            {
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
                {
                  tree arg10 = fold_convert_loc (loc, type,
                                                 TREE_OPERAND (arg1, 0));
                  return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                          fold_build1_loc (loc, BIT_NOT_EXPR,
                                                           type, arg10),
                                          fold_convert_loc (loc, type, arg0));
                }
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
                {
                  tree arg11 = fold_convert_loc (loc,
                                                 type, TREE_OPERAND (arg1, 1));
                  return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                          fold_build1_loc (loc, BIT_NOT_EXPR,
                                                           type, arg11),
                                          fold_convert_loc (loc, type, arg0));
                }
            }

          /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
             any power of 2 minus 1.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0))
            {
              tree mask0 = TREE_OPERAND (arg0, 1);
              tree mask1 = TREE_OPERAND (arg1, 1);
              tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);

              if (operand_equal_p (tem, mask1, 0)
                  && integer_pow2p (fold_build2_loc (loc, PLUS_EXPR, type,
                                                     mask1, integer_one_node)))
                {
                  tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
                                         TREE_OPERAND (arg0, 0), mask1);
                  return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
                }
            }
        }

      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
         ARG0 is zero and X + ARG0 reduces to X, since that would mean
         (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
        return negate_expr (fold_convert_loc (loc, type, arg1));
));
10756 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10757 __complex__ ( x, -y ). This is not the same for SNaNs or if
10758 signed zeros are involved. */
10759 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
10760 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10761 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10763 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10764 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
10765 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
10766 bool arg0rz
= false, arg0iz
= false;
10767 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
10768 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
10770 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
10771 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
10772 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
10774 tree rp
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
10776 : build1 (REALPART_EXPR
, rtype
, arg1
));
10777 tree ip
= arg0i
? arg0i
10778 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
10779 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10781 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
10783 tree rp
= arg0r
? arg0r
10784 : build1 (REALPART_EXPR
, rtype
, arg0
);
10785 tree ip
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
10787 : build1 (IMAGPART_EXPR
, rtype
, arg1
));
10788 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
      /* A - B -> A + (-B) if B is easily negatable.  */
      if (negate_expr_p (arg1)
          && ((FLOAT_TYPE_P (type)
               /* Avoid this transformation if B is a positive REAL_CST.  */
               && (TREE_CODE (arg1) != REAL_CST
                   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
              || INTEGRAL_TYPE_P (type)))
        return fold_build2_loc (loc, PLUS_EXPR, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)));

      /* Try folding difference of addresses.  */
      {
        HOST_WIDE_INT diff;

        if ((TREE_CODE (arg0) == ADDR_EXPR
             || TREE_CODE (arg1) == ADDR_EXPR)
            && ptr_difference_const (arg0, arg1, &diff))
          return build_int_cst_type (type, diff);
      }

      /* Fold &a[i] - &a[j] to i-j.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
          && TREE_CODE (arg1) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
        {
          tree tem = fold_addr_of_array_ref_difference (loc, type,
                                                        TREE_OPERAND (arg0, 0),
                                                        TREE_OPERAND (arg1, 0));
          if (tem)
            return tem;
        }

      if (FLOAT_TYPE_P (type)
          && flag_unsafe_math_optimizations
          && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
          && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
          && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
        return tem;

      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
         one.  Make sure the type is not saturating and has the signedness of
         the stripped operands, as fold_plusminus_mult_expr will re-associate.
         ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && !TYPE_SATURATING (type)
          && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
          && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
          && (!FLOAT_TYPE_P (type) || flag_associative_math))
        {
          tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
          if (tem)
            return tem;
        }

      goto associate;
    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2_loc (loc, MULT_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)),
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2_loc (loc, MULT_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg0)),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
        {
          /* Transform x * -1 into -x.  Make sure to do the negation
             on the original operand with conversions not stripped
             because we can only strip non-sign-changing conversions.  */
          if (integer_minus_onep (arg1))
            return fold_convert_loc (loc, type, negate_expr (op0));
          /* Transform x * -C into -x * C if x is easily negatable.  */
          if (TREE_CODE (arg1) == INTEGER_CST
              && tree_int_cst_sgn (arg1) == -1
              && negate_expr_p (arg0)
              && (tem = negate_expr (arg1)) != arg1
              && !TREE_OVERFLOW (tem))
            return fold_build2_loc (loc, MULT_EXPR, type,
                                    fold_convert_loc (loc, type,
                                                      negate_expr (arg0)),
                                    tem);

          /* (a * (1 << b)) is (a << b)  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg1, 0)))
            return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
                                    TREE_OPERAND (arg1, 1));
          if (TREE_CODE (arg0) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg0, 0)))
            return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
                                    TREE_OPERAND (arg0, 1));

          /* (A + A) * C -> A * 2 * C  */
          if (TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) == INTEGER_CST
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg0, 1), 0))
            return fold_build2_loc (loc, MULT_EXPR, type,
                                    omit_one_operand_loc (loc, type,
                                                          TREE_OPERAND (arg0, 0),
                                                          TREE_OPERAND (arg0, 1)),
                                    fold_build2_loc (loc, MULT_EXPR, type,
                                                     build_int_cst (type, 2), arg1));

          /* ((T) (X /[ex] C)) * C cancels out if the conversion is
             sign-changing only.  */
          if (TREE_CODE (arg1) == INTEGER_CST
              && TREE_CODE (arg0) == EXACT_DIV_EXPR
              && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
            return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

          strict_overflow_p = false;
          if (TREE_CODE (arg1) == INTEGER_CST
              && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                             &strict_overflow_p)))
            {
              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying "
                                        "multiplication"),
                                       WARN_STRICT_OVERFLOW_MISC);
              return fold_convert_loc (loc, type, tem);
            }
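          /* extract_muldiv distributes a constant multiplier or divisor
             into the operand where that is safe; e.g. (x * 8) * 2 can
             simplify to x * 16 here (an illustrative sketch of what that
             helper handles, not an exhaustive description).  */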
          /* Optimize z * conj(z) for integer complex numbers.  */
          if (TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (loc, type, arg1);
          if (TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (loc, type, arg0);
        }
      else
        {
          /* Maybe fold x * 0 to 0.  The expressions aren't the same
             when x is NaN, since x * 0 is also NaN.  Nor are they the
             same in modes with signed zeros, since multiplying a
             negative value by 0 gives -0, not +0.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_zerop (arg1))
            return omit_one_operand_loc (loc, type, arg1, arg0);
          /* In IEEE floating point, x*1 is not equivalent to x for snans.
             Likewise for complex arithmetic with signed zeros.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
                  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
              && real_onep (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* Transform x * -1.0 into -x.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
                  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
              && real_minus_onep (arg1))
            return fold_convert_loc (loc, type, negate_expr (arg0));

          /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
             the result for floating point types due to rounding so it is applied
             only if -fassociative-math was specified.  */
          if (flag_associative_math
              && TREE_CODE (arg0) == RDIV_EXPR
              && TREE_CODE (arg1) == REAL_CST
              && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
            {
              tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
                                      arg1);
              if (tem)
                return fold_build2_loc (loc, RDIV_EXPR, type, tem,
                                        TREE_OPERAND (arg0, 1));
            }

          /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
          if (operand_equal_p (arg0, arg1, 0))
            {
              tree tem = fold_strip_sign_ops (arg0);
              if (tem != NULL_TREE)
                {
                  tem = fold_convert_loc (loc, type, tem);
                  return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
                }
            }
          /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
             This is not the same for NaNs or if signed zeros are
             involved.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
              && TREE_CODE (arg1) == COMPLEX_CST
              && real_zerop (TREE_REALPART (arg1)))
            {
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
              if (real_onep (TREE_IMAGPART (arg1)))
                return
                  fold_build2_loc (loc, COMPLEX_EXPR, type,
                                   negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
                                                                 rtype, arg0)),
                                   fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
              else if (real_minus_onep (TREE_IMAGPART (arg1)))
                return
                  fold_build2_loc (loc, COMPLEX_EXPR, type,
                                   fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
                                   negate_expr (fold_build1_loc (loc, REALPART_EXPR,
                                                                 rtype, arg0)));
            }

          /* Optimize z * conj(z) for floating point complex numbers.
             Guarded by flag_unsafe_math_optimizations as non-finite
             imaginary components don't produce scalar results.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (loc, type, arg1);
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (loc, type, arg0);
          if (flag_unsafe_math_optimizations)
            {
              enum built_in_function fcode0 = builtin_mathfn_code (arg0);
              enum built_in_function fcode1 = builtin_mathfn_code (arg1);

              /* Optimizations of root(...)*root(...).  */
              if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
                {
                  tree rootfn, arg;
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);

                  /* Optimize sqrt(x)*sqrt(x) as x.  */
                  if (BUILTIN_SQRT_P (fcode0)
                      && operand_equal_p (arg00, arg10, 0)
                      && ! HONOR_SNANS (TYPE_MODE (type)))
                    return arg00;

                  /* Optimize root(x)*root(y) as root(x*y).  */
                  rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
                  return build_call_expr_loc (loc, rootfn, 1, arg);
                }

              /* Optimize expN(x)*expN(y) as expN(x+y).  */
              if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
                {
                  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
                                              CALL_EXPR_ARG (arg0, 0),
                                              CALL_EXPR_ARG (arg1, 0));
                  return build_call_expr_loc (loc, expfn, 1, arg);
                }

              /* Optimizations of pow(...)*pow(...).  */
              if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
                  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
                  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
                {
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg01 = CALL_EXPR_ARG (arg0, 1);
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);
                  tree arg11 = CALL_EXPR_ARG (arg1, 1);

                  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
                  if (operand_equal_p (arg01, arg11, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      tree arg = fold_build2_loc (loc, MULT_EXPR, type,
                                                  arg00, arg10);
                      return build_call_expr_loc (loc, powfn, 2, arg, arg01);
                    }

                  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
                  if (operand_equal_p (arg00, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
                                                  arg01, arg11);
                      return build_call_expr_loc (loc, powfn, 2, arg00, arg);
                    }
                }

              /* Optimize tan(x)*cos(x) as sin(x).  */
              if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
                   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
                   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
                   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
                   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
                   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
                  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                      CALL_EXPR_ARG (arg1, 0), 0))
                {
                  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

                  if (sinfn != NULL_TREE)
                    return build_call_expr_loc (loc, sinfn, 1,
                                                CALL_EXPR_ARG (arg0, 0));
                }

              /* Optimize x*pow(x,c) as pow(x,c+1).  */
              if (fcode1 == BUILT_IN_POW
                  || fcode1 == BUILT_IN_POWF
                  || fcode1 == BUILT_IN_POWL)
                {
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);
                  tree arg11 = CALL_EXPR_ARG (arg1, 1);
                  if (TREE_CODE (arg11) == REAL_CST
                      && !TREE_OVERFLOW (arg11)
                      && operand_equal_p (arg0, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
                      REAL_VALUE_TYPE c;
                      tree arg;

                      c = TREE_REAL_CST (arg11);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
                    }
                }

              /* Optimize pow(x,c)*x as pow(x,c+1).  */
              if (fcode0 == BUILT_IN_POW
                  || fcode0 == BUILT_IN_POWF
                  || fcode0 == BUILT_IN_POWL)
                {
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg01 = CALL_EXPR_ARG (arg0, 1);
                  if (TREE_CODE (arg01) == REAL_CST
                      && !TREE_OVERFLOW (arg01)
                      && operand_equal_p (arg1, arg00, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      REAL_VALUE_TYPE c;
                      tree arg;

                      c = TREE_REAL_CST (arg01);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      return build_call_expr_loc (loc, powfn, 2, arg1, arg);
                    }
                }

              /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x.  */
              if (!in_gimple_form
                  && optimize
                  && operand_equal_p (arg0, arg1, 0))
                {
                  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

                  if (powfn)
                    {
                      tree arg = build_real (type, dconst2);
                      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
                    }
                }
            }
        }
      goto associate;
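      /* Under -funsafe-math-optimizations the rules above rewrite, e.g.,
         sqrt (x) * sqrt (y) into sqrt (x * y) and exp (x) * exp (y) into
         exp (x + y); the flag guard exists because these rewrites are not
         exact for all IEEE inputs.  */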
    case BIT_IOR_EXPR:
    bit_ior:
      if (operand_equal_p (arg0, arg1, 0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg1);
        }

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg0);
        }
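      /* Building t1 as the BIT_NOT of a zero constant of TYPE yields an
         all-ones constant in the right type, which also works for vector
         types where a plain integer -1 node would not.  */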
      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          int width = TYPE_PRECISION (type), w;
          wide_int c1 = TREE_OPERAND (arg0, 1);
          wide_int c2 = arg1;

          /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
          if ((c1 & c2) == c1)
            return omit_one_operand_loc (loc, type, arg1,
                                         TREE_OPERAND (arg0, 0));

          wide_int msk = wi::mask (width, false,
                                   TYPE_PRECISION (TREE_TYPE (arg1)));

          /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
          if (msk.and_not (c1 | c2) == 0)
            return fold_build2_loc (loc, BIT_IOR_EXPR, type,
                                    TREE_OPERAND (arg0, 0), arg1);

          /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
             unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
             mode which allows further optimizations.  */
          c1 &= msk;
          c2 &= msk;
          wide_int c3 = c1.and_not (c2);
          for (w = BITS_PER_UNIT; w <= width; w <<= 1)
            {
              wide_int mask = wi::mask (w, false,
                                        TYPE_PRECISION (type));
              if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
                {
                  c3 = mask;
                  break;
                }
            }

          if (c3 != c1)
            return fold_build2_loc (loc, BIT_IOR_EXPR, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, type,
                                                     TREE_OPERAND (arg0, 0),
                                                     wide_int_to_tree (type,
                                                                       c3)),
                                    arg1);
        }
11236 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11237 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11238 return omit_one_operand_loc (loc
, type
, arg1
, TREE_OPERAND (arg0
, 0));
11239 /* (X & Y) | X is (Y, X). */
11240 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11241 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11242 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
11243 return omit_one_operand_loc (loc
, type
, arg1
, TREE_OPERAND (arg0
, 1));
11244 /* X | (X & Y) is (Y, X). */
11245 if (TREE_CODE (arg1
) == BIT_AND_EXPR
11246 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0)
11247 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 1)))
11248 return omit_one_operand_loc (loc
, type
, arg0
, TREE_OPERAND (arg1
, 1));
11249 /* X | (Y & X) is (Y, X). */
11250 if (TREE_CODE (arg1
) == BIT_AND_EXPR
11251 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
11252 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
11253 return omit_one_operand_loc (loc
, type
, arg0
, TREE_OPERAND (arg1
, 0));
11255 /* (X & ~Y) | (~X & Y) is X ^ Y */
11256 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11257 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
11259 tree a0
, a1
, l0
, l1
, n0
, n1
;
11261 a0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
11262 a1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
11264 l0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11265 l1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11267 n0
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, l0
);
11268 n1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, l1
);
11270 if ((operand_equal_p (n0
, a0
, 0)
11271 && operand_equal_p (n1
, a1
, 0))
11272 || (operand_equal_p (n0
, a1
, 0)
11273 && operand_equal_p (n1
, a0
, 0)))
11274 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
, l0
, n1
);
11277 t1
= distribute_bit_expr (loc
, code
, type
, arg0
, arg1
);
11278 if (t1
!= NULL_TREE
)
      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

         This results in more efficient code for machines without a NAND
         instruction.  Combine will canonicalize to the first form
         which will allow use of NAND instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return
            fold_build1_loc (loc, BIT_NOT_EXPR, type,
                             build2 (BIT_AND_EXPR, type,
                                     fold_convert_loc (loc, type,
                                                       TREE_OPERAND (arg0, 0)),
                                     fold_convert_loc (loc, type,
                                                       TREE_OPERAND (arg1, 0))));
        }

      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      if (integer_all_onesp (arg1))
        return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);

      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg1);
        }

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg0);
        }

      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
         with a constant, and the two constants have no bits in common,
         we should treat this as a BIT_IOR_EXPR since this may produce more
         simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
          && wi::bit_and (TREE_OPERAND (arg0, 1),
                          TREE_OPERAND (arg1, 1)) == 0)
        {
          code = BIT_IOR_EXPR;
          goto bit_ior;
        }
      /* (X | Y) ^ X -> Y & ~X.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          tree t2 = TREE_OPERAND (arg0, 1);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
                                arg1);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* (Y | X) ^ X -> Y & ~X.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tree t2 = TREE_OPERAND (arg0, 0);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
                                arg1);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* X ^ (X | Y) -> Y & ~X.  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
        {
          tree t2 = TREE_OPERAND (arg1, 1);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
                                arg0);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* X ^ (Y | X) -> Y & ~X.  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
        {
          tree t2 = TREE_OPERAND (arg1, 0);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
                                arg0);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }
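      /* Sanity check of the rules above with x = 0b1010, y = 0b0110:
         (x | y) ^ x == 0b1110 ^ 0b1010 == 0b0100, which equals y & ~x.  */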
      /* Convert ~X ^ ~Y to X ^ Y.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));

      /* Convert ~X ^ C to X ^ ~C.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == INTEGER_CST)
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)),
                                fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));

      /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && INTEGRAL_TYPE_P (type)
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_onep (arg1))
        return fold_build2_loc (loc, EQ_EXPR, type, arg0,
                                build_zero_cst (TREE_TYPE (arg0)));

      /* Fold (X & Y) ^ Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }
      /* Fold (X & Y) ^ X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }
      /* Fold X ^ (X & Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_convert_loc (loc, type, arg0),
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
        }
      /* Fold X ^ (Y & X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg0));
        }

      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
11456 /* See if this can be simplified into a rotate first. If that
11457 is unsuccessful continue in the association code. */
11461 if (integer_all_onesp (arg1
))
11462 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11463 if (operand_equal_p (arg0
, arg1
, 0))
11464 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11466 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11467 if ((TREE_CODE (arg0
) == BIT_NOT_EXPR
11468 || TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11469 || (TREE_CODE (arg0
) == EQ_EXPR
11470 && integer_zerop (TREE_OPERAND (arg0
, 1))))
11471 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11472 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
11474 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11475 if ((TREE_CODE (arg1
) == BIT_NOT_EXPR
11476 || TREE_CODE (arg1
) == TRUTH_NOT_EXPR
11477 || (TREE_CODE (arg1
) == EQ_EXPR
11478 && integer_zerop (TREE_OPERAND (arg1
, 1))))
11479 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11480 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
      /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree tmp1 = fold_convert_loc (loc, type, arg1);
	  tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	  tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
	  tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
	  return
	    fold_convert_loc (loc, type,
			      fold_build2_loc (loc, BIT_IOR_EXPR,
					       type, tmp2, tmp3));
	}
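      /* E.g. with C1 == 0x0f and C2 == 0xfc this rewrites
	 (x | 0x0f) & 0xfc as (x & 0xfc) | 0x0c, exposing the constant
	 OR-mask 0x0c == 0x0f & 0xfc directly.  */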
      /* (X | Y) & Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X | Y) & X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X & (X | Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X & (Y | X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
      /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && INTEGRAL_TYPE_P (type)
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_onep (arg1))
	{
	  tree tem2;
	  tem = TREE_OPERAND (arg0, 0);
	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
				  tem, tem2);
	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
				  build_zero_cst (TREE_TYPE (tem)));
	}

      /* Fold ~X & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && INTEGRAL_TYPE_P (type)
	  && integer_onep (arg1))
	{
	  tree tem2;
	  tem = TREE_OPERAND (arg0, 0);
	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
				  tem, tem2);
	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
				  build_zero_cst (TREE_TYPE (tem)));
	}

      /* Fold !X & 1 as X == 0.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && integer_onep (arg1))
	{
	  tem = TREE_OPERAND (arg0, 0);
	  return fold_build2_loc (loc, EQ_EXPR, type, tem,
				  build_zero_cst (TREE_TYPE (tem)));
	}
      /* Fold (X ^ Y) & Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}

      /* Fold (X ^ Y) & X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}

      /* Fold X & (X ^ Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_convert_loc (loc, type, arg0),
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
	}

      /* Fold X & (Y ^ X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg0));
	}
      /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
	 multiple of 1 << CST.  */
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  wide_int cst1 = arg1;
	  wide_int ncst1 = -cst1;
	  if ((cst1 & ncst1) == ncst1
	      && multiple_of_p (type, arg0,
				wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
	    return fold_convert_loc (loc, type, arg0);
	}
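      /* E.g. (x * 8) & -8 folds to x * 8: the product has its low
	 three bits clear, and -8 is the mask ~7, so the AND cannot
	 change any bit of the result.  */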
      /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
	 bits from CST2.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == MULT_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  wide_int warg1 = arg1;
	  wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));

	  if (masked == 0)
	    return omit_two_operands_loc (loc, type, build_zero_cst (type),
					  arg0, arg1);
	  else if (masked != warg1)
	    {
	      /* Avoid the transform if arg1 is a mask of some
		 mode which allows further optimizations.  */
	      int pop = wi::popcount (warg1);
	      if (!(pop >= BITS_PER_UNIT
		    && exact_log2 (pop) != -1
		    && wi::mask (pop, false, warg1.get_precision ()) == warg1))
		return fold_build2_loc (loc, code, type, op0,
					wide_int_to_tree (type, masked));
	    }
	}
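      /* E.g. (x * 4) & 3 folds to 0, since the two trailing zero bits
	 of the multiplier wipe out the whole mask; (x * 4) & 7 drops
	 the known-zero bits and becomes (x * 4) & 4.  */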
      /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
	 ((A & N) + B) & M -> (A + B) & M
	 Similarly if (N & M) == 0,
	 ((A | N) + B) & M -> (A + B) & M
	 and for - instead of + (or unary - instead of +)
	 and/or ^ instead of |.
	 If B is constant and (B & M) == 0, fold into A & M.  */
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  wide_int cst1 = arg1;
	  if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
	      && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	      && (TREE_CODE (arg0) == PLUS_EXPR
		  || TREE_CODE (arg0) == MINUS_EXPR
		  || TREE_CODE (arg0) == NEGATE_EXPR)
	      && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
		  || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
	    {
	      tree pmop[2];
	      int which = 0;
	      wide_int cst0;

	      /* Now we know that arg0 is (C + D) or (C - D) or
		 -C and arg1 (M) is == (1LL << cst) - 1.
		 Store C into PMOP[0] and D into PMOP[1].  */
	      pmop[0] = TREE_OPERAND (arg0, 0);
	      pmop[1] = NULL;
	      if (TREE_CODE (arg0) != NEGATE_EXPR)
		{
		  pmop[1] = TREE_OPERAND (arg0, 1);
		  which = 1;
		}

	      if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
		which = -1;

	      for (; which >= 0; which--)
		switch (TREE_CODE (pmop[which]))
		  {
		  case BIT_AND_EXPR:
		  case BIT_IOR_EXPR:
		  case BIT_XOR_EXPR:
		    if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
			!= INTEGER_CST)
		      break;
		    cst0 = TREE_OPERAND (pmop[which], 1);
		    cst0 &= cst1;
		    if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
		      {
			if (cst0 != cst1)
			  break;
		      }
		    else if (cst0 != 0)
		      break;
		    /* If C or D is of the form (A & N) where
		       (N & M) == M, or of the form (A | N) or
		       (A ^ N) where (N & M) == 0, replace it with A.  */
		    pmop[which] = TREE_OPERAND (pmop[which], 0);
		    break;
		  case INTEGER_CST:
		    /* If C or D is a N where (N & M) == 0, it can be
		       omitted (assumed 0).  */
		    if ((TREE_CODE (arg0) == PLUS_EXPR
			 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
			&& (cst1 & pmop[which]) == 0)
		      pmop[which] = NULL;
		    break;
		  default:
		    break;
		  }

	      /* Only build anything new if we optimized one or both arguments
		 above.  */
	      if (pmop[0] != TREE_OPERAND (arg0, 0)
		  || (TREE_CODE (arg0) != NEGATE_EXPR
		      && pmop[1] != TREE_OPERAND (arg0, 1)))
		{
		  tree utype = TREE_TYPE (arg0);
		  if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
		    {
		      /* Perform the operations in a type that has defined
			 overflow behavior.  */
		      utype = unsigned_type_for (TREE_TYPE (arg0));
		      if (pmop[0] != NULL)
			pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
		      if (pmop[1] != NULL)
			pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
		    }

		  if (TREE_CODE (arg0) == NEGATE_EXPR)
		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
		  else if (TREE_CODE (arg0) == PLUS_EXPR)
		    {
		      if (pmop[0] != NULL && pmop[1] != NULL)
			tem = fold_build2_loc (loc, PLUS_EXPR, utype,
					       pmop[0], pmop[1]);
		      else if (pmop[0] != NULL)
			tem = pmop[0];
		      else if (pmop[1] != NULL)
			tem = pmop[1];
		      else
			return build_int_cst (type, 0);
		    }
		  else if (pmop[0] == NULL)
		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
		  else
		    tem = fold_build2_loc (loc, MINUS_EXPR, utype,
					   pmop[0], pmop[1]);
		  /* TEM is now the new binary +, - or unary - replacement.  */
		  tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
					 fold_convert_loc (loc, utype, arg1));
		  return fold_convert_loc (loc, type, tem);
		}
	    }
	}
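      /* E.g. ((a & 0xff) + b) & 0x0f folds to (a + b) & 0x0f here:
	 N == 0xff covers every bit of M == 0x0f, so masking A early
	 cannot change the low four bits of the sum.  */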
      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

	  wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
	  if (mask == -1)
	    return
	      fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	}
      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

	 This results in more efficient code for machines without a NOR
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NOR instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				  build2 (BIT_IOR_EXPR, type,
					  fold_convert_loc (loc, type,
							    TREE_OPERAND (arg0, 0)),
					  fold_convert_loc (loc, type,
							    TREE_OPERAND (arg1, 0))));
	}
      /* If arg0 is derived from the address of an object or function, we may
	 be able to fold this expression using the object or function's
	 alignment.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
	{
	  unsigned HOST_WIDE_INT modulus, residue;
	  unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);

	  modulus = get_pointer_modulus_and_residue (arg0, &residue,
						     integer_onep (arg1));

	  /* This works because modulus is a power of 2.  If this weren't the
	     case, we'd have to replace it by its greatest power-of-2
	     divisor: modulus & -modulus.  */
	  if (low < modulus)
	    return build_int_cst (type, residue & low);
	}
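      /* E.g. if arg0 is the address of an object known to be 16-byte
	 aligned, modulus is 16 and residue 0, so ANDing the address
	 with 15 folds to the constant 0.  */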
      /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
	      (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
	 if the new mask might be further optimized.  */
      if ((TREE_CODE (arg0) == LSHIFT_EXPR
	   || TREE_CODE (arg0) == RSHIFT_EXPR)
	  && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
	  && TREE_CODE (arg1) == INTEGER_CST
	  && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
	  && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
	  && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
	      < TYPE_PRECISION (TREE_TYPE (arg0))))
	{
	  unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
	  unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
	  unsigned HOST_WIDE_INT newmask, zerobits = 0;
	  tree shift_type = TREE_TYPE (arg0);

	  if (TREE_CODE (arg0) == LSHIFT_EXPR)
	    zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
	  else if (TREE_CODE (arg0) == RSHIFT_EXPR
		   && TYPE_PRECISION (TREE_TYPE (arg0))
		      == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
	    {
	      prec = TYPE_PRECISION (TREE_TYPE (arg0));
	      tree arg00 = TREE_OPERAND (arg0, 0);
	      /* See if more bits can be proven as zero because of
		 zero extension.  */
	      if (TREE_CODE (arg00) == NOP_EXPR
		  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
		{
		  tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
		  if (TYPE_PRECISION (inner_type)
		      == GET_MODE_PRECISION (TYPE_MODE (inner_type))
		      && TYPE_PRECISION (inner_type) < prec)
		    {
		      prec = TYPE_PRECISION (inner_type);
		      /* See if we can shorten the right shift.  */
		      if (shiftc < prec)
			shift_type = inner_type;
		      /* Otherwise X >> C1 is all zeros, so we'll optimize
			 it into (X, 0) later on by making sure zerobits
			 is all ones.  */
		    }
		}
	      zerobits = ~(unsigned HOST_WIDE_INT) 0;
	      if (shiftc < prec)
		{
		  zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
		  zerobits <<= prec - shiftc;
		}
	      /* For arithmetic shift if sign bit could be set, zerobits
		 can contain actually sign bits, so no transformation is
		 possible, unless MASK masks them all away.  In that
		 case the shift needs to be converted into logical shift.  */
	      if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
		  && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
		{
		  if ((mask & zerobits) == 0)
		    shift_type = unsigned_type_for (TREE_TYPE (arg0));
		  else
		    zerobits = 0;
		}
	    }

	  /* ((X << 16) & 0xff00) is (X, 0).  */
	  if ((mask & zerobits) == mask)
	    return omit_one_operand_loc (loc, type,
					 build_int_cst (type, 0), arg0);

	  newmask = mask | zerobits;
	  if (newmask != mask && (newmask & (newmask + 1)) == 0)
	    {
	      /* Only do the transformation if NEWMASK is some integer
		 mode's mask.  */
	      for (prec = BITS_PER_UNIT;
		   prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
		if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
		  break;
	      if (prec < HOST_BITS_PER_WIDE_INT
		  || newmask == ~(unsigned HOST_WIDE_INT) 0)
		{
		  tree newmaskt;

		  if (shift_type != TREE_TYPE (arg0))
		    {
		      tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
					     fold_convert_loc (loc, shift_type,
							       TREE_OPERAND (arg0, 0)),
					     TREE_OPERAND (arg0, 1));
		      tem = fold_convert_loc (loc, type, tem);
		    }
		  else
		    tem = op0;
		  newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
		  if (!tree_int_cst_equal (newmaskt, arg1))
		    return fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
					    newmaskt);
		}
	    }
	}
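      /* E.g. for a 32-bit unsigned x, (x >> 24) & 0xff widens the mask
	 to all ones, since x >> 24 can have only its low eight bits
	 set; the now-redundant AND can then be removed by the
	 integer_all_onesp fold above.  */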
      goto associate;

    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
	 of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
	  && real_zerop (arg1))
	return NULL_TREE;

      /* Optimize A / A to 1.0 if we don't care about
	 NaNs or Infinities.  Skip the transformation
	 for non-real operands.  */
      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
	  && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
	  && operand_equal_p (arg0, arg1, 0))
	{
	  tree r = build_real (TREE_TYPE (arg0), dconst1);

	  return omit_two_operands_loc (loc, type, r, arg0, arg1);
	}

      /* The complex version of the above A / A optimization.  */
      if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
	  && operand_equal_p (arg0, arg1, 0))
	{
	  tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
	  if (! HONOR_NANS (TYPE_MODE (elem_type))
	      && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
	    {
	      tree r = build_real (elem_type, dconst1);
	      /* omit_two_operands will call fold_convert for us.  */
	      return omit_two_operands_loc (loc, type, r, arg0, arg1);
	    }
	}
      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				TREE_OPERAND (arg0, 0),
				negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				negate_expr (arg0),
				TREE_OPERAND (arg1, 0));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_minus_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type,
						      negate_expr (arg0)));
      /* If ARG1 is a constant, we can convert this to a multiply by the
	 reciprocal.  This does not have the same rounding properties,
	 so only do this if -freciprocal-math.  We can actually
	 always safely do it if ARG1 is a power of two, but it's hard to
	 tell if it is or not in a portable manner.  */
      if (optimize
	  && (TREE_CODE (arg1) == REAL_CST
	      || (TREE_CODE (arg1) == COMPLEX_CST
		  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
	      || (TREE_CODE (arg1) == VECTOR_CST
		  && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
	{
	  if (flag_reciprocal_math
	      && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
	    return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
	  /* Find the reciprocal if optimizing and the result is exact.
	     TODO: Complex reciprocal not implemented.  */
	  if (TREE_CODE (arg1) != COMPLEX_CST)
	    {
	      tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);

	      if (inverse)
		return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
	    }
	}
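      /* E.g. when optimizing, x / 4.0 becomes x * 0.25 (the inverse of
	 a power of two is exact), while x / 3.0 becomes x * (1.0/3.0)
	 only under -freciprocal-math, since that product rounds
	 differently from the division.  */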
      /* Convert A/B/C to A/(B*C).  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg0) == RDIV_EXPR)
	return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, MULT_EXPR, type,
						 TREE_OPERAND (arg0, 1), arg1));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg1) == RDIV_EXPR)
	return fold_build2_loc (loc, MULT_EXPR, type,
				fold_build2_loc (loc, RDIV_EXPR, type, arg0,
						 TREE_OPERAND (arg1, 0)),
				TREE_OPERAND (arg1, 1));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (arg0) == REAL_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
	{
	  tree tem = const_binop (RDIV_EXPR, arg0,
				  TREE_OPERAND (arg1, 1));
	  if (tem)
	    return fold_build2_loc (loc, RDIV_EXPR, type, tem,
				    TREE_OPERAND (arg1, 0));
	}
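      /* E.g. with -freciprocal-math, 10.0 / (x * 5.0) folds via the
	 last pattern to (10.0 / 5.0) / x, i.e. 2.0 / x.  */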
      if (flag_unsafe_math_optimizations)
	{
	  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	  enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	  /* Optimize sin(x)/cos(x) as tan(x).  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				  CALL_EXPR_ARG (arg1, 0), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		return build_call_expr_loc (loc, tanfn, 1,
					    CALL_EXPR_ARG (arg0, 0));
	    }

	  /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
	  if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				  CALL_EXPR_ARG (arg1, 0), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		{
		  tree tmp = build_call_expr_loc (loc, tanfn, 1,
						  CALL_EXPR_ARG (arg0, 0));
		  return fold_build2_loc (loc, RDIV_EXPR, type,
					  build_real (type, dconst1), tmp);
		}
	    }

	  /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
	     NaNs or Infinities.  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg1, 0);

	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		  && operand_equal_p (arg00, arg01, 0))
		{
		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		  if (cosfn != NULL_TREE)
		    return build_call_expr_loc (loc, cosfn, 1, arg00);
		}
	    }

	  /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
	     NaNs or Infinities.  */
	  if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg1, 0);

	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		  && operand_equal_p (arg00, arg01, 0))
		{
		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		  if (cosfn != NULL_TREE)
		    {
		      tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
		      return fold_build2_loc (loc, RDIV_EXPR, type,
					      build_real (type, dconst1),
					      tmp);
		    }
		}
	    }

	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
	  if (fcode0 == BUILT_IN_POW
	      || fcode0 == BUILT_IN_POWF
	      || fcode0 == BUILT_IN_POWL)
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      if (TREE_CODE (arg01) == REAL_CST
		  && !TREE_OVERFLOW (arg01)
		  && operand_equal_p (arg1, arg00, 0))
		{
		  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  REAL_VALUE_TYPE c;
		  tree arg;

		  c = TREE_REAL_CST (arg01);
		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
		  arg = build_real (type, c);
		  return build_call_expr_loc (loc, powfn, 2, arg1, arg);
		}
	    }

	  /* Optimize a/root(b/c) into a*root(c/b).  */
	  if (BUILTIN_ROOT_P (fcode1))
	    {
	      tree rootarg = CALL_EXPR_ARG (arg1, 0);

	      if (TREE_CODE (rootarg) == RDIV_EXPR)
		{
		  tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
		  tree b = TREE_OPERAND (rootarg, 0);
		  tree c = TREE_OPERAND (rootarg, 1);

		  tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);

		  tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
		  return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
		}
	    }

	  /* Optimize x/expN(y) into x*expN(-y).  */
	  if (BUILTIN_EXPONENT_P (fcode1))
	    {
	      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
	      tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
	      arg1 = build_call_expr_loc (loc,
					  expfn, 1,
					  fold_convert_loc (loc, type, arg));
	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
	    }

	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
	  if (fcode1 == BUILT_IN_POW
	      || fcode1 == BUILT_IN_POWF
	      || fcode1 == BUILT_IN_POWL)
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
	      tree arg10 = CALL_EXPR_ARG (arg1, 0);
	      tree arg11 = CALL_EXPR_ARG (arg1, 1);
	      tree neg11 = fold_convert_loc (loc, type,
					     negate_expr (arg11));
	      arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
	    }
	}
      return NULL_TREE;
    case TRUNC_DIV_EXPR:
      /* Optimize (X & (-A)) / A where A is a power of 2,
	 to X >> log2(A).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
	  && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
	{
	  tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
				      arg1, TREE_OPERAND (arg0, 1));
	  if (sum && integer_zerop (sum))
	    {
	      tree pow2 = build_int_cst (integer_type_node,
					 wi::exact_log2 (arg1));
	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
				      TREE_OPERAND (arg0, 0), pow2);
	    }
	}
:
12175 /* Simplify A / (B << N) where A and B are positive and B is
12176 a power of 2, to A >> (N + log2(B)). */
12177 strict_overflow_p
= false;
12178 if (TREE_CODE (arg1
) == LSHIFT_EXPR
12179 && (TYPE_UNSIGNED (type
)
12180 || tree_expr_nonnegative_warnv_p (op0
, &strict_overflow_p
)))
12182 tree sval
= TREE_OPERAND (arg1
, 0);
12183 if (integer_pow2p (sval
) && tree_int_cst_sgn (sval
) > 0)
12185 tree sh_cnt
= TREE_OPERAND (arg1
, 1);
12186 tree pow2
= build_int_cst (TREE_TYPE (sh_cnt
),
12187 wi::exact_log2 (sval
));
12189 if (strict_overflow_p
)
12190 fold_overflow_warning (("assuming signed overflow does not "
12191 "occur when simplifying A / (B << N)"),
12192 WARN_STRICT_OVERFLOW_MISC
);
12194 sh_cnt
= fold_build2_loc (loc
, PLUS_EXPR
, TREE_TYPE (sh_cnt
),
12196 return fold_build2_loc (loc
, RSHIFT_EXPR
, type
,
12197 fold_convert_loc (loc
, type
, arg0
), sh_cnt
);
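      /* E.g. for unsigned a, a / (2 << n) becomes a >> (n + 1),
	 trading the expensive division for a shift.  */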
      /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
	 TRUNC_DIV_EXPR.  Rewrite into the latter in this case.  */
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_UNSIGNED (type)
	  && code == FLOOR_DIV_EXPR)
	return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);

      /* Fall through */
    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_zerop (arg1))
	return NULL_TREE;

      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && wi::eq_p (arg1, -1))
	return fold_convert_loc (loc, type, negate_expr (arg0));

      /* Convert -A / -B to A / B when the type is signed and overflow is
	 undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg0) == NEGATE_EXPR
	  && negate_expr_p (arg1))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, type,
						    TREE_OPERAND (arg0, 0)),
				  fold_convert_loc (loc, type,
						    negate_expr (arg1)));
	}
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && negate_expr_p (arg0))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, type,
						    negate_expr (arg0)),
				  fold_convert_loc (loc, type,
						    TREE_OPERAND (arg1, 0)));
	}

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	 operation, EXACT_DIV_EXPR.

	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, it's not clear if they do
	 after the last round of changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	  && multiple_of_p (type, arg0, arg1))
	return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert_loc (loc, type, tem);
	}

      return NULL_TREE;
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && wi::eq_p (arg1, -1))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
	  && TYPE_SIGN (type) == SIGNED
	  && TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)
	  && wi::neg_p (arg1)
	  && !TYPE_OVERFLOW_TRAPS (type)
	  /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
	  && !sign_bit_p (arg1, arg1))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  negate_expr (arg1)));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && !TYPE_OVERFLOW_TRAPS (type))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying modulus"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert_loc (loc, type, tem);
	}
      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
	 i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
	  && (TYPE_UNSIGNED (type)
	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	{
	  tree c = arg1;
	  /* Also optimize A % (C << N) where C is a power of 2,
	     to A & ((C << N) - 1).  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR)
	    c = TREE_OPERAND (arg1, 0);

	  if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
	    {
	      tree mask
		= fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
				   build_int_cst (TREE_TYPE (arg1), 1));
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying "
					"X % (power of two)"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_convert_loc (loc, type, arg0),
				      fold_convert_loc (loc, type, mask));
	    }
	}
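      /* E.g. for unsigned x, x % 8 becomes x & 7, and x % (4 << n)
	 becomes x & ((4 << n) - 1).  */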
      return NULL_TREE;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
	  && tree_expr_nonnegative_p (arg1))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      /* Prefer vector1 << scalar to vector1 << vector2
	 if vector2 is uniform.  */
      if (VECTOR_TYPE_P (TREE_TYPE (arg1))
	  && (tem = uniform_vector_p (arg1)) != NULL_TREE)
	return fold_build2_loc (loc, code, type, op0, tem);

      /* Since negative shift count is not well-defined,
	 don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	return NULL_TREE;

      prec = element_precision (type);

      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
      if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
	  && tree_to_uhwi (arg1) < prec
	  && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
	  && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
	{
	  unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
			      + tree_to_uhwi (arg1));

	  /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
	     being well defined.  */
	  if (low >= prec)
	    {
	      if (code == LROTATE_EXPR || code == RROTATE_EXPR)
		low = low % prec;
	      else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
		return omit_one_operand_loc (loc, type, build_zero_cst (type),
					     TREE_OPERAND (arg0, 0));
	      else
		low = prec - 1;
	    }

	  return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				  build_int_cst (TREE_TYPE (arg1), low));
	}
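      /* E.g. (x << 3) << 5 becomes x << 8 when the type is wider than
	 eight bits; if the combined count reaches the precision, the
	 result is known to be zero for unsigned or left shifts.  */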
      /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
	 into x & ((unsigned)-1 >> c) for unsigned types.  */
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
	   || (TYPE_UNSIGNED (type)
	       && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
	  && tree_fits_uhwi_p (arg1)
	  && tree_to_uhwi (arg1) < prec
	  && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
	  && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
	{
	  HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
	  HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
	  tree lshift;
	  tree arg00;

	  if (low0 == low1)
	    {
	      arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

	      lshift = build_minus_one_cst (type);
	      lshift = const_binop (code, lshift, arg1);

	      return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
	    }
	}
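      /* E.g. for a 32-bit unsigned x, (x << 8) >> 8 becomes
	 x & 0x00ffffff, and (x >> 8) << 8 becomes x & 0xffffff00.  */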
      /* Rewrite an LROTATE_EXPR by a constant into an
	 RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
	{
	  tree tem = build_int_cst (TREE_TYPE (arg1), prec);
	  tem = const_binop (MINUS_EXPR, tem, arg1);
	  return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
	}
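      /* E.g. on a 32-bit type a rotate left by 8 becomes a rotate
	 right by 24, so the patterns below only need to handle
	 RROTATE_EXPR.  */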
      /* If we have a rotate of a bit operation with the rotate count and
	 the second operand of the bit operation both constant,
	 permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == BIT_AND_EXPR
	      || TREE_CODE (arg0) == BIT_IOR_EXPR
	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, TREE_CODE (arg0), type,
				fold_build2_loc (loc, code, type,
						 TREE_OPERAND (arg0, 0), arg1),
				fold_build2_loc (loc, code, type,
						 TREE_OPERAND (arg0, 1), arg1));

      /* Two consecutive rotates adding up to some integer
	 multiple of the precision of the type can be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == RROTATE_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
			     prec) == 0)
	return TREE_OPERAND (arg0, 0);

      /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
	      (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
	 if the latter can be further optimized.  */
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree mask = fold_build2_loc (loc, code, type,
				       fold_convert_loc (loc, type,
							 TREE_OPERAND (arg0, 1)),
				       arg1);
	  tree shift = fold_build2_loc (loc, code, type,
					fold_convert_loc (loc, type,
							  TREE_OPERAND (arg0, 0)),
					arg1);
	  tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
	  if (tem)
	    return tem;
	}

      return NULL_TREE;

    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
      if (tem)
	return tem;
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_MAX_VALUE (type)
	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
      if (tem)
	return tem;
      goto associate;
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
	return fold_convert_loc (loc, type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
	 must be evaluated.  */
      if (integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
	 case will be handled here.  */
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
	 means A >= Y && A != MAX, but in this case we know that
	 A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
	  && !TREE_SIDE_EFFECTS (arg1))
	{
	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
	  if (tem && !operand_equal_p (tem, arg0, 0))
	    return fold_build2_loc (loc, code, type, tem, arg1);

	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
	  if (tem && !operand_equal_p (tem, arg1, 0))
	    return fold_build2_loc (loc, code, type, arg0, tem);
	}

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
	  != NULL_TREE)
	return tem;

      return NULL_TREE;
:
12572 /* Note that the operands of this must be ints
12573 and their values must be 0 or true.
12574 ("true" is a fixed value perhaps depending on the language.) */
12575 /* If first arg is constant true, return it. */
12576 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
12577 return fold_convert_loc (loc
, type
, arg0
);
12578 case TRUTH_OR_EXPR
:
12579 /* If either arg is constant zero, drop it. */
12580 if (TREE_CODE (arg0
) == INTEGER_CST
&& integer_zerop (arg0
))
12581 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
12582 if (TREE_CODE (arg1
) == INTEGER_CST
&& integer_zerop (arg1
)
12583 /* Preserve sequence points. */
12584 && (code
!= TRUTH_ORIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
12585 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12586 /* If second arg is constant true, result is true, but we must
12587 evaluate first arg. */
12588 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
))
12589 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
12590 /* Likewise for first arg, but note this only occurs here for
12592 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
12593 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12595 /* !X || X is always true. */
12596 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12597 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12598 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
12599 /* X || !X is always true. */
12600 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12601 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12602 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
12604 /* (X && !Y) || (!X && Y) is X ^ Y */
12605 if (TREE_CODE (arg0
) == TRUTH_AND_EXPR
12606 && TREE_CODE (arg1
) == TRUTH_AND_EXPR
)
12608 tree a0
, a1
, l0
, l1
, n0
, n1
;
12610 a0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
12611 a1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
12613 l0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
12614 l1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
12616 n0
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l0
);
12617 n1
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l1
);
12619 if ((operand_equal_p (n0
, a0
, 0)
12620 && operand_equal_p (n1
, a1
, 0))
12621 || (operand_equal_p (n0
, a1
, 0)
12622 && operand_equal_p (n1
, a0
, 0)))
12623 return fold_build2_loc (loc
, TRUTH_XOR_EXPR
, type
, l0
, n1
);
      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
	  != NULL_TREE)
	return tem;

      return NULL_TREE;

    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
	{
	  tem = invert_truthvalue_loc (loc, arg0);
	  return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	}
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      return NULL_TREE;

    case EQ_EXPR:
    case NE_EXPR:
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;

      /* bool_var != 0 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == NE_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var == 1 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == EQ_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var != 1 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == NE_EXPR)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
						  TREE_TYPE (arg0), arg0));

      /* bool_var == 0 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == EQ_EXPR)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
						  TREE_TYPE (arg0), arg0));

      /* !exp != 0 becomes !exp.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
	  && code == NE_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If this is an equality comparison of the address of two non-weak,
	 unaliased symbols neither of which are extern (since we do not
	 have access to attributes for externs), then we know the result.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
	{
	  /* We know that we're looking at the address of two
	     non-weak, unaliased, static _DECL nodes.

	     It is both wasteful and incorrect to call operand_equal_p
	     to compare the two ADDR_EXPR nodes.  It is wasteful in that
	     all we need to do is test pointer equality for the arguments
	     to the two ADDR_EXPR nodes.  It is incorrect to use
	     operand_equal_p as that function is NOT equivalent to a
	     C equality test.  It can in fact return false for two
	     objects which would test as equal using the C equality
	     operator.  */
	  bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
	  return constant_boolean_node (equal
					? code == EQ_EXPR : code != EQ_EXPR,
					type);
	}
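      /* E.g. with two distinct file-static variables a and b, this
	 folds &a == &b to 0 and &a != &b to 1 without any runtime
	 comparison.  */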
      /* Similarly for a NEGATE_EXPR.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
							arg1)))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a BIT_XOR_EXPR;  X ^ C1 == C2 is X == (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, BIT_XOR_EXPR,
						 TREE_TYPE (arg0),
						 fold_convert_loc (loc,
								   TREE_TYPE (arg0),
								   arg1),
						 TREE_OPERAND (arg0, 1)));

      /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR
	   || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	   || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
									0)),
			      arg1, 0)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	      || POINTER_TYPE_P (TREE_TYPE (arg0))))
	{
	  tree val = TREE_OPERAND (arg0, 1);
	  return omit_two_operands_loc (loc, type,
					fold_build2_loc (loc, code, type,
							 val,
							 build_int_cst (TREE_TYPE (val),
									0)),
					TREE_OPERAND (arg0, 0), arg1);
	}
      /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
									1)),
			      arg1, 0)
	  && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
	return omit_two_operands_loc (loc, type,
				      code == NE_EXPR
				      ? boolean_true_node : boolean_false_node,
				      TREE_OPERAND (arg0, 1), arg1);

      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      if (TREE_CODE (arg0) == ABS_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1)))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
      /* If this is an EQ or NE comparison with zero and ARG0 is
	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
	 two operations, but the latter can be done in one less insn
	 on machines that have only two-operand insns or on which a
	 constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_zerop (arg1))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  if (TREE_CODE (arg00) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg00, 0)))
	    {
	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
					  arg01, TREE_OPERAND (arg00, 1));
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				     build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, TREE_TYPE (arg1),
							tem),
				      arg1);
	    }
	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
		   && integer_onep (TREE_OPERAND (arg01, 0)))
	    {
	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
					  arg00, TREE_OPERAND (arg01, 1));
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				     build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, TREE_TYPE (arg1),
							tem),
				      arg1);
	    }
	}
      /* If this is an NE or EQ comparison of zero against the result of a
	 signed MOD operation whose second operand is a power of 2, make
	 the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tree newtype = unsigned_type_for (TREE_TYPE (arg0));
	  tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
					 fold_convert_loc (loc, newtype,
							   TREE_OPERAND (arg0, 0)),
					 fold_convert_loc (loc, newtype,
							   TREE_OPERAND (arg0, 1)));

	  return fold_build2_loc (loc, code, type, newmod,
				  fold_convert_loc (loc, newtype, arg1));
	}
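      /* E.g. (x % 4) == 0 with signed x becomes
	 ((unsigned) x % 4u) == 0: since 4 divides 2**precision, the
	 remainder is zero for exactly the same values of x.  */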
      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
	 C1 is a valid shift constant, and C2 is a power of two, i.e.
	 a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
	     == INTEGER_CST
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && integer_zerop (arg1))
	{
	  tree itype = TREE_TYPE (arg0);
	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
	  prec = TYPE_PRECISION (itype);

	  /* Check for a valid shift count.  */
	  if (wi::ltu_p (arg001, prec))
	    {
	      tree arg01 = TREE_OPERAND (arg0, 1);
	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
	      /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
		 can be rewritten as (X & (C2 << C1)) != 0.  */
	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
		{
		  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype,
					 arg01, arg001);
		  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype,
					 arg000, tem);
		  return fold_build2_loc (loc, code, type, tem,
					  fold_convert_loc (loc, itype, arg1));
		}
	      /* Otherwise, for signed (arithmetic) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
	      else if (!TYPE_UNSIGNED (itype))
		return fold_build2_loc (loc,
					code == EQ_EXPR ? GE_EXPR : LT_EXPR,
					type,
					arg000, build_int_cst (itype, 0));
	      /* Otherwise, for unsigned (logical) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
	      else
		return omit_one_operand_loc (loc, type,
					     code == EQ_EXPR ? integer_one_node
					     : integer_zero_node,
					     arg000);
	    }
	}
      /* If we have (A & C) == C where C is a power of 2, convert this into
	 (A & C) != 0.  Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
				arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
							integer_zero_node));

      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
	 bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
      if (tem)
	return tem;
      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
				       TREE_TYPE (TREE_OPERAND (arg0, 1)),
				       TREE_OPERAND (arg0, 1));
	  tree dandnotc
	    = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
			       fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
			       notc);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (dandnotc))
	    return omit_one_operand_loc (loc, type, rslt, arg0);
	}

      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
				       arg1);
	  tree candnotd
	    = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
			       TREE_OPERAND (arg0, 1),
			       fold_convert_loc (loc, TREE_TYPE (arg0), notd));
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (candnotd))
	    return omit_one_operand_loc (loc, type, rslt, arg0);
	}
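      /* E.g. (x & 3) == 4 folds to 0 here: the mask can never produce
	 a bit outside of 3, so the equality is known false (and the
	 inequality known true) regardless of x.  */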
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if ((TREE_CODE (arg0) == COMPONENT_REF
	   || TREE_CODE (arg0) == BIT_FIELD_REF)
	  /* Handle the constant case even without -O
	     to make sure the warnings are given.  */
	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
	{
	  t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
	  if (t1)
	    return t1;
	}

      /* Optimize comparisons of strlen vs zero to a compare of the
	 first character of the string vs zero.  To wit,
		strlen(ptr) == 0   =>  *ptr == 0
		strlen(ptr) != 0   =>  *ptr != 0
	 Other cases should reduce to one of these two (or a constant)
	 due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
	  && integer_zerop (arg1))
	{
	  tree fndecl = get_callee_fndecl (arg0);

	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
	      && call_expr_nargs (arg0) == 1
	      && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
	    {
	      tree iref = build_fold_indirect_ref_loc (loc,
						       CALL_EXPR_ARG (arg0, 0));
	      return fold_build2_loc (loc, code, type, iref,
				      build_int_cst (TREE_TYPE (iref), 0));
	    }
	}
      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
      if (TREE_CODE (arg0) == RSHIFT_EXPR
	  && integer_zerop (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree itype = TREE_TYPE (arg00);
	  if (wi::eq_p (arg01, TYPE_PRECISION (itype) - 1))
	    {
	      if (TYPE_UNSIGNED (itype))
		{
		  itype = signed_type_for (itype);
		  arg00 = fold_convert_loc (loc, itype, arg00);
		}
	      return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				      type, arg00, build_zero_cst (itype));
	    }
	}
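      /* E.g. for 32-bit int x, (x >> 31) != 0 becomes x < 0: the
	 arithmetic shift replicates the sign bit, so the result is
	 nonzero exactly for negative x.  */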
      /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
      if (integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_XOR_EXPR)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg0, 1));

      /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				build_zero_cst (TREE_TYPE (arg0)));
      /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
				build_zero_cst (TREE_TYPE (arg0)));

      /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, BIT_XOR_EXPR,
						 TREE_TYPE (arg1),
						 TREE_OPERAND (arg0, 1), arg1));
      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
	 (X & C) == 0 when C is a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
				 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
				 TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type, tem,
				  fold_convert_loc (loc, TREE_TYPE (arg0),
						    arg1));
	}

      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
	 constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type, arg00,
				  build_int_cst (TREE_TYPE (arg00), 0));
	}
      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
	 when C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
				 arg000, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type,
				  tem, build_int_cst (TREE_TYPE (tem), 0));
	}
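      /* Example (illustrative, editorial addition): with the single bit
	 C == 8, "(~x & 8) == 0" and "((x ^ 8) & 8) == 0" both reduce to
	 the plain bit test "(x & 8) != 0".  */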
      if (integer_zerop (arg1)
	  && tree_expr_nonzero_p (arg0))
	{
	  tree res = constant_boolean_node (code == NE_EXPR, type);
	  return omit_one_operand_loc (loc, type, res, arg0);
	}
      /* Fold -X op -Y as X op Y, where op is eq/ne.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, code, type,
				TREE_OPERAND (arg0, 0),
				fold_convert_loc (loc, TREE_TYPE (arg0),
						  TREE_OPERAND (arg1, 0)));
      /* Fold (X & C) op (Y & C) as "(X ^ Y) & C op 0", and symmetries.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR, itype,
								      arg00, arg10),
						     arg01),
				    build_zero_cst (itype));

	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR, itype,
								      arg00, arg11),
						     arg01),
				    build_zero_cst (itype));

	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR, itype,
								      arg01, arg10),
						     arg00),
				    build_zero_cst (itype));

	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR, itype,
								      arg01, arg11),
						     arg00),
				    build_zero_cst (itype));
	}
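      /* Example (illustrative, editorial addition): "(x & 7) == (y & 7)"
	 folds to "((x ^ y) & 7) == 0", trading two maskings and a
	 comparison for one XOR, one masking and a test against zero.  */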
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
	     operand_equal_p guarantees no side-effects so we don't need
	     to use omit_one_operand on Z.  */
	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2_loc (loc, code, type, arg00,
				    fold_convert_loc (loc, TREE_TYPE (arg00),
						      arg10));
	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2_loc (loc, code, type, arg00,
				    fold_convert_loc (loc, TREE_TYPE (arg00),
						      arg11));
	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2_loc (loc, code, type, arg01,
				    fold_convert_loc (loc, TREE_TYPE (arg01),
						      arg10));
	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2_loc (loc, code, type, arg01,
				    fold_convert_loc (loc, TREE_TYPE (arg01),
						      arg11));

	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
	  if (TREE_CODE (arg01) == INTEGER_CST
	      && TREE_CODE (arg11) == INTEGER_CST)
	    {
	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
				     fold_convert_loc (loc, itype, arg11));
	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
	      return fold_build2_loc (loc, code, type, tem,
				      fold_convert_loc (loc, itype, arg10));
	    }
	}
      /* Attempt to simplify equality/inequality comparisons of complex
	 values.  Only lower the comparison if the result is known or
	 can be simplified to a single scalar comparison.  */
      if ((TREE_CODE (arg0) == COMPLEX_EXPR
	   || TREE_CODE (arg0) == COMPLEX_CST)
	  && (TREE_CODE (arg1) == COMPLEX_EXPR
	      || TREE_CODE (arg1) == COMPLEX_CST))
	{
	  tree real0, imag0, real1, imag1;
	  tree rcond, icond;

	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
	    {
	      real0 = TREE_OPERAND (arg0, 0);
	      imag0 = TREE_OPERAND (arg0, 1);
	    }
	  else
	    {
	      real0 = TREE_REALPART (arg0);
	      imag0 = TREE_IMAGPART (arg0);
	    }

	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
	    {
	      real1 = TREE_OPERAND (arg1, 0);
	      imag1 = TREE_OPERAND (arg1, 1);
	    }
	  else
	    {
	      real1 = TREE_REALPART (arg1);
	      imag1 = TREE_IMAGPART (arg1);
	    }

	  rcond = fold_binary_loc (loc, code, type, real0, real1);
	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
	    {
	      if (integer_zerop (rcond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands_loc (loc, type, boolean_false_node,
						  imag0, imag1);
		  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands_loc (loc, type, boolean_true_node,
						  imag0, imag1);
		  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
		}
	    }

	  icond = fold_binary_loc (loc, code, type, imag0, imag1);
	  if (icond && TREE_CODE (icond) == INTEGER_CST)
	    {
	      if (integer_zerop (icond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands_loc (loc, type, boolean_false_node,
						  real0, real1);
		  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands_loc (loc, type, boolean_true_node,
						  real0, real1);
		  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
		}
	    }
	}
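      /* Example (illustrative, editorial addition): for integer a, b, c,
	 comparing "__complex__ (a, b) == __complex__ (a, c)" has a
	 real-part comparison that folds to a constant, so the whole test
	 lowers to the single scalar comparison "b == c".  */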
      return NULL_TREE;

    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;
      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	       && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
	      || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  enum tree_code code0 = TREE_CODE (arg0);
	  int is_positive;

	  if (TREE_CODE (arg01) == REAL_CST)
	    is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
	  else
	    is_positive = tree_int_cst_sgn (arg01);

	  /* (X - c) > X becomes false.  */
	  if (code == GT_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that (X - c) > X "
					"is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Likewise (X + c) < X becomes false.  */
	  if (code == LT_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) < X is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Convert (X - c) <= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == LE_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X - c) <= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  /* Convert (X + c) >= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == GE_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) >= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  if (TREE_CODE (arg01) == INTEGER_CST)
	    {
	      /* Convert X + c > X and X - c < X to true for integers.  */
	      if (code == GT_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) > X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      if (code == LT_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) < X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      /* Convert X + c <= X and X - c >= X to false for integers.  */
	      if (code == LE_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) <= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}

	      if (code == GE_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) >= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}
	    }
	}
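      /* Example (illustrative, editorial addition): for signed x with
	 undefined overflow, "x + 1 > x" folds to 1 and "x + 1 <= x" folds
	 to 0; the fold_overflow_warning calls above let -Wstrict-overflow
	 report that this assumed the addition does not wrap.  */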
      /* Comparisons with the highest or lowest possible integer of
	 the specified precision will have known values.  */
      {
	tree arg1_type = TREE_TYPE (arg1);
	unsigned int prec = TYPE_PRECISION (arg1_type);

	if (TREE_CODE (arg1) == INTEGER_CST
	    && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
	  {
	    wide_int max = wi::max_value (arg1_type);
	    wide_int signed_max = wi::max_value (prec, SIGNED);
	    wide_int min = wi::min_value (arg1_type);

	    if (wi::eq_p (arg1, max))
	      switch (code)
		{
		case GT_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_zero_node, arg0);

		case GE_EXPR:
		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

		case LE_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_one_node, arg0);

		case LT_EXPR:
		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

		/* The GE_EXPR and LT_EXPR cases above are not normally
		   reached because of previous transformations.  */

		default:
		  break;
		}

	    else if (wi::eq_p (arg1, max - 1))
	      switch (code)
		{
		case GT_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, EQ_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		case LE_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, NE_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		default:
		  break;
		}

	    else if (wi::eq_p (arg1, min))
	      switch (code)
		{
		case LT_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_zero_node, arg0);

		case LE_EXPR:
		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

		case GE_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_one_node, arg0);

		case GT_EXPR:
		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

		default:
		  break;
		}

	    else if (wi::eq_p (arg1, min + 1))
	      switch (code)
		{
		case GE_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, NE_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		case LT_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, EQ_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		default:
		  break;
		}

	    else if (wi::eq_p (arg1, signed_max)
		     && TYPE_UNSIGNED (arg1_type)
		     /* We will flip the signedness of the comparison operator
			associated with the mode of arg1, so the sign bit is
			specified by this mode.  Check that arg1 is the signed
			max associated with this sign bit.  */
		     && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
		     /* signed_type does not work on pointer types.  */
		     && INTEGRAL_TYPE_P (arg1_type))
	      {
		/* The following case also applies to X < signed_max+1
		   and X >= signed_max+1 because of previous
		   transformations.  */
		if (code == LE_EXPR || code == GT_EXPR)
		  {
		    tree st = signed_type_for (arg1_type);
		    return fold_build2_loc (loc,
					    code == LE_EXPR ? GE_EXPR : LT_EXPR,
					    type,
					    fold_convert_loc (loc, st, arg0),
					    build_int_cst (st, 0));
		  }
	      }
	  }
      }
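      /* Example (illustrative, editorial addition): for unsigned int x,
	 "x <= UINT_MAX - 1" becomes "x != UINT_MAX", and "x > INT_MAX"
	 (the signed_max case) becomes "(int) x < 0", a sign-bit test.  */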
      /* If we are comparing an ABS_EXPR with a constant, we can
	 convert all the cases into explicit comparisons, but they may
	 well not be faster than doing the ABS and one comparison.
	 But ABS (X) <= C is a range comparison, which becomes a subtraction
	 and a comparison, and is probably faster.  */
      if (code == LE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == ABS_EXPR
	  && ! TREE_SIDE_EFFECTS (arg0)
	  && (0 != (tem = negate_expr (arg1)))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				build2 (GE_EXPR, type,
					TREE_OPERAND (arg0, 0), tem),
				build2 (LE_EXPR, type,
					TREE_OPERAND (arg0, 0), arg1));
      /* Convert ABS_EXPR<x> >= 0 to true.  */
      strict_overflow_p = false;
      if (code == GE_EXPR
	  && (integer_zerop (arg1)
	      || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
		  && real_zerop (arg1)))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type,
				       constant_boolean_node (true, type),
				       arg0);
	}

      /* Convert ABS_EXPR<x> < 0 to false.  */
      strict_overflow_p = false;
      if (code == LT_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type,
				       constant_boolean_node (false, type),
				       arg0);
	}
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
	 and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TREE_CODE (arg1) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (arg1, 0)))
	return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			   build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
				   TREE_OPERAND (arg1, 1)),
			   build_zero_cst (TREE_TYPE (arg0)));

      /* Similarly for X < (cast) (1 << Y).  But cast can't be narrowing,
	 otherwise Y might be >= # of bits in X's type and thus e.g.
	 (unsigned char) (1 << Y) for Y 15 might be 0.
	 If the cast is widening, then 1 << Y should have unsigned type,
	 otherwise if Y is number of bits in the signed shift type minus 1,
	 we can't optimize this.  E.g. (unsigned long long) (1 << Y) for Y
	 31 might be 0xffffffff80000000.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && CONVERT_EXPR_P (arg1)
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
	  && (TYPE_PRECISION (TREE_TYPE (arg1))
	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
	  && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
	      || (TYPE_PRECISION (TREE_TYPE (arg1))
		  == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
	{
	  tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
			TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
	  return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			     fold_convert_loc (loc, TREE_TYPE (arg0), tem),
			     build_zero_cst (TREE_TYPE (arg0)));
	}
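      /* Example (illustrative, editorial addition): for unsigned x,
	 "x < (1U << y)" becomes "(x >> y) == 0" and "x >= (1U << y)"
	 becomes "(x >> y) != 0", shifting the variable instead of
	 materializing the power of two.  */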
      return NULL_TREE;

    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
	{
	  t1 = fold_relational_const (code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
	}

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand_loc (loc, type, t1, arg1);
	}

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand_loc (loc, type, t1, arg0);
	}

      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
	  && !flag_trapping_math
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (0, type);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
	tree targ0 = strip_float_extensions (arg0);
	tree targ1 = strip_float_extensions (arg1);
	tree newtype = TREE_TYPE (targ0);

	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	  newtype = TREE_TYPE (targ1);

	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, newtype, targ0),
				  fold_convert_loc (loc, newtype, targ1));
      }

      return NULL_TREE;
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
	 nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
				 : fold_convert_loc (loc, type, arg1);
      return pedantic_non_lvalue_loc (loc, tem);
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg0) == REAL_CST
	   && TREE_CODE (arg1) == REAL_CST)
	  || (TREE_CODE (arg0) == INTEGER_CST
	      && TREE_CODE (arg1) == INTEGER_CST))
	return build_complex (type, arg0, arg1);
      if (TREE_CODE (arg0) == REALPART_EXPR
	  && TREE_CODE (arg1) == IMAGPART_EXPR
	  && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
	  && operand_equal_p (TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg1, 0));
      return NULL_TREE;

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
	if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg0, elts)
	    || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
	  return NULL_TREE;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
					  ? NOP_EXPR : FIX_TRUNC_EXPR,
					  TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
	unsigned int out, ofs, scale;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 4);
	if (!vec_cst_ctor_to_array (arg0, elts)
	    || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
	  return NULL_TREE;

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	for (out = 0; out < nelts; out++)
	  {
	    unsigned int in1 = (out << scale) + ofs;
	    unsigned int in2 = in1 + nelts * 2;
	    tree t1, t2;

	    t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
	    t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    elts[out] = const_binop (MULT_EXPR, t1, t2);
	    if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
   a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
   of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;

    case GOTO_EXPR:
      *walk_subtrees = 0;

      /* ... fall through ...  */

    default:
      return NULL_TREE;
    }
}

/* Return whether the sub-tree ST contains a label which is accessible from
   outside the sub-tree.  */

static bool
contains_label_p (tree st)
{
  return
   (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
}
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary_loc (location_t loc, enum tree_code code, tree type,
		  tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 3);

  /* If this is a commutative operation, and OP0 is a constant, move it
     to OP1 to reduce the number of tests below.  */
  if (commutative_ternary_tree_code (code)
      && tree_swap_operands_p (op0, op1, true))
    return fold_build3_loc (loc, code, type, op1, op0, op2);

  tem = generic_simplify (loc, code, type, op0, op1, op2);
  if (tem)
    return tem;

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any case,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  if (op2)
    {
      arg2 = op2;
      STRIP_NOPS (arg2);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx,
				    field, value)
	    if (field == arg1)
	      return value;
	}
      return NULL_TREE;
    case COND_EXPR:
    case VEC_COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
	 so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
	  tem = integer_zerop (arg0) ? op2 : op1;
	  /* Only optimize constant conditions when the selected branch
	     has the same type as the COND_EXPR.  This avoids optimizing
	     away "c ? x : throw", where the throw has a void type.
	     Avoid throwing away that operand which contains label.  */
	  if ((!TREE_SIDE_EFFECTS (unused_op)
	       || !contains_label_p (unused_op))
	      && (! VOID_TYPE_P (TREE_TYPE (tem))
		  || VOID_TYPE_P (type)))
	    return pedantic_non_lvalue_loc (loc, tem);
	  return NULL_TREE;
	}
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  if (integer_all_onesp (arg0))
	    return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
	  if (integer_zerop (arg0))
	    return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);

	  if ((TREE_CODE (arg1) == VECTOR_CST
	       || TREE_CODE (arg1) == CONSTRUCTOR)
	      && (TREE_CODE (arg2) == VECTOR_CST
		  || TREE_CODE (arg2) == CONSTRUCTOR))
	    {
	      unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	      unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
	      gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
	      for (i = 0; i < nelts; i++)
		{
		  tree val = VECTOR_CST_ELT (arg0, i);
		  if (integer_all_onesp (val))
		    sel[i] = i;
		  else if (integer_zerop (val))
		    sel[i] = nelts + i;
		  else /* Currently unreachable.  */
		    return NULL_TREE;
		}
	      tree t = fold_vec_perm (type, arg1, arg2, sel);
	      if (t != NULL_TREE)
		return t;
	    }
	}

      if (operand_equal_p (arg1, op2, 0))
	return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);

      /* If we have A op B ? A : C, we may be able to convert this to a
	 simpler expression, depending on the operation and the values
	 of B and C.  Signed zeros prevent all of these transformations,
	 for reasons given above each one.

	 Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     arg1, TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
	{
	  tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
	  if (tem)
	    return tem;
	}

      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     op2,
					     TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem && COMPARISON_CLASS_P (tem))
	    {
	      tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
	      if (tem)
		return tem;
	    }
	}

      /* If the second operand is simpler than the third, swap them
	 since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
	  && tree_swap_operands_p (op1, op2, false))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem)
	    return fold_build3_loc (loc, code, type, tem, op2, op1);
	}

      /* Convert A ? 1 : 0 to simply A.  */
      if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
				 : (integer_onep (op1)
				    && !VECTOR_TYPE_P (type)))
	  && integer_zerop (op2)
	  /* If we try to convert OP0 to our type, the
	     call to fold will try to move the conversion inside
	     a COND, which will recurse.  In that case, the COND_EXPR
	     is probably the best choice, so leave it alone.  */
	  && type == TREE_TYPE (arg0))
	return pedantic_non_lvalue_loc (loc, arg0);
      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
	 over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
	  && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
				    : (integer_onep (op2)
				       && !VECTOR_TYPE_P (type)))
	  && truth_value_p (TREE_CODE (arg0)))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  invert_truthvalue_loc (loc,
										 arg0)));

      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
	{
	  /* sign_bit_p looks through both zero and sign extensions,
	     but for this optimization only sign extensions are
	     usable.  */
	  tree tem2 = TREE_OPERAND (arg0, 0);
	  while (tem != tem2)
	    {
	      if (TREE_CODE (tem2) != NOP_EXPR
		  || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
		{
		  tem = NULL_TREE;
		  break;
		}
	      tem2 = TREE_OPERAND (tem2, 0);
	    }
	  /* sign_bit_p only checks ARG1 bits within A's precision.
	     If <sign bit of A> has wider type than A, bits outside
	     of A's precision in <sign bit of A> need to be checked.
	     If they are all 0, this optimization needs to be done
	     in unsigned A's type, if they are all 1 in signed A's type,
	     otherwise this can't be done.  */
	  if (tem
	      && TYPE_PRECISION (TREE_TYPE (tem))
		 < TYPE_PRECISION (TREE_TYPE (arg1))
	      && TYPE_PRECISION (TREE_TYPE (tem))
		 < TYPE_PRECISION (type))
	    {
	      int inner_width, outer_width;
	      tree tem_type;

	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
	      if (outer_width > TYPE_PRECISION (type))
		outer_width = TYPE_PRECISION (type);

	      wide_int mask = wi::shifted_mask
		(inner_width, outer_width - inner_width, false,
		 TYPE_PRECISION (TREE_TYPE (arg1)));

	      wide_int common = mask & arg1;
	      if (common == mask)
		{
		  tem_type = signed_type_for (TREE_TYPE (tem));
		  tem = fold_convert_loc (loc, tem_type, tem);
		}
	      else if (common == 0)
		{
		  tem_type = unsigned_type_for (TREE_TYPE (tem));
		  tem = fold_convert_loc (loc, tem_type, tem);
		}
	      else
		tem = NULL_TREE;
	    }

	  if (tem)
	    return
	      fold_convert_loc (loc, type,
				fold_build2_loc (loc, BIT_AND_EXPR,
						 TREE_TYPE (tem), tem,
						 fold_convert_loc (loc,
								   TREE_TYPE (tem),
								   arg1)));
	}

      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
	 already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && integer_pow2p (arg1))
	{
	  tree tem = TREE_OPERAND (arg0, 0);
	  STRIP_NOPS (tem);
	  if (TREE_CODE (tem) == RSHIFT_EXPR
	      && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
	      && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
		 tree_to_uhwi (TREE_OPERAND (tem, 1)))
	    return fold_build2_loc (loc, BIT_AND_EXPR, type,
				    TREE_OPERAND (tem, 0), arg1);
	}
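      /* Example (illustrative, editorial addition): "((a >> 3) & 1) ? 8 : 0"
	 folds to "a & 8" -- the shift-and-test selects exactly bit 3, so
	 masking the original value yields the same result directly.  */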
      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
	 is probably obsolete because the first operand should be a
	 truth value (that's why we have the two cases above), but let's
	 leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
	  && TREE_CODE (arg0) == NE_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_pow2p (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      arg1, OEP_ONLY_CONST))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  TREE_OPERAND (arg0, 0)));

      /* Disable the transformations below for vectors, since
	 fold_binary_op_with_conditional_arg may undo them immediately,
	 yielding an infinite loop.  */
      if (code == VEC_COND_EXPR)
	return NULL_TREE;

      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
							   : TRUTH_ANDIF_EXPR,
				type, fold_convert_loc (loc, type, arg0), arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem)
	    return fold_build2_loc (loc, code == VEC_COND_EXPR
					 ? BIT_IOR_EXPR
					 : TRUTH_ORIF_EXPR,
				    type, fold_convert_loc (loc, type, tem),
				    arg1);
	}

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem)
	    return fold_build2_loc (loc, code == VEC_COND_EXPR
					 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
				    type, fold_convert_loc (loc, type, tem),
				    op2);
	}

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	return fold_build2_loc (loc, code == VEC_COND_EXPR
				     ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
				type, fold_convert_loc (loc, type, arg0), op2);
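      /* Summary (illustrative, editorial addition), for truth values A, B:
	   A ? B : 0  ->  A && B	A ? B : 1  ->  !A || B
	   A ? 0 : B  ->  !A && B	A ? 1 : B  ->  A || B
	 with BIT_AND_EXPR/BIT_IOR_EXPR used instead for VEC_COND_EXPR.  */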
      return NULL_TREE;

    case CALL_EXPR:
      /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
	 of fold_ternary on them.  */
      gcc_unreachable ();
    case BIT_FIELD_REF:
      if ((TREE_CODE (arg0) == VECTOR_CST
	   || (TREE_CODE (arg0) == CONSTRUCTOR
	       && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
	  && (type == TREE_TYPE (TREE_TYPE (arg0))
	      || (TREE_CODE (type) == VECTOR_TYPE
		  && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
	{
	  tree eltype = TREE_TYPE (TREE_TYPE (arg0));
	  unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
	  unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
	  unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);

	  if (n != 0
	      && (idx % width) == 0
	      && (n % width) == 0
	      && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
	    {
	      idx = idx / width;
	      n = n / width;

	      if (TREE_CODE (arg0) == VECTOR_CST)
		{
		  if (n == 1)
		    return VECTOR_CST_ELT (arg0, idx);

		  tree *vals = XALLOCAVEC (tree, n);
		  for (unsigned i = 0; i < n; ++i)
		    vals[i] = VECTOR_CST_ELT (arg0, idx + i);
		  return build_vector (type, vals);
		}

	      /* Constructor elements can be subvectors.  */
	      unsigned HOST_WIDE_INT k = 1;
	      if (CONSTRUCTOR_NELTS (arg0) != 0)
		{
		  tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
		  if (TREE_CODE (cons_elem) == VECTOR_TYPE)
		    k = TYPE_VECTOR_SUBPARTS (cons_elem);
		}

	      /* We keep an exact subset of the constructor elements.  */
	      if ((idx % k) == 0 && (n % k) == 0)
		{
		  if (CONSTRUCTOR_NELTS (arg0) == 0)
		    return build_constructor (type, NULL);
		  idx /= k;
		  n /= k;
		  if (n == 1)
		    {
		      if (idx < CONSTRUCTOR_NELTS (arg0))
			return CONSTRUCTOR_ELT (arg0, idx)->value;
		      return build_zero_cst (type);
		    }
		  vec<constructor_elt, va_gc> *vals;
		  vec_alloc (vals, n);
		  for (unsigned i = 0;
		       i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
		       ++i)
		    CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
					    CONSTRUCTOR_ELT
					      (arg0, idx + i)->value);
		  return build_constructor (type, vals);
		}
	      /* The bitfield references a single constructor element.  */
	      else if (idx + n <= (idx / k + 1) * k)
		{
		  if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
		    return build_zero_cst (type);
		  else if (n == k)
		    return CONSTRUCTOR_ELT (arg0, idx / k)->value;
		  else
		    return fold_build3_loc (loc, code, type,
		      CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
		      build_int_cst (TREE_TYPE (op2), (idx % k) * width));
		}
	    }
	}

      /* A bit-field-ref that referenced the full argument can be stripped.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	  && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
	  && integer_zerop (op2))
	return fold_convert_loc (loc, type, arg0);

      /* On constants we can use native encode/interpret to constant
	 fold (nearly) all BIT_FIELD_REFs.  */
      if (CONSTANT_CLASS_P (arg0)
	  && can_native_interpret_type_p (type)
	  && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
	  /* This limitation should not be necessary, we just need to
	     round this up to mode size.  */
	  && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
	  /* Need bit-shifting of the buffer to relax the following.  */
	  && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
	{
	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
	  unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
	  unsigned HOST_WIDE_INT clen;
	  clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
	  /* ??? We cannot tell native_encode_expr to start at
	     some random byte only.  So limit us to a reasonable amount
	     of work.  */
	  if (clen <= 4096)
	    {
	      unsigned char *b = XALLOCAVEC (unsigned char, clen);
	      unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
	      if (len > 0
		  && len * BITS_PER_UNIT >= bitpos + bitsize)
		{
		  tree v = native_interpret_expr (type,
						  b + bitpos / BITS_PER_UNIT,
						  bitsize / BITS_PER_UNIT);
		  if (v)
		    return v;
		}
	    }
	}

      return NULL_TREE;

    case FMA_EXPR:
      /* For integers we can decompose the FMA if possible.  */
      if (TREE_CODE (arg0) == INTEGER_CST
	  && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, PLUS_EXPR, type,
				const_binop (MULT_EXPR, arg0, arg1), arg2);
      if (integer_zerop (arg2))
	return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);

      return fold_fma (loc, type, arg0, arg1, arg2);
    case VEC_PERM_EXPR:
      if (TREE_CODE (arg2) == VECTOR_CST)
	{
	  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
	  unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
	  bool need_mask_canon = false;
	  bool all_in_vec0 = true;
	  bool all_in_vec1 = true;
	  bool maybe_identity = true;
	  bool single_arg = (op0 == op1);
	  bool changed = false;

	  mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
	  gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
	  for (i = 0; i < nelts; i++)
	    {
	      tree val = VECTOR_CST_ELT (arg2, i);
	      if (TREE_CODE (val) != INTEGER_CST)
		return NULL_TREE;

	      /* Make sure that the perm value is in an acceptable
		 range.  */
	      wide_int t = val;
	      if (wi::gtu_p (t, mask))
		{
		  need_mask_canon = true;
		  sel[i] = t.to_uhwi () & mask;
		}
	      else
		sel[i] = t.to_uhwi ();

	      if (sel[i] < nelts)
		all_in_vec1 = false;
	      else
		all_in_vec0 = false;

	      if ((sel[i] & (nelts-1)) != i)
		maybe_identity = false;
	    }

	  if (maybe_identity)
	    {
	      if (all_in_vec0)
		return op0;
	      if (all_in_vec1)
		return op1;
	    }

	  if (all_in_vec0)
	    op1 = op0;
	  else if (all_in_vec1)
	    {
	      op0 = op1;
	      for (i = 0; i < nelts; i++)
		sel[i] -= nelts;
	      need_mask_canon = true;
	    }

	  if ((TREE_CODE (op0) == VECTOR_CST
	       || TREE_CODE (op0) == CONSTRUCTOR)
	      && (TREE_CODE (op1) == VECTOR_CST
		  || TREE_CODE (op1) == CONSTRUCTOR))
	    {
	      tree t = fold_vec_perm (type, op0, op1, sel);
	      if (t != NULL_TREE)
		return t;
	    }

	  if (op0 == op1 && !single_arg)
	    changed = true;

	  if (need_mask_canon && arg2 == op2)
	    {
	      tree *tsel = XALLOCAVEC (tree, nelts);
	      tree eltype = TREE_TYPE (TREE_TYPE (arg2));
	      for (i = 0; i < nelts; i++)
		tsel[i] = build_int_cst (eltype, sel[i]);
	      op2 = build_vector (TREE_TYPE (arg2), tsel);
	      changed = true;
	    }

	  if (changed)
	    return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;
  location_t loc = EXPR_LOCATION (expr);

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  /* CALL_EXPR-like objects with variable numbers of operands are
     treated specially.  */
  if (kind == tcc_vl_exp)
    {
      if (code == CALL_EXPR)
	{
	  tem = fold_call_expr (loc, expr, false);
	  return tem ? tem : expr;
	}
      return expr;
    }

  if (IS_EXPR_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  op0 = TREE_OPERAND (t, 0);
	  tem = fold_unary_loc (loc, code, type, op0);
	  return tem ? tem : expr;
	case 2:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  tem = fold_binary_loc (loc, code, type, op0, op1);
	  return tem ? tem : expr;
	case 3:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  op2 = TREE_OPERAND (t, 2);
	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
	  return tem ? tem : expr;
	default:
	  break;
	}
    }

  switch (code)
    {
    case ARRAY_REF:
      {
	tree op0 = TREE_OPERAND (t, 0);
	tree op1 = TREE_OPERAND (t, 1);

	if (TREE_CODE (op1) == INTEGER_CST
	    && TREE_CODE (op0) == CONSTRUCTOR
	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
	  {
	    vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
	    unsigned HOST_WIDE_INT end = vec_safe_length (elts);
	    unsigned HOST_WIDE_INT begin = 0;

	    /* Find a matching index by means of a binary search.  */
	    while (begin != end)
	      {
		unsigned HOST_WIDE_INT middle = (begin + end) / 2;
		tree index = (*elts)[middle].index;

		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_lt (index, op1))
		  begin = middle + 1;
		else if (TREE_CODE (index) == INTEGER_CST
			 && tree_int_cst_lt (op1, index))
		  end = middle;
		else if (TREE_CODE (index) == RANGE_EXPR
			 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
		  begin = middle + 1;
		else if (TREE_CODE (index) == RANGE_EXPR
			 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
		  end = middle;
		else
		  return (*elts)[middle].value;
	      }
	  }

	return t;
      }

      /* Return a VECTOR_CST if possible.  */
    case CONSTRUCTOR:
      {
	tree type = TREE_TYPE (t);
	if (TREE_CODE (type) != VECTOR_TYPE)
	  return t;

	tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
	unsigned HOST_WIDE_INT idx, pos = 0;
	tree value;

	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
	  {
	    if (!CONSTANT_CLASS_P (value))
	      return t;
	    if (TREE_CODE (value) == VECTOR_CST)
	      {
		for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
		  vec[pos++] = VECTOR_CST_ELT (value, i);
	      }
	    else
	      vec[pos++] = value;
	  }
	for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
	  vec[pos] = build_zero_cst (TREE_TYPE (type));

	return build_vector (type, vec);
      }

    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (const_tree, struct md5_ctx *,
				hash_table <pointer_hash <const tree_node> > *);
static void fold_check_failed (const_tree, const_tree);
void print_fold_checksum (const_tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  hash_table <pointer_hash <const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before);
  ht.empty ();

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}

void
print_fold_checksum (const_tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  hash_table <pointer_hash <const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum);
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}

static void
fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}

static void
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
		    hash_table <pointer_hash <const tree_node> > *ht)
{
  const tree_node **slot;
  enum tree_code code;
  union tree_node buf;
  int i, len;

 recursive_label:
  if (expr == NULL)
    return;
  slot = ht->find_slot (expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
      expr = (tree) &buf;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr)
	       || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)
	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
	       || TYPE_NEXT_VARIANT (expr)))
    {
      /* Allow these fields to be modified.  */
      tree tmp;
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = tmp = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
      TYPE_POINTER_TO (tmp) = NULL;
      TYPE_REFERENCE_TO (tmp) = NULL;
      TYPE_NEXT_VARIANT (tmp) = NULL;
      if (TYPE_CACHED_VALUES_P (tmp))
	{
	  TYPE_CACHED_VALUES_P (tmp) = 0;
	  TYPE_CACHED_VALUES (tmp) = NULL;
	}
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST
      && code != SSA_NAME
      && CODE_CONTAINS_STRUCT (code, TS_COMMON))
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
	    fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  expr = TREE_CHAIN (expr);
	  goto recursive_label;
	  break;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
    case tcc_vl_exp:
      len = TREE_OPERAND_LENGTH (expr);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
	{
	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
	}
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
	{
	  if (TREE_CODE (expr) == FUNCTION_DECL)
	    {
	      fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
	      fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
	    }
	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
	}
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}

/* Helper function for outputting the checksum of a tree T.  When
   debugging with gdb, you can "define mynext" to be "next" followed
   by "call debug_fold_checksum (op0)", then just trace down till the
   outputs differ.  */

DEBUG_FUNCTION void
debug_fold_checksum (const_tree t)
{
  int i;
  unsigned char checksum[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (t, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum);

  for (i = 0; i < 16; i++)
    fprintf (stderr, "%d ", checksum[i]);

  fprintf (stderr, "\n");
}

#endif
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  LOC is the location of the resulting expression.
   Return a folded expression if successful.  Otherwise, return a tree
   expression with code CODE of type TYPE with an operand OP0.  */

tree
fold_build1_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before);
  ht.empty ();
#endif

  tem = fold_unary_loc (loc, code, type, op0);
  if (!tem)
    tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting
   expression.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with operands
   OP0 and OP1.  */

tree
fold_build2_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0, tree op1
		      MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_after_op0[16],
                checksum_after_op1[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();
#endif

  tem = fold_binary_loc (loc, code, type, op0, op1);
  if (!tem)
    tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op1);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}
/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
		      tree op0, tree op1, tree op2 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_before_op2[16],
                checksum_after_op0[16],
                checksum_after_op1[16],
                checksum_after_op2[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  ht.empty ();
#endif

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  ht.empty ();

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op2);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
   arguments in ARGARRAY, and a null static chain.
   Return a folded expression if successful.  Otherwise, return a CALL_EXPR
   of type TYPE from the given operands as constructed by build_call_array.  */

tree
fold_build_call_array_loc (location_t loc, tree type, tree fn,
			   int nargs, tree *argarray)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_fn[16],
                checksum_before_arglist[16],
                checksum_after_fn[16],
                checksum_after_arglist[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <const tree_node> > ht (32);
  int i;

  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_fn);
  ht.empty ();

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_arglist);
  ht.empty ();
#endif

  tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_fn);
  ht.empty ();

  if (memcmp (checksum_before_fn, checksum_after_fn, 16))
    fold_check_failed (fn, tem);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_arglist);

  if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
    fold_check_failed (NULL_TREE, tem);
#endif
  return tem;
}
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;
14985 fold_build1_initializer_loc (location_t loc
, enum tree_code code
,
14986 tree type
, tree op
)
14991 result
= fold_build1_loc (loc
, code
, type
, op
);
14998 fold_build2_initializer_loc (location_t loc
, enum tree_code code
,
14999 tree type
, tree op0
, tree op1
)
15004 result
= fold_build2_loc (loc
, code
, type
, op0
, op1
);
15011 fold_build_call_array_initializer_loc (location_t loc
, tree type
, tree fn
,
15012 int nargs
, tree
*argarray
)
15017 result
= fold_build_call_array_loc (loc
, type
, fn
, nargs
, argarray
);
15023 #undef START_FOLD_INIT
15024 #undef END_FOLD_INIT
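/* Illustrative sketch (hypothetical caller, not part of this file):
   a front end folding a static initializer can use the _initializer
   entry points so that arithmetic that must normally be kept for its
   run-time traps is still folded at compile time, e.g.

     tree init = fold_build2_initializer_loc (input_location, PLUS_EXPR,
                                              double_type_node, x, y);

   folds even with -ftrapping-math in effect, because START_FOLD_INIT
   clears flag_trapping_math and friends for the duration of the call.  */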
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

int
multiple_of_p (tree type, const_tree top, const_tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
         a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
        return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
        {
          tree op1, t1;

          op1 = TREE_OPERAND (top, 1);
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
              && 0 != (t1 = fold_convert (type,
                                          const_binop (LSHIFT_EXPR,
                                                       size_one_node,
                                                       op1)))
              && !TREE_OVERFLOW (t1))
            return multiple_of_p (type, t1, bottom);
        }
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
          || (TYPE_PRECISION (type)
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
        return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case COND_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
              && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
          || integer_zerop (bottom)
          || (TYPE_UNSIGNED (type)
              && (tree_int_cst_sgn (top) < 0
                  || tree_int_cst_sgn (bottom) < 0)))
        return 0;
      return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
                                SIGNED);

    default:
      return 0;
    }
}
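/* Worked examples (hypothetical operands, not from this file):

     multiple_of_p (sizetype, J * 8, 8)      -> 1   MULT_EXPR case: one
                                                    factor being a multiple
                                                    suffices.
     multiple_of_p (sizetype, J * 8 + 4, 8)  -> 0   PLUS_EXPR case: both
                                                    addends must qualify.
     multiple_of_p (sizetype, J << 3, 8)     -> 1   LSHIFT_EXPR case:
                                                    1 << 3 is a multiple
                                                    of 8.  */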
/* Return true if CODE or TYPE is known to be non-negative.  */

static bool
tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
{
  if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
      && truth_value_p (code))
    /* Truth values evaluate to 0 or 1, which is nonnegative unless we
       have a signed:1 type (where the value is -1 and 0).  */
    return true;

  return false;
}
/* Return true if (CODE OP0) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
                                bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
         ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!INTEGRAL_TYPE_P (type))
        return true;
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          *strict_overflow_p = true;
          return true;
        }
      break;

    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
                                            strict_overflow_p);

    CASE_CONVERT:
      {
        tree inner_type = TREE_TYPE (op0);
        tree outer_type = type;

        if (TREE_CODE (outer_type) == REAL_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_warnv_p (op0,
                                                    strict_overflow_p);
            if (INTEGRAL_TYPE_P (inner_type))
              {
                if (TYPE_UNSIGNED (inner_type))
                  return true;
                return tree_expr_nonnegative_warnv_p (op0,
                                                      strict_overflow_p);
              }
          }
        else if (INTEGRAL_TYPE_P (outer_type))
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_warnv_p (op0,
                                                    strict_overflow_p);
            if (INTEGRAL_TYPE_P (inner_type))
              return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
                     && TYPE_UNSIGNED (inner_type);
          }
      }
      break;

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
                                 tree op1, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type))
        return (tree_expr_nonnegative_warnv_p (op0,
                                               strict_overflow_p)
                && tree_expr_nonnegative_warnv_p (op1,
                                                  strict_overflow_p));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
         both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && TREE_CODE (op0) == NOP_EXPR
          && TREE_CODE (op1) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
            {
              unsigned int prec = MAX (TYPE_PRECISION (inner1),
                                       TYPE_PRECISION (inner2)) + 1;
              return prec < TYPE_PRECISION (type);
            }
        }
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          /* x * x is always non-negative for floating point x
             or without overflow.  */
          if (operand_equal_p (op0, op1, 0)
              || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
                  && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
            {
              if (TYPE_OVERFLOW_UNDEFINED (type))
                *strict_overflow_p = true;
              return true;
            }
        }

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
         both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
          && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
        {
          tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
            ? TREE_TYPE (TREE_OPERAND (op0, 0))
            : TREE_TYPE (op0);
          tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
            ? TREE_TYPE (TREE_OPERAND (op1, 0))
            : TREE_TYPE (op1);

          bool unsigned0 = TYPE_UNSIGNED (inner0);
          bool unsigned1 = TYPE_UNSIGNED (inner1);

          if (TREE_CODE (op0) == INTEGER_CST)
            unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;

          if (TREE_CODE (op1) == INTEGER_CST)
            unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;

          if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
              && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
            {
              unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
                ? tree_int_cst_min_precision (op0, UNSIGNED)
                : TYPE_PRECISION (inner0);

              unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
                ? tree_int_cst_min_precision (op1, UNSIGNED)
                : TYPE_PRECISION (inner1);

              return precision0 + precision1 < TYPE_PRECISION (type);
            }
        }
      break;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
                                             strict_overflow_p)
              || tree_expr_nonnegative_warnv_p (op1,
                                                strict_overflow_p));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
                                             strict_overflow_p)
              && tree_expr_nonnegative_warnv_p (op1,
                                                strict_overflow_p));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
                                            strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
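/* As an illustration of the zero_extend reasoning above (hypothetical
   operands, not from this file): for

     (int) (unsigned short) x + (int) (unsigned short) y

   each NOP_EXPR operand fits in 16 bits, so prec = 16 + 1 = 17 < 32 and
   the PLUS_EXPR is known nonnegative with no overflow assumption.  The
   MULT_EXPR analogue instead requires 16 + 16 < 32, which fails, so the
   product is not derived nonnegative this way.  */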
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                             strict_overflow_p)
              && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
                                                strict_overflow_p));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t));
    }
  /* We don't know sign of `t', so be conservative and return false.  */
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_call_nonnegative_warnv_p (tree type, tree fndecl,
                               tree arg0, tree arg1, bool *strict_overflow_p)
{
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
        CASE_FLT_FN (BUILT_IN_ACOS):
        CASE_FLT_FN (BUILT_IN_ACOSH):
        CASE_FLT_FN (BUILT_IN_CABS):
        CASE_FLT_FN (BUILT_IN_COSH):
        CASE_FLT_FN (BUILT_IN_ERFC):
        CASE_FLT_FN (BUILT_IN_EXP):
        CASE_FLT_FN (BUILT_IN_EXP10):
        CASE_FLT_FN (BUILT_IN_EXP2):
        CASE_FLT_FN (BUILT_IN_FABS):
        CASE_FLT_FN (BUILT_IN_FDIM):
        CASE_FLT_FN (BUILT_IN_HYPOT):
        CASE_FLT_FN (BUILT_IN_POW10):
        CASE_INT_FN (BUILT_IN_FFS):
        CASE_INT_FN (BUILT_IN_PARITY):
        CASE_INT_FN (BUILT_IN_POPCOUNT):
        CASE_INT_FN (BUILT_IN_CLZ):
        CASE_INT_FN (BUILT_IN_CLRSB):
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
        /* Always true.  */
        return true;

        CASE_FLT_FN (BUILT_IN_SQRT):
        /* sqrt(-0.0) is -0.0.  */
        if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
          return true;
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_ASINH):
        CASE_FLT_FN (BUILT_IN_ATAN):
        CASE_FLT_FN (BUILT_IN_ATANH):
        CASE_FLT_FN (BUILT_IN_CBRT):
        CASE_FLT_FN (BUILT_IN_CEIL):
        CASE_FLT_FN (BUILT_IN_ERF):
        CASE_FLT_FN (BUILT_IN_EXPM1):
        CASE_FLT_FN (BUILT_IN_FLOOR):
        CASE_FLT_FN (BUILT_IN_FMOD):
        CASE_FLT_FN (BUILT_IN_FREXP):
        CASE_FLT_FN (BUILT_IN_ICEIL):
        CASE_FLT_FN (BUILT_IN_IFLOOR):
        CASE_FLT_FN (BUILT_IN_IRINT):
        CASE_FLT_FN (BUILT_IN_IROUND):
        CASE_FLT_FN (BUILT_IN_LCEIL):
        CASE_FLT_FN (BUILT_IN_LDEXP):
        CASE_FLT_FN (BUILT_IN_LFLOOR):
        CASE_FLT_FN (BUILT_IN_LLCEIL):
        CASE_FLT_FN (BUILT_IN_LLFLOOR):
        CASE_FLT_FN (BUILT_IN_LLRINT):
        CASE_FLT_FN (BUILT_IN_LLROUND):
        CASE_FLT_FN (BUILT_IN_LRINT):
        CASE_FLT_FN (BUILT_IN_LROUND):
        CASE_FLT_FN (BUILT_IN_MODF):
        CASE_FLT_FN (BUILT_IN_NEARBYINT):
        CASE_FLT_FN (BUILT_IN_RINT):
        CASE_FLT_FN (BUILT_IN_ROUND):
        CASE_FLT_FN (BUILT_IN_SCALB):
        CASE_FLT_FN (BUILT_IN_SCALBLN):
        CASE_FLT_FN (BUILT_IN_SCALBN):
        CASE_FLT_FN (BUILT_IN_SIGNBIT):
        CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
        CASE_FLT_FN (BUILT_IN_SINH):
        CASE_FLT_FN (BUILT_IN_TANH):
        CASE_FLT_FN (BUILT_IN_TRUNC):
        /* True if the 1st argument is nonnegative.  */
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_FMAX):
        /* True if the 1st OR 2nd arguments are nonnegative.  */
        return (tree_expr_nonnegative_warnv_p (arg0,
                                               strict_overflow_p)
                || (tree_expr_nonnegative_warnv_p (arg1,
                                                   strict_overflow_p)));

        CASE_FLT_FN (BUILT_IN_FMIN):
        /* True if the 1st AND 2nd arguments are nonnegative.  */
        return (tree_expr_nonnegative_warnv_p (arg0,
                                               strict_overflow_p)
                && (tree_expr_nonnegative_warnv_p (arg1,
                                                   strict_overflow_p)));

        CASE_FLT_FN (BUILT_IN_COPYSIGN):
        /* True if the 2nd argument is nonnegative.  */
        return tree_expr_nonnegative_warnv_p (arg1,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_POWI):
        /* True if the 1st argument is nonnegative or the second
           argument is an even integer.  */
        if (TREE_CODE (arg1) == INTEGER_CST
            && (TREE_INT_CST_LOW (arg1) & 1) == 0)
          return true;
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_POW):
        /* True if the 1st argument is nonnegative or the second
           argument is an even integer valued real.  */
        if (TREE_CODE (arg1) == REAL_CST)
          {
            REAL_VALUE_TYPE c;
            HOST_WIDE_INT n;

            c = TREE_REAL_CST (arg1);
            n = real_to_integer (&c);
            if ((n & 1) == 0)
              {
                REAL_VALUE_TYPE cint;
                real_from_integer (&cint, VOIDmode, n, SIGNED);
                if (real_identical (&c, &cint))
                  return true;
              }
          }
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

      default:
        break;
      }
  return tree_simple_nonnegative_warnv_p (CALL_EXPR,
                                          type);
}
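/* E.g. (hypothetical calls): pow (x, 2.0) is nonnegative for any x,
   because 2.0 is an even integer valued REAL_CST; sqrt (x) is always
   nonnegative when signed zeros are not honored, and otherwise only
   when x itself is (sqrt (-0.0) being -0.0).  */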
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

static bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
        tree temp = TARGET_EXPR_SLOT (t);
        t = TARGET_EXPR_INITIAL (t);

        /* If the initializer is non-void, then it's a normal expression
           that will be assigned to the slot.  */
        if (!VOID_TYPE_P (t))
          return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

        /* Otherwise, the initializer sets the slot in some way.  One common
           way is an assignment statement at the end of the initializer.  */
        while (1)
          {
            if (TREE_CODE (t) == BIND_EXPR)
              t = expr_last (BIND_EXPR_BODY (t));
            else if (TREE_CODE (t) == TRY_FINALLY_EXPR
                     || TREE_CODE (t) == TRY_CATCH_EXPR)
              t = expr_last (TREE_OPERAND (t, 0));
            else if (TREE_CODE (t) == STATEMENT_LIST)
              t = expr_last (t);
            else
              break;
          }
        if (TREE_CODE (t) == MODIFY_EXPR
            && TREE_OPERAND (t, 0) == temp)
          return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                                strict_overflow_p);

        return false;
      }

    case CALL_EXPR:
      {
        tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
        tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;

        return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
                                              get_callee_fndecl (t),
                                              arg0,
                                              arg1,
                                              strict_overflow_p);
      }
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                            strict_overflow_p);
    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
                                            strict_overflow_p);
    case SAVE_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
                                            strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code;
  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              TREE_OPERAND (t, 1),
                                              strict_overflow_p);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
                                             TREE_TYPE (t),
                                             TREE_OPERAND (t, 0),
                                             strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              TREE_OPERAND (t, 1),
                                              strict_overflow_p);
    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
                                             TREE_TYPE (t),
                                             TREE_OPERAND (t, 0),
                                             strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
    }
}
/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-negative"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
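/* Minimal usage sketch (hypothetical caller, not part of this file):

     if (tree_expr_nonnegative_p (arg))
       ... e.g. simplify abs (arg) to arg ...

   Passes that must control the -Wstrict-overflow diagnostics themselves
   call tree_expr_nonnegative_warnv_p and examine strict_overflow_p.  */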
/* Return true when (CODE OP0) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
                            bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
                                        strict_overflow_p);

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);
        tree outer_type = type;

        return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
                && tree_expr_nonzero_warnv_p (op0,
                                              strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
                                        strict_overflow_p);

    default:
      break;
    }

  return false;
}
/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonzero_warnv_p (enum tree_code code,
                             tree type,
                             tree op0,
                             tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          /* With the presence of negative values it is hard
             to say something.  */
          sub_strict_overflow_p = false;
          if (!tree_expr_nonnegative_warnv_p (op0,
                                              &sub_strict_overflow_p)
              || !tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
            return false;
          /* One of operands must be positive and the other non-negative.  */
          /* We don't set *STRICT_OVERFLOW_P here: even if this value
             overflows, on a twos-complement machine the sum of two
             nonnegative numbers can never be zero.  */
          return (tree_expr_nonzero_warnv_p (op0,
                                             strict_overflow_p)
                  || tree_expr_nonzero_warnv_p (op1,
                                                strict_overflow_p));
        }
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          if (tree_expr_nonzero_warnv_p (op0,
                                         strict_overflow_p)
              && tree_expr_nonzero_warnv_p (op1,
                                            strict_overflow_p))
            {
              *strict_overflow_p = true;
              return true;
            }
        }
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (op1,
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
                                     &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;

          /* When both operands are nonzero, then MAX must be too.  */
          if (tree_expr_nonzero_warnv_p (op1,
                                         strict_overflow_p))
            return true;

          /* MAX where operand 0 is positive is positive.  */
          return tree_expr_nonnegative_warnv_p (op0,
                                                strict_overflow_p);
        }
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
                                          &sub_strict_overflow_p)
               && tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
                                         strict_overflow_p)
              || tree_expr_nonzero_warnv_p (op0,
                                            strict_overflow_p));

    default:
      break;
    }

  return false;
}
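/* E.g. (hypothetical operands): with undefined signed overflow, x * y
   is nonzero when both factors are, and *STRICT_OVERFLOW_P is set since
   the conclusion relies on that assumption; x | y needs no assumption,
   as either nonzero operand forces a nonzero result.  */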
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
        tree base = TREE_OPERAND (t, 0);

        if (!DECL_P (base))
          base = get_base_address (base);

        if (!base)
          return false;

        /* For objects in symbol table check if we know they are non-zero.
           Don't do anything for variables and functions before symtab is built;
           it is quite possible that they will be declared weak later.  */
        if (DECL_P (base) && decl_in_symtab_p (base))
          {
            struct symtab_node *symbol;

            symbol = symtab_node::get_create (base);
            if (symbol)
              return symbol->nonzero_address ();
            else
              return false;
          }

        /* Function local objects are never NULL.  */
        if (DECL_P (base)
            && (DECL_CONTEXT (base)
                && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
                && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
          return true;

        /* Constants are never weak.  */
        if (CONSTANT_CLASS_P (base))
          return true;

        break;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    default:
      break;
    }

  return false;
}
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
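/* Usage sketch (hypothetical values, not part of this file):

     tree five = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                          build_int_cst (integer_type_node, 2),
                                          build_int_cst (integer_type_node, 3));

   yields an INTEGER_CST of value 5, whereas a non-constant operand makes
   the result NULL_TREE rather than a partially folded expression.  */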
/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
        string = string_constant (exp1, &index);
      else
        {
          tree low_bound = array_ref_low_bound (exp);
          index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

          /* Optimize the special-case of a zero lower bound.

             We convert the low_bound to sizetype to avoid some problems
             with constant folding.  (E.g. suppose the lower bound is 1,
             and its mode is QI.  Without the conversion, (ARRAY
             +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
             +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
          if (! integer_zerop (low_bound))
            index = size_diffop_loc (loc, index,
                                     fold_convert_loc (loc, sizetype, low_bound));

          string = exp1;
        }

      if (string
          && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
          && TREE_CODE (string) == STRING_CST
          && TREE_CODE (index) == INTEGER_CST
          && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
          && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
              == MODE_INT)
          && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
        return build_int_cst_type (TREE_TYPE (exp),
                                   (TREE_STRING_POINTER (string)
                                    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
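/* E.g. (hypothetical): the reference "abc"[1] passes the checks above
   (a STRING_CST, constant index 1 below TREE_STRING_LENGTH 4, and a
   single-byte integer element mode), so it folds to the character
   constant 'b'.  */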
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        bool overflow;
        wide_int val = wi::neg (arg0, &overflow);
        t = force_fit_type (type, val, 1,
                            (overflow | TREE_OVERFLOW (arg0))
                            && !TYPE_UNSIGNED (type));
        break;
      }

    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
        FIXED_VALUE_TYPE f;
        bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
                                            &(TREE_FIXED_CST (arg0)), NULL,
                                            TYPE_SATURATING (type));
        t = build_fixed (type, f);
        /* Propagate overflow flags.  */
        if (overflow_p | TREE_OVERFLOW (arg0))
          TREE_OVERFLOW (t) = 1;
        break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
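/* E.g. (hypothetical): negating the minimum INTEGER_CST of a signed
   type wraps back to itself; wi::neg reports the overflow and
   force_fit_type then sets TREE_OVERFLOW on the result.  Negating a
   REAL_CST merely flips the sign.  */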
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        /* If the value is unsigned or non-negative, then the absolute value
           is the same as the ordinary value.  */
        if (!wi::neg_p (arg0, TYPE_SIGN (type)))
          t = arg0;

        /* If the value is negative, then the absolute value is
           its negation.  */
        else
          {
            bool overflow;
            wide_int val = wi::neg (arg0, &overflow);
            t = force_fit_type (type, val, -1,
                                overflow | TREE_OVERFLOW (arg0));
          }
      }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
        t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
        t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (const_tree arg0, tree type)
{
  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
}
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case ORDERED_EXPR:
              result = 0;
              break;

            case NE_EXPR:
            case UNORDERED_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNEQ_EXPR:
              result = 1;
              break;

            case LT_EXPR:
            case LE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
              if (flag_trapping_math)
                return NULL_TREE;
              result = 0;
              break;

            default:
              gcc_unreachable ();
            }

          return constant_boolean_node (result, type);
        }

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
                                          TREE_REALPART (op0),
                                          TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
                                          TREE_IMAGPART (op0),
                                          TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
        return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
        return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
        return NULL_TREE;
    }

  if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
    {
      unsigned count = VECTOR_CST_NELTS (op0);
      tree *elts = XALLOCAVEC (tree, count);
      gcc_assert (VECTOR_CST_NELTS (op1) == count
                  && TYPE_VECTOR_SUBPARTS (type) == count);

      for (unsigned i = 0; i < count; i++)
        {
          tree elem_type = TREE_TYPE (type);
          tree elem0 = VECTOR_CST_ELT (op0, i);
          tree elem1 = VECTOR_CST_ELT (op1, i);

          tree tem = fold_relational_const (code, elem_type,
                                            elem0, elem1);

          if (tem == NULL_TREE)
            return NULL_TREE;

          elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
        }

      return build_vector (type, elts);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else
        result = tree_int_cst_lt (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
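/* Worked example of the reduction above (hypothetical constants): to
   fold 2 >= 3, GE_EXPR is inverted to LT_EXPR with invert = 1,
   tree_int_cst_lt (2, 3) gives result = 1, and inverting yields 0,
   i.e. the false node of TYPE.  */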
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check to see if the expression inside the
     return has no side effects or the right hand side of the modify expression
     inside the return.  If either don't have side effects set we don't need to
     wrap the expression in a cleanup point expression.  Note we don't check the
     left hand side of the modify because it should always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
        return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
        return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

static tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype)
               && (!in_gimple_form
                   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          if (in_gimple_form
              && TREE_CODE (min_val) != INTEGER_CST)
            return NULL_TREE;
          return build4_loc (loc, ARRAY_REF, type, op, min_val,
                             NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && type == TREE_TYPE (optype))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
        }
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
        {
          tree op00type;
          op00 = TREE_OPERAND (op00, 0);
          op00type = TREE_TYPE (op00);

          /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
          if (TREE_CODE (op00type) == VECTOR_TYPE
              && type == TREE_TYPE (op00type))
            {
              HOST_WIDE_INT offset = tree_to_shwi (op01);
              tree part_width = TYPE_SIZE (type);
              unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
              unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
              tree index = bitsize_int (indexi);

              if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
                return fold_build3_loc (loc,
                                        BIT_FIELD_REF, type, op00,
                                        part_width, index);
            }
          /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
          else if (TREE_CODE (op00type) == COMPLEX_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree size = TYPE_SIZE_UNIT (type);
              if (tree_int_cst_equal (size, op01))
                return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
            }
          /* ((foo *)&fooarray)[1] => fooarray[1] */
          else if (TREE_CODE (op00type) == ARRAY_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree type_domain = TYPE_DOMAIN (op00type);
              tree min_val = size_zero_node;
              if (type_domain && TYPE_MIN_VALUE (type_domain))
                min_val = TYPE_MIN_VALUE (type_domain);
              op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
                                     TYPE_SIZE_UNIT (type));
              op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
              return build4_loc (loc, ARRAY_REF, type, op00, op01,
                                 NULL_TREE, NULL_TREE);
            }
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
          || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
          && TREE_CODE (min_val) != INTEGER_CST)
        return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
                         NULL_TREE);
    }

  return NULL_TREE;
}
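/* E.g. (hypothetical declarations): given   int a[4];   the expression
   *(int *) &a folds to a[0] through the ARRAY_TYPE case, and given
   _Complex double c;   *((double *) &c + 1) folds to __imag__ c through
   the POINTER_PLUS_EXPR / COMPLEX_TYPE case, since the constant offset
   equals TYPE_SIZE_UNIT (double).  */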
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}
/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
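/* E.g. (hypothetical): for an ignored (x + f ()), the tcc_binary case
   drops the side-effect-free operand x, and the loop then returns the
   CALL_EXPR f (); an expression with no side effects at all is replaced
   by integer_zero_node outright.  */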
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.  */

tree
round_up_loc (location_t loc, tree value, unsigned int divisor)
{
  tree div = NULL_TREE;

  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          wide_int val = value;
          bool overflow_p;

          if ((val & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          val &= ~(divisor - 1);
          val += divisor;
          if (val == 0)
            overflow_p = true;

          return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop_loc (loc, PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
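/* Worked example of the power-of-two path (hypothetical values):
   rounding up to a multiple of 8 computes (VALUE + 7) & -8, so 13
   becomes 20 & ~7 = 16; a constant VALUE is handled directly on its
   wide_int representation instead of building the two size_binops.  */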
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
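/* E.g. (hypothetical): for   int a[8];   the addresses &a[3] and &a[1]
   share the core &a and have constant byte offsets, so *DIFF becomes
   2 * sizeof (int); comparing &a[i] with &a[1] fails, because only one
   of the offsets is constant.  */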
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
                                arg0 ? arg0 : TREE_OPERAND (exp, 0),
                                arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3_loc (loc,
                                COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
                                arg0 ? arg0 : TREE_OPERAND (exp, 1),
                                arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip copysign function call, return the 1st argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
              }
            break;
          }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}