/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
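
/* Illustrative sketch (added here, not part of the original file): how
   callers typically use these entry points.  The APIs are standard GCC
   tree routines; the surrounding fragment is hypothetical.

     tree twelve = size_binop (PLUS_EXPR, size_int (4), size_int (8));
     // twelve is an INTEGER_CST of sizetype with value 12.

     tree e = build2 (PLUS_EXPR, integer_type_node, x, integer_zero_node);
     tree simplified = fold (e);
     // For integral x, fold returns x, since x + 0 simplifies.  */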
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "diagnostic-core.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "hash-table.h"  /* Required for ENABLE_FOLD_CHECKING.  */
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */

int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */

enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
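
/* Illustrative note (added here, not in the original file): the encoding
   dedicates one bit each to LT, EQ, GT and UNORD, so composite codes are
   bitwise ORs of primitive ones, e.g.

     COMPCODE_LE   == COMPCODE_LT | COMPCODE_EQ                (1 | 2 == 3)
     COMPCODE_ORD  == COMPCODE_LT | COMPCODE_EQ | COMPCODE_GT  (7)
     COMPCODE_TRUE == all four bits                            (15)

   This is what lets combine_comparisons below implement AND/OR of two
   comparisons on the same operands as bitwise AND/OR of compcodes.  */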
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
				HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
				    HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
					tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_mathfn_compare (location_t,
				 enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify x in place,
   if location can and needs to be set, unshare it.  */

static tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

static tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
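
/* Illustrative sketch (added, not in the original file): given sizetype
   constants, the helper behaves like

     div_if_zero_remainder (size_int (12), size_int (4))  => size_int (3)
     div_if_zero_remainder (size_int (13), size_int (4))  => NULL_TREE

   since 13 is not a multiple of 4 and wi::multiple_of_p fails.  */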
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning ((enum warn_strict_overflow_code) code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}
/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
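
/* Illustrative sketch (added, not in the original file): the usual
   calling pattern for the deferral machinery, e.g. in code that
   estimates loop iterations.  STMT and INTERESTING are hypothetical
   names.

     fold_defer_overflow_warnings ();
     tree res = fold_binary (PLUS_EXPR, type, a, b);
     bool interesting = res && TREE_CODE (res) == INTEGER_CST;
     fold_undefer_overflow_warnings (interesting, stmt,
				     WARN_STRICT_OVERFLOW_MISC);

   so a -Wstrict-overflow warning is only issued when the folded result
   is actually used.  */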
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
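
/* Illustrative note (added, not in the original file): sin is odd, so
   -sin(x) == sin(-x) and a negation may be moved into the call.  The
   lrint/rint/nearbyint group is only odd when -frounding-math is off:
   under upward rounding, rint(-0.5) is -0.0 while -rint(0.5) is -1.0.  */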
/* Check whether we may negate an integer constant T without causing
   overflow.  */

static bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}
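
/* Illustrative note (added, not in the original file): for a 32-bit
   signed int, wi::only_sign_bit_p rejects exactly INT_MIN (0x80000000),
   the one value whose negation overflows, because -INT_MIN == INT_MIN
   in two's complement.  */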
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	int count = TYPE_VECTOR_SUBPARTS (type), i;

	for (i = 0; i < count; i++)
	  if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
	{
	  if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	    break;
	  /* If overflow is undefined then we have to be careful because
	     we ask whether it's ok to associate the negate with the
	     division which is not ok for example for
	     -((a - b) / c) where (-(a - b)) / c may invoke undefined
	     overflow because of negating INT_MIN.  So do not use
	     negate_expr_p here but open-code the two important cases.  */
	  if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
	      || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
	    return true;
	}
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    CASE_CONVERT:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
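
/* Illustrative note (added, not in the original file): for signed ints
   with undefined overflow, negate_expr_p (a - b) is true, which allows
   fold_negate_expr below to rewrite -(a - b) as b - a.  For floats the
   rewrite is suppressed when signed zeros are honored, since
   -(0.0 - 0.0) is -0.0 but 0.0 - 0.0 is +0.0.  */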
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
	return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	int count = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts = XALLOCAVEC (tree, count);

	for (i = 0; i < count; i++)
	  {
	    elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      TREE_OPERAND (t, 0), negate_expr (tem));
	    }
	  /* If overflow is undefined then we have to be careful because
	     we ask whether it's ok to associate the negate with the
	     division which is not ok for example for
	     -((a - b) / c) where (-(a - b)) / c may invoke undefined
	     overflow because of negating INT_MIN.  So do not use
	     negate_expr_p here but open-code the two important cases.  */
	  tem = TREE_OPERAND (t, 0);
	  if ((INTEGRAL_TYPE_P (type)
	       && (TREE_CODE (tem) == NEGATE_EXPR
		   || (TREE_CODE (tem) == INTEGER_CST
		       && may_negate_without_overflow_p (tem))))
	      || !INTEGRAL_TYPE_P (type))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    CASE_CONVERT:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
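
/* Illustrative sketch (added, not in the original file): splitting
   IN = a + 4 with CODE == PLUS_EXPR yields

     return value (variable part)  a
     *litp                         4
     *conp                         NULL
     *minus_litp                   NULL

   and the pieces can then be recombined with associate_trees below.  */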
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
		   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
				  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, arg2, sign);
      else
	res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, arg2);
      else
	res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
		      (((sign == SIGNED || overflowable == -1)
			&& overflow)
		       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}
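
/* Illustrative note (added, not in the original file): force_fit_type
   turns the raw wide_int result back into an INTEGER_CST and sets
   TREE_OVERFLOW when the arithmetic wrapped.  With OVERFLOWABLE == -1,
   as size_binop_loc below passes, even unsigned wrap-around is flagged:
   sizetype 0 - 1 yields the all-ones constant with TREE_OVERFLOW set.  */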
tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may dependent upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    wide_int w2 = arg2;
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.elt (0);
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru ... */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to minimize
		 overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  elts[i] = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and return NULL_TREE */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_LSHIFT_EXPR
	  || code == VEC_RSHIFT_EXPR)
	{
	  if (!tree_fits_uhwi_p (arg2))
	    return NULL_TREE;

	  unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
	  unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
	  unsigned HOST_WIDE_INT innerc
	    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
	  if (shiftc >= outerc || (shiftc % innerc) != 0)
	    return NULL_TREE;
	  int offset = shiftc / innerc;

	  /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
	     For reductions, compiler emits VEC_RSHIFT_EXPR always,
	     for !BYTES_BIG_ENDIAN picks first vector element, but
	     for BYTES_BIG_ENDIAN last element from the vector.  */
	  if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
	    offset = -offset;

	  tree zero = build_zero_cst (TREE_TYPE (type));
	  for (i = 0; i < count; i++)
	    {
	      if (i + offset < 0 || i + offset >= count)
		elts[i] = zero;
	      else
		elts[i] = VECTOR_CST_ELT (arg1, i + offset);
	    }
	}
      else
	for (i = 0; i < count; i++)
	  {
	    tree elem1 = VECTOR_CST_ELT (arg1, i);

	    elts[i] = const_binop (code, elem1, arg2);

	    /* It is possible that const_binop cannot handle the given
	       code and return NULL_TREE */
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

      return build_vector (type, elts);
    }

  return NULL_TREE;
}
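
/* Illustrative note (added, not in the original file): for the
   straight-line complex division above, (3 + 2i) / (1 + 1i) computes

     t    = 1*1 + 1*1        = 2
     real = (3*1 + 2*1) / t  = 5/2
     imag = (2*1 - 3*1) / t  = -1/2

   which matches multiplying by the conjugate and dividing by |b|^2.  */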
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
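
/* Illustrative note (added, not in the original file): because the
   result type is the signed counterpart (ssizetype for sizetype),

     size_diffop (size_int (2), size_int (5))

   is computed as 0 - (ssizetype) (5 - 2) == -3 rather than as a huge
   unsigned wrap-around value.  */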
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
			 TREE_OVERFLOW (arg1));
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = true;
	  val = lt;
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = true;
	      val = ut;
	    }
	}
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
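
/* Illustrative note (added, not in the original file): under these
   Java-derived saturating semantics, for a 32-bit int target

     (int) 3.7   -> 3 (truncation)
     (int) 1e10  -> INT_MAX, TREE_OVERFLOW set
     (int) NaN   -> 0,       TREE_OVERFLOW set

   all of which is permitted by C, which leaves overflowing
   FP-to-integer conversion unspecified.  */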
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  enum machine_mode mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
			  HOST_BITS_PER_DOUBLE_INT,
			  SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     By checking if the fractional bits are not zero to add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
		      (temp.is_negative ()
		       && (TYPE_UNSIGNED (type)
			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
		      | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
	   && REAL_VALUE_ISINF (value)
	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
			      TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;
  double_int di;

  gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);

  di.low = TREE_INT_CST_ELT (arg1, 0);
  if (TREE_INT_CST_NUNITS (arg1) == 1)
    di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
  else
    di.high = TREE_INT_CST_ELT (arg1, 1);

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
				       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
					&TREE_REAL_CST (arg1),
					TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, REALPART_EXPR,
						  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == FIXED_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  return fold_build1_loc (loc, NOP_EXPR, type, arg);

	case FIXED_POINT_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
	  || TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    goto fold_convert_exit;
	}

      switch (TREE_CODE (orig))
	{
	case FIXED_POINT_TYPE:
	case INTEGER_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case REAL_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	case FIXED_POINT_TYPE:
	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
				  fold_convert_loc (loc, TREE_TYPE (type), arg),
				  fold_convert_loc (loc, TREE_TYPE (type),
						    integer_zero_node));
	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 0));
		ipart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 1));
		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	      }

	    arg = save_expr (arg);
	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      gcc_unreachable ();
    }
 fold_convert_exit:
  protected_set_expr_location_unshare (tem, loc);
  return tem;
}
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
    case SSA_NAME:

    case COMPONENT_REF:
    case MEM_REF:
    case INDIRECT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
    case OBJ_TYPE_REF:

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case SAVE_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case TARGET_EXPR:
    case COND_EXPR:
    case BIND_EXPR:
      break;

    default:
      /* Assume the worst for front-end tree codes.  */
      if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
	break;
      return false;
    }

  return true;
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue_loc (location_t loc, tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;
/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue_loc (location_t loc, tree x)
{
  if (pedantic_lvalues)
    return non_lvalue_loc (loc, x);

  return protected_set_expr_location_unshare (x, loc);
}
/* Given a tree comparison code, return the code that is the logical inverse.
   It is generally not safe to do this for floating-point comparisons, except
   for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
   ERROR_MARK in this case.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
      && code != ORDERED_EXPR && code != UNORDERED_EXPR)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR: case NE_EXPR:
    case ORDERED_EXPR: case UNORDERED_EXPR:
    case LTGT_EXPR: case UNEQ_EXPR:
      return code;
    case GT_EXPR: return LT_EXPR;
    case GE_EXPR: return LE_EXPR;
    case LT_EXPR: return GT_EXPR;
    case LE_EXPR: return GE_EXPR;
    case UNGT_EXPR: return UNLT_EXPR;
    case UNGE_EXPR: return UNLE_EXPR;
    case UNLT_EXPR: return UNGT_EXPR;
    case UNLE_EXPR: return UNGE_EXPR;
    default: gcc_unreachable ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR: return COMPCODE_LT;
    case EQ_EXPR: return COMPCODE_EQ;
    case LE_EXPR: return COMPCODE_LE;
    case GT_EXPR: return COMPCODE_GT;
    case NE_EXPR: return COMPCODE_NE;
    case GE_EXPR: return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT: return LT_EXPR;
    case COMPCODE_EQ: return EQ_EXPR;
    case COMPCODE_LE: return LE_EXPR;
    case COMPCODE_GT: return GT_EXPR;
    case COMPCODE_NE: return NE_EXPR;
    case COMPCODE_GE: return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT: return UNLT_EXPR;
    case COMPCODE_UNEQ: return UNEQ_EXPR;
    case COMPCODE_UNLE: return UNLE_EXPR;
    case COMPCODE_UNGT: return UNGT_EXPR;
    case COMPCODE_LTGT: return LTGT_EXPR;
    case COMPCODE_UNGE: return UNGE_EXPR;
    default: gcc_unreachable ();
    }
}
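
/* Illustrative note (added, not in the original file): the two functions
   above are inverses, so for any comparison code

     compcode_to_comparison (comparison_to_compcode (LE_EXPR)) == LE_EXPR

   and bit operations performed in between act as logic on comparisons.  */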
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (location_t loc,
		     enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  int compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    {
      enum tree_code tcode;

      tcode = compcode_to_comparison ((enum comparison_code) compcode);
      return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
    }
}
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */
int
operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
      || TREE_TYPE (arg0) == error_mark_node
      || TREE_TYPE (arg1) == error_mark_node)
    return 0;

  /* Similar, if either does not have a type (like a released SSA name),
     they aren't equal.  */
  if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
    return 0;

  /* Check equality of integer constants before bailing out due to
     precision differences.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    return tree_int_cst_equal (arg0, arg1);

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  As pointers
     strictly don't have a signedness, require either two pointers or
     two non-pointers as well.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
      || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* We cannot consider pointers to different address space equal.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
      && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
          != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
    return 0;

  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
  if (element_precision (TREE_TYPE (arg0))
      != element_precision (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
        return operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 1), flags)
               && operand_equal_p (TREE_OPERAND (arg0, 1),
                                   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* NOP_EXPR and CONVERT_EXPR are considered equal.  */
      && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
    return 0;

  /* This is needed for conversions and for COMPONENT_REF.
     Might as well play it safe and always test this.  */
  if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.
     If we are taking an invariant address of two identical objects
     they are necessarily equal as well.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
          || (flags & OEP_CONSTANT_ADDRESS_OF)
          || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
        return tree_int_cst_equal (arg0, arg1);

      case FIXED_CST:
        return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
                                       TREE_FIXED_CST (arg1));

      case REAL_CST:
        if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
                                   TREE_REAL_CST (arg1)))
          return 1;

        if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
          {
            /* If we do not distinguish between signed and unsigned zero,
               consider them equal.  */
            if (real_zerop (arg0) && real_zerop (arg1))
              return 1;
          }
        return 0;

      case VECTOR_CST:
        {
          unsigned i;

          if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
            return 0;

          for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
            if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
                                  VECTOR_CST_ELT (arg1, i), flags))
              return 0;

          return 1;
        }

      case COMPLEX_CST:
        return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
                                 flags)
                && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
                                    flags));

      case STRING_CST:
        return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
                && ! memcmp (TREE_STRING_POINTER (arg0),
                             TREE_STRING_POINTER (arg1),
                             TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
        return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
                                TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
                                ? OEP_CONSTANT_ADDRESS_OF : 0);
      default:
        break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, the both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
                                    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N) \
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
        {
        CASE_CONVERT:
        case FIX_TRUNC_EXPR:
          if (TYPE_UNSIGNED (TREE_TYPE (arg0))
              != TYPE_UNSIGNED (TREE_TYPE (arg1)))
            return 0;
          break;
        default:
          break;
        }

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
        return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 1), flags)
              && operand_equal_p (TREE_OPERAND (arg0, 1),
                                  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
         dereferencing contain a side effect, these cannot be equal,
         but their addresses can be.  */
      if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
          && (TREE_SIDE_EFFECTS (arg0)
              || TREE_SIDE_EFFECTS (arg1)))
        return 0;

      switch (TREE_CODE (arg0))
        {
        case INDIRECT_REF:
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          return OP_SAME (0);

        case REALPART_EXPR:
        case IMAGPART_EXPR:
          return OP_SAME (0);

        case TARGET_MEM_REF:
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          /* Require equal extra operands and then fall through to MEM_REF
             handling of the two common operands.  */
          if (!OP_SAME_WITH_NULL (2)
              || !OP_SAME_WITH_NULL (3)
              || !OP_SAME_WITH_NULL (4))
            return 0;
          /* Fallthru.  */
        case MEM_REF:
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          /* Require equal access sizes, and similar pointer types.
             We can have incomplete types for array references of
             variable-sized arrays from the Fortran frontend
             though.  Also verify the types are compatible.  */
          return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
                   || (TYPE_SIZE (TREE_TYPE (arg0))
                       && TYPE_SIZE (TREE_TYPE (arg1))
                       && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
                                           TYPE_SIZE (TREE_TYPE (arg1)),
                                           flags)))
                  && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
                  && alias_ptr_types_compatible_p
                       (TREE_TYPE (TREE_OPERAND (arg0, 1)),
                        TREE_TYPE (TREE_OPERAND (arg1, 1)))
                  && OP_SAME (0) && OP_SAME (1));

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          /* Operands 2 and 3 may be null.
             Compare the array index by value if it is constant first as we
             may have different types but same value here.  */
          if (!OP_SAME (0))
            return 0;
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
                                       TREE_OPERAND (arg1, 1))
                   || OP_SAME (1))
                  && OP_SAME_WITH_NULL (2)
                  && OP_SAME_WITH_NULL (3));

        case COMPONENT_REF:
          /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
             may be NULL when we're called to compare MEM_EXPRs.  */
          if (!OP_SAME_WITH_NULL (0)
              || !OP_SAME (1))
            return 0;
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          return OP_SAME_WITH_NULL (2);

        case BIT_FIELD_REF:
          if (!OP_SAME (0))
            return 0;
          flags &= ~OEP_CONSTANT_ADDRESS_OF;
          return OP_SAME (1) && OP_SAME (2);

        default:
          return 0;
        }

    case tcc_expression:
      switch (TREE_CODE (arg0))
        {
        case ADDR_EXPR:
        case TRUTH_NOT_EXPR:
          return OP_SAME (0);

        case TRUTH_ANDIF_EXPR:
        case TRUTH_ORIF_EXPR:
          return OP_SAME (0) && OP_SAME (1);

        case FMA_EXPR:
        case WIDEN_MULT_PLUS_EXPR:
        case WIDEN_MULT_MINUS_EXPR:
          if (!OP_SAME (2))
            return 0;
          /* The multiplication operands are commutative.  */
          /* FALLTHRU */

        case TRUTH_AND_EXPR:
        case TRUTH_OR_EXPR:
        case TRUTH_XOR_EXPR:
          if (OP_SAME (0) && OP_SAME (1))
            return 1;

          /* Otherwise take into account this is a commutative operation.  */
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 1), flags)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 0), flags));

        case COND_EXPR:
        case VEC_COND_EXPR:
        case DOT_PROD_EXPR:
          return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

        default:
          return 0;
        }

    case tcc_vl_exp:
      switch (TREE_CODE (arg0))
        {
        case CALL_EXPR:
          /* If the CALL_EXPRs call different functions, then they
             clearly cannot be equal.  */
          if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
                                 flags))
            return 0;

          {
            unsigned int cef = call_expr_flags (arg0);
            if (flags & OEP_PURE_SAME)
              cef &= ECF_CONST | ECF_PURE;
            else
              cef &= ECF_CONST;
            if (!cef)
              return 0;
          }

          /* Now see if all the arguments are the same.  */
          {
            const_call_expr_arg_iterator iter0, iter1;
            const_tree a0, a1;
            for (a0 = first_const_call_expr_arg (arg0, &iter0),
                   a1 = first_const_call_expr_arg (arg1, &iter1);
                 a0 && a1;
                 a0 = next_const_call_expr_arg (&iter0),
                   a1 = next_const_call_expr_arg (&iter1))
              if (! operand_equal_p (a0, a1, flags))
                return 0;

            /* If we get here and both argument lists are exhausted
               then the CALL_EXPRs are equal.  */
            return ! (a0 || a1);
          }
        default:
          return 0;
        }

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
              && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
              && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
              && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
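}

/* An example of the OEP_ONLY_CONST distinction documented before
   operand_equal_p (editorial illustration, not part of the original
   source): for the REAL_CSTs -0.0 and 0.0, operand_equal_p returns 0
   when the target honors signed zeros, even though -0.0 == 0.0
   evaluates to true, because the two constants are distinguishable:
   1.0 / -0.0 is -Inf while 1.0 / 0.0 is +Inf.  */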
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
         to match the longer operand.  */
      primarg1 = fold_convert (signed_or_unsigned_type_for
                               (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
        return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
               || code == COMPOUND_EXPR))
    tclass = tcc_binary;

  else if (tclass == tcc_expression && code == SAVE_EXPR
           && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
         too complex to handle.  */
      if (*cval1 || *cval2)
        return 0;

      tclass = tcc_unary;
      *save_p = 1;
    }

  switch (tclass)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
              && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
        return (twoval_comparison_p (TREE_OPERAND (arg, 0),
                                     cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                        cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 2),
                                        cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
         the second operand, we know *CVAL1 can't be zero.  It must be that
         one side of the comparison is each of the values; test for the
         case where this isn't true by failing if the two operands
         are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
                           TREE_OPERAND (arg, 1), 0))
        return 0;

      if (*cval1 == 0)
        *cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
        ;
      else
        return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
        ;
      else
        return 0;

      return 1;

    default:
      return 0;
    }
}
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (location_t loc, tree arg, tree old0, tree new0,
            tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    tclass = tcc_binary;

  switch (tclass)
    {
    case tcc_unary:
      return fold_build1_loc (loc, code, type,
                              eval_subst (loc, TREE_OPERAND (arg, 0),
                                          old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2_loc (loc, code, type,
                              eval_subst (loc, TREE_OPERAND (arg, 0),
                                          old0, new0, old1, new1),
                              eval_subst (loc, TREE_OPERAND (arg, 1),
                                          old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
        {
        case SAVE_EXPR:
          return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
                             old1, new1);

        case COMPOUND_EXPR:
          return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
                             old1, new1);

        case COND_EXPR:
          return fold_build3_loc (loc, code, type,
                                  eval_subst (loc, TREE_OPERAND (arg, 0),
                                              old0, new0, old1, new1),
                                  eval_subst (loc, TREE_OPERAND (arg, 1),
                                              old0, new0, old1, new1),
                                  eval_subst (loc, TREE_OPERAND (arg, 2),
                                              old0, new0, old1, new1));
        default:
          break;
        }
      /* Fall through - ???  */

    case tcc_comparison:
      {
        tree arg0 = TREE_OPERAND (arg, 0);
        tree arg1 = TREE_OPERAND (arg, 1);

        /* We need to check both for exact equality and tree equality.  The
           former will be true if the operand has a side-effect.  In that
           case, we know the operand occurred exactly once.  */

        if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
          arg0 = new0;
        else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
          arg0 = new1;

        if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
          arg1 = new0;
        else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
          arg1 = new1;

        return fold_build2_loc (loc, code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement cast to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
                       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
                       fold_ignored_result (omitted), t);

  return non_lvalue_loc (loc, t);
}
/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
                               tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement cast to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
                       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
                       fold_ignored_result (omitted), t);

  return pedantic_non_lvalue_loc (loc, t);
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands_loc (location_t loc, tree type, tree result,
                       tree omitted1, tree omitted2)
{
  tree t = fold_convert_loc (loc, type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
}
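
/* A concrete instance of the omitted-operand helpers (editorial
   illustration, not part of the original source): folding "f () * 0"
   cannot simply produce the constant 0, because the call must still be
   evaluated.  omit_one_operand_loc instead yields the equivalent of

       (f (), 0)

   i.e. a COMPOUND_EXPR that keeps the side effect and then gives the
   folded result; omit_two_operands_loc does the same for two dropped
   operands, evaluating them in order.  */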
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

static tree
fold_truth_not_expr (location_t loc, tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  location_t loc1, loc2;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
          && flag_trapping_math
          && code != ORDERED_EXPR && code != UNORDERED_EXPR
          && code != NE_EXPR && code != EQ_EXPR)
        return NULL_TREE;

      code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
      if (code == ERROR_MARK)
        return NULL_TREE;

      return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
                         TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_OR_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_AND_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
         unless the second operand is a TRUTH_NOT_EXPR in which case our
         result is the XOR of the first operand with the inside of the
         negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
        return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
                           TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
        return build2_loc (loc, TRUTH_XOR_EXPR, type,
                           invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
                           TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ORIF_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
        tree arg1 = TREE_OPERAND (arg, 1);
        tree arg2 = TREE_OPERAND (arg, 2);

        loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
        loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);

        /* A COND_EXPR may have a throw as one operand, which
           then has void type.  Just leave void operands
           as they are.  */
        return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
                           VOID_TYPE_P (TREE_TYPE (arg1))
                           ? arg1 : invert_truthvalue_loc (loc1, arg1),
                           VOID_TYPE_P (TREE_TYPE (arg2))
                           ? arg2 : invert_truthvalue_loc (loc2, arg2));
      }

    case COMPOUND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, COMPOUND_EXPR, type,
                         TREE_OPERAND (arg, 0),
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));

    CASE_CONVERT:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
        return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

      /* ... fall through ...  */

    case FLOAT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, TREE_CODE (arg), type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
        return NULL_TREE;
      return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, CLEANUP_POINT_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    default:
      return NULL_TREE;
    }
}
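
/* The TRUTH_ANDIF_EXPR case above is De Morgan's law applied to
   short-circuit operators (editorial illustration, not part of the
   original source):

       !(a && b)  -->  !a || !b

   using TRUTH_ORIF_EXPR so the inverted form short-circuits at the
   same points, and evaluates the same operands, as the original.  */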
/* Fold the truth-negation of ARG.  This never alters ARG itself.  We
   assume that ARG is an operation that returns a truth value (0 or 1
   for scalars, 0 or -1 for vectors).  Return the folded expression if
   folding is successful.  Otherwise, return NULL_TREE.  */

static tree
fold_invert_truthvalue (location_t loc, tree arg)
{
  tree type = TREE_TYPE (arg);
  return fold_unary_loc (loc, VECTOR_TYPE_P (type)
                              ? BIT_NOT_EXPR
                              : TRUTH_NOT_EXPR,
                         type, arg);
}
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1 for scalars, 0 or -1 for vectors).  */

tree
invert_truthvalue_loc (location_t loc, tree arg)
{
  if (TREE_CODE (arg) == ERROR_MARK)
    return arg;

  tree type = TREE_TYPE (arg);
  return fold_build1_loc (loc, VECTOR_TYPE_P (type)
                               ? BIT_NOT_EXPR
                               : TRUTH_NOT_EXPR,
                          type, arg);
}
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
        (A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (location_t loc, enum tree_code code, tree type,
                     tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
          && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  common = fold_convert_loc (loc, type, common);
  left = fold_convert_loc (loc, type, left);
  right = fold_convert_loc (loc, type, right);
  return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
                          fold_build2_loc (loc, code, type, left, right));
}
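
/* A worked instance of the distribution above (editorial illustration,
   not part of the original source): with B == 0x0f and C == 0xf0,

       (A | 0x0f) & (A | 0xf0)  -->  A | (0x0f & 0xf0)  ==  A | 0  ==  A

   so after further constant folding both masks disappear entirely.  */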
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */

static tree
distribute_real_division (location_t loc, enum tree_code code, tree type,
                          tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
                          TREE_OPERAND (arg1, 1), 0))
    return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
                            fold_build2_loc (loc, code, type,
                                             TREE_OPERAND (arg0, 0),
                                             TREE_OPERAND (arg1, 0)),
                            TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
                       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
        real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
        real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2_loc (loc, MULT_EXPR, type,
                              TREE_OPERAND (arg0, 0),
                              build_real (type, r0));
    }

  return NULL_TREE;
}
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (location_t loc, tree inner, tree type,
                    HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
{
  tree result, bftype;

  if (bitpos == 0)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
           || POINTER_TYPE_P (TREE_TYPE (inner)))
          && tree_fits_shwi_p (size)
          && tree_to_shwi (size) == bitsize)
        return fold_convert_loc (loc, type, inner);
    }

  bftype = type;
  if (TYPE_PRECISION (bftype) != bitsize
      || TYPE_UNSIGNED (bftype) == !unsignedp)
    bftype = build_nonstandard_integer_type (bitsize, 0);

  result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
                       size_int (bitsize), bitsize_int (bitpos));

  if (bftype != type)
    result = fold_convert_loc (loc, type, result);

  return result;
}
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (location_t loc, enum tree_code code,
                            tree compare_type, tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
                                &lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
         sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
                                    &runsignedp, &rvolatilep, false);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
          || lunsignedp != runsignedp || offset != 0
          || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
        return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
                         const_p ? TYPE_ALIGN (TREE_TYPE (linner))
                         : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
                                TYPE_ALIGN (TREE_TYPE (rinner))),
                         word_mode, false);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst_type (unsigned_type, -1);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
  mask = const_binop (RSHIFT_EXPR, mask,
                      size_int (nbitsize - lbitsize - lbitpos));

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return fold_build2_loc (loc, code, compare_type,
                            fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                                             make_bit_field_ref (loc, linner,
                                                                 unsigned_type,
                                                                 nbitsize,
                                                                 nbitpos, 1),
                                             mask),
                            fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                                             make_bit_field_ref (loc, rinner,
                                                                 unsigned_type,
                                                                 nbitsize,
                                                                 nbitpos, 1),
                                             mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (wi::lrshift (rhs, lbitsize) != 0)
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }
  else
    {
      wide_int tem = wi::arshift (rhs, lbitsize - 1);
      if (tem != 0 && tem != -1)
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);

  rhs = const_binop (BIT_AND_EXPR,
                     const_binop (LSHIFT_EXPR,
                                  fold_convert_loc (loc, unsigned_type, rhs),
                                  size_int (lbitpos)),
                     mask);

  lhs = build2_loc (loc, code, compare_type,
                    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
  return lhs;
}
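
/* A worked example of the constant case (editorial illustration, not
   part of the original source; the exact layout is target dependent):
   for

       struct s { unsigned a : 3; unsigned b : 5; } x;
       ... x.b == 17 ...

   on a typical little-endian target, b occupies bits 3..7 of a byte,
   so mask == 0xf8 and the test can be done on the whole byte as

       (byte & 0xf8) == (17 << 3)

   avoiding the shift a plain bit-field extraction would need.  */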
/* Subroutine for fold_truth_andor_1: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
                        HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
                        int *punsignedp, int *pvolatilep,
                        tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (CONVERT_EXPR_P (exp)
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
        return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
                               punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst_type (unsigned_type, -1);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                            fold_convert_loc (loc, unsigned_type, and_mask),
                            mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bit positions and MASK is SIGNED.  */

static int
all_ones_mask_p (const_tree mask, unsigned int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);

  /* If this function returns true when the type of the mask is
     UNSIGNED, then there will be errors.  In particular see
     gcc.c-torture/execute/990326-1.c.  There does not appear to be
     any documentation paper trail as to why this is so.  But the pre
     wide-int worked with that restriction and it has been preserved
     here.  */
  if (size > precision || TYPE_SIGN (type) == UNSIGNED)
    return false;

  return wi::mask (size, false, precision) == mask;
}
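
/* For example (editorial illustration, not part of the original
   source): with an 8-bit signed type, a mask value of 0x0f is "all
   ones" for SIZE == 4, since wi::mask (4, false, 8) produces the bit
   pattern 00001111; a value such as 0x1f would not match.  */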
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, const_tree val)
{
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (wi::only_sign_bit_p (val, width))
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
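
/* For instance (editorial illustration, not part of the original
   source): for a 16-bit type the only accepted value is 0x8000, the
   constant whose single set bit is the sign bit; a test such as
   (x & 0x8000) != 0 can then be folded by the callers into x < 0.  */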
/* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (const_tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
          || TREE_CODE (exp) == SSA_NAME
          || (DECL_P (exp)
              && ! TREE_ADDRESSABLE (exp)
              && ! TREE_THIS_VOLATILE (exp)
              && ! DECL_NONLOCAL (exp)
              /* Don't regard global variables as simple.  They may be
                 allocated in ways unknown to the compiler (shared memory,
                 #pragma weak, etc).  */
              && ! TREE_PUBLIC (exp)
              && ! DECL_EXTERNAL (exp)
              /* Weakrefs are not safe to be read, since they can be NULL.
                 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
                 have DECL_WEAK flag set.  */
              && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
              /* Loading a static variable is unduly expensive, but global
                 registers aren't expensive.  */
              && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}
/* Subroutine for fold_truth_andor: determine if an operand is simple enough
   to be evaluated unconditionally.
   In addition to simple_operand_p, we assume that comparisons, conversions,
   and logic-not operations are simple, if their operands are simple, too.  */

static bool
simple_operand_p_2 (tree exp)
{
  enum tree_code code;

  if (TREE_SIDE_EFFECTS (exp)
      || tree_could_trap_p (exp))
    return false;

  while (CONVERT_EXPR_P (exp))
    exp = TREE_OPERAND (exp, 0);

  code = TREE_CODE (exp);

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return (simple_operand_p (TREE_OPERAND (exp, 0))
            && simple_operand_p (TREE_OPERAND (exp, 1)));

  if (code == TRUTH_NOT_EXPR)
    return simple_operand_p_2 (TREE_OPERAND (exp, 0));

  return simple_operand_p (exp);
}
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
        X == 2 || X == 3 || X == 4 || X == 5
   and
        X >= 2 && X <= 5
   are converted to
        (unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
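
/* A worked instance of the transformation above (editorial
   illustration, not part of the original source):
   X == 2 || X == 3 || X == 4 || X == 5 is the range + [2, 5].  After
   subtracting the low bound, X lies in the range exactly when the
   unsigned value X - 2 lies in + [0, 3]; values of X below 2 wrap
   around to huge unsigned numbers, so the single comparison

       (unsigned) (X - 2) <= 3

   tests both bounds at once.  */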
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
             tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
                         arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
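
/* Example of the infinite-bound handling (editorial illustration, not
   part of the original source): range_binop (LE_EXPR, type, NULL, 0,
   c, 1) compares a missing lower bound (sgn0 == -1, standing in for
   minus infinity) against a finite bound C (sgn1 == 0), so the result
   is true regardless of C's value.  */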
/* Helper routine for make_range.  Perform one step for it, return
   new expression if the loop should continue or NULL_TREE if it should
   stop.  */

tree
make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
                 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
                 bool *strict_overflow_p)
{
  tree arg0_type = TREE_TYPE (arg0);
  tree n_low, n_high, low = *p_low, high = *p_high;
  int in_p = *p_in_p, n_in_p;

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      /* We can only do something if the range is testing for zero.  */
      if (low == NULL_TREE || high == NULL_TREE
          || ! integer_zerop (low) || ! integer_zerop (high))
        return NULL_TREE;
      *p_in_p = ! in_p;
      return arg0;

    case EQ_EXPR: case NE_EXPR:
    case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
      /* We can only do something if the range is testing for zero
         and if the second operand is an integer constant.  Note that
         saying something is "in" the range we make is done by
         complementing IN_P since it will set in the initial case of
         being not equal to zero; "out" is leaving it alone.  */
      if (low == NULL_TREE || high == NULL_TREE
          || ! integer_zerop (low) || ! integer_zerop (high)
          || TREE_CODE (arg1) != INTEGER_CST)
        return NULL_TREE;

      switch (code)
        {
        case NE_EXPR:  /* - [c, c]  */
          low = high = arg1;
          break;
        case EQ_EXPR:  /* + [c, c]  */
          in_p = ! in_p, low = high = arg1;
          break;
        case GT_EXPR:  /* - [-, c] */
          low = 0, high = arg1;
          break;
        case GE_EXPR:  /* + [c, -] */
          in_p = ! in_p, low = arg1, high = 0;
          break;
        case LT_EXPR:  /* - [c, -] */
          low = arg1, high = 0;
          break;
        case LE_EXPR:  /* + [-, c] */
          in_p = ! in_p, low = 0, high = arg1;
          break;
        default:
          gcc_unreachable ();
        }

      /* If this is an unsigned comparison, we also know that EXP is
         greater than or equal to zero.  We base the range tests we make
         on that fact, so we record it here so we can parse existing
         range tests.  We test arg0_type since often the return type
         of, e.g. EQ_EXPR, is boolean.  */
      if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
        {
          if (! merge_ranges (&n_in_p, &n_low, &n_high,
                              in_p, low, high, 1,
                              build_int_cst (arg0_type, 0),
                              NULL_TREE))
            return NULL_TREE;

          in_p = n_in_p, low = n_low, high = n_high;

          /* If the high bound is missing, but we have a nonzero low
             bound, reverse the range so it goes from zero to the low bound
             minus 1.  */
          if (high == 0 && low && ! integer_zerop (low))
            {
              in_p = ! in_p;
              high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
                                  build_int_cst (TREE_TYPE (low), 1), 0);
              low = build_int_cst (arg0_type, 0);
            }
        }

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;

    case NEGATE_EXPR:
      /* If flag_wrapv and ARG0_TYPE is signed, make sure
         low and high are non-NULL, then normalize will DTRT.  */
      if (!TYPE_UNSIGNED (arg0_type)
          && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
        {
          if (low == NULL_TREE)
            low = TYPE_MIN_VALUE (arg0_type);
          if (high == NULL_TREE)
            high = TYPE_MAX_VALUE (arg0_type);
        }

      /* (-x) IN [a,b] -> x in [-b, -a]  */
      n_low = range_binop (MINUS_EXPR, exp_type,
                           build_int_cst (exp_type, 0),
                           0, high, 1);
      n_high = range_binop (MINUS_EXPR, exp_type,
                            build_int_cst (exp_type, 0),
                            0, low, 0);
      if (n_high != 0 && TREE_OVERFLOW (n_high))
        return NULL_TREE;
      goto normalize;

    case BIT_NOT_EXPR:
      /* ~ X -> -X - 1  */
      return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
                         build_int_cst (exp_type, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
      if (TREE_CODE (arg1) != INTEGER_CST)
        return NULL_TREE;

      /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
         move a constant to the other side.  */
      if (!TYPE_UNSIGNED (arg0_type)
          && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
        return NULL_TREE;

      /* If EXP is signed, any overflow in the computation is undefined,
         so we don't worry about it so long as our computations on
         the bounds don't overflow.  For unsigned, overflow is defined
         and this is exactly the right thing.  */
      n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                           arg0_type, low, 0, arg1, 0);
      n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                            arg0_type, high, 1, arg1, 0);
      if ((n_low != 0 && TREE_OVERFLOW (n_low))
          || (n_high != 0 && TREE_OVERFLOW (n_high)))
        return NULL_TREE;

      if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
        *strict_overflow_p = true;

      normalize:
        /* Check for an unsigned range which has wrapped around the maximum
           value thus making n_high < n_low, and normalize it.  */
        if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
          {
            low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
                               build_int_cst (TREE_TYPE (n_high), 1), 0);
            high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
                                build_int_cst (TREE_TYPE (n_low), 1), 0);

            /* If the range is of the form +/- [ x+1, x ], we won't
               be able to normalize it.  But then, it represents the
               whole range or the empty set, so make it
               +/- [ -, - ].  */
            if (tree_int_cst_equal (n_low, low)
                && tree_int_cst_equal (n_high, high))
              low = high = 0;
            else
              in_p = ! in_p;
          }
        else
          low = n_low, high = n_high;

        *p_low = low;
        *p_high = high;
        *p_in_p = in_p;
        return arg0;

    CASE_CONVERT:
    case NON_LVALUE_EXPR:
      if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
        return NULL_TREE;

      if (! INTEGRAL_TYPE_P (arg0_type)
          || (low != 0 && ! int_fits_type_p (low, arg0_type))
          || (high != 0 && ! int_fits_type_p (high, arg0_type)))
        return NULL_TREE;

      n_low = low, n_high = high;

      if (n_low != 0)
        n_low = fold_convert_loc (loc, arg0_type, n_low);

      if (n_high != 0)
        n_high = fold_convert_loc (loc, arg0_type, n_high);

      /* If we're converting arg0 from an unsigned type, to exp,
         a signed type, we will be doing the comparison as unsigned.
         The tests above have already verified that LOW and HIGH
         are both positive.

         So we have to ensure that we will handle large unsigned
         values the same way that the current signed bounds treat
         negative values.  */

      if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
        {
          tree high_positive;
          tree equiv_type;
          /* For fixed-point modes, we need to pass the saturating flag
             as the 2nd parameter.  */
          if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
            equiv_type
              = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
                                                TYPE_SATURATING (arg0_type));
          else
            equiv_type
              = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);

          /* A range without an upper bound is, naturally, unbounded.
             Since convert would have cropped a very large value, use
             the max value for the destination type.  */
          high_positive
            = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
              : TYPE_MAX_VALUE (arg0_type);

          if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
            high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
                                             fold_convert_loc (loc, arg0_type,
                                                               high_positive),
                                             build_int_cst (arg0_type, 1));

          /* If the low bound is specified, "and" the range with the
             range for which the original unsigned value will be
             positive.  */
          if (low != 0)
            {
              if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
                                  1, fold_convert_loc (loc, arg0_type,
                                                       integer_zero_node),
                                  high_positive))
                return NULL_TREE;

              in_p = (n_in_p == in_p);
            }
          else
            {
              /* Otherwise, "or" the range with the range of the input
                 that will be interpreted as negative.  */
              if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
                                  1, fold_convert_loc (loc, arg0_type,
                                                       integer_zero_node),
                                  high_positive))
                return NULL_TREE;

              in_p = (in_p != n_in_p);
            }
        }

      *p_low = n_low;
      *p_high = n_high;
      *p_in_p = in_p;
      return arg0;

    default:
      return NULL_TREE;
    }
}
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */

tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
            bool *strict_overflow_p)
{
  enum tree_code code;
  tree arg0, arg1 = NULL_TREE;
  tree exp_type, nexp;
  int in_p;
  tree low, high;
  location_t loc = EXPR_LOCATION (exp);

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);
      arg0 = NULL_TREE;

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
        {
          if (TREE_OPERAND_LENGTH (exp) > 0)
            arg0 = TREE_OPERAND (exp, 0);
          if (TREE_CODE_CLASS (code) == tcc_binary
              || TREE_CODE_CLASS (code) == tcc_comparison
              || (TREE_CODE_CLASS (code) == tcc_expression
                  && TREE_OPERAND_LENGTH (exp) > 1))
            arg1 = TREE_OPERAND (exp, 1);
        }
      if (arg0 == NULL_TREE)
        break;

      nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
                              &high, &in_p, strict_overflow_p);
      if (nexp == NULL_TREE)
        break;
      exp = nexp;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
                                                 exp, 0, low, 0))
                      && integer_onep (range_binop (LE_EXPR, integer_type_node,
                                                    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
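
/* For instance (editorial illustration, not part of the original
   source): for EXP = (unsigned) x - 2 <= 3, make_range peels the
   comparison first (giving + [0, 3]), then the subtraction (giving
   + [2, 5] for the operand), and returns the stripped operand with
   *PIN_P == 1, *PLOW == 2 and *PHIGH == 5 -- the inverse of the
   transformation described before range_binop above.  */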
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

tree
build_range_check (location_t loc, tree type, tree exp, int in_p,
		   tree low, tree high)
{
  tree etype = TREE_TYPE (exp), value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (! in_p)
    {
      value = build_range_check (loc, type, exp, 1, low, high);
      if (value != 0)
	return invert_truthvalue_loc (loc, value);

      return 0;
    }

  if (low == 0 && high == 0)
    return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);

  if (low == 0)
    return fold_build2_loc (loc, LE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, high));

  if (high == 0)
    return fold_build2_loc (loc, GE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2_loc (loc, EQ_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
	{
	  etype = unsigned_type_for (etype);
	  high = fold_convert_loc (loc, etype, high);
	  exp = fold_convert_loc (loc, etype, exp);
	}
      return build_range_check (loc, type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      int prec = TYPE_PRECISION (etype);

      if (wi::mask (prec - 1, false, prec) == high)
	{
	  if (TYPE_UNSIGNED (etype))
	    {
	      tree signed_etype = signed_type_for (etype);
	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
		etype
		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
	      else
		etype = signed_etype;
	      exp = fold_convert_loc (loc, etype, exp);
	    }
	  return fold_build2_loc (loc, GT_EXPR, type, exp,
				  build_int_cst (etype, 0));
	}
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetic for the type of the expression.
     First make sure that arithmetic in this type is valid, then make sure
     that it wraps around.  */
  if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
    etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
					    TYPE_UNSIGNED (etype));

  if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
	 for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
			  build_int_cst (TREE_TYPE (maxv), 1), 1);
      minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
				      minv, 1, maxv, 1)))
	etype = utype;
      else
	return 0;
    }

  high = fold_convert_loc (loc, etype, high);
  low = fold_convert_loc (loc, etype, low);
  exp = fold_convert_loc (loc, etype, exp);

  value = const_binop (MINUS_EXPR, high, low);

  if (POINTER_TYPE_P (etype))
    {
      if (value != 0 && !TREE_OVERFLOW (value))
	{
	  low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
	  return build_range_check (loc, type,
				    fold_build_pointer_plus_loc (loc, exp, low),
				    1, build_int_cst (etype, 0), value);
	}
      return 0;
    }

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (loc, type,
			      fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
			      1, build_int_cst (etype, 0), value);

  return 0;
}
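
/* For example (hypothetical constants, for illustration only): an "in"
   range test of `c' against [5, 10] on a wrapping unsigned type is
   reduced by the subtraction trick above to the single comparison

     (c - 5) <= 5

   i.e. one unsigned compare instead of two, because after subtracting
   LOW the range recursion hits the integer_zerop (low) case.  */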
/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
			build_int_cst (TREE_TYPE (val), 1), 0);
}
/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
			build_int_cst (TREE_TYPE (val), 1), 0);
}
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

bool
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the second range to the end of the first.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  low = range_successor (high1);
	  high = high0;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* We are in the weird situation where high0 > high1 but
		 high1 has no successor.  Punt.  */
	      return 0;
	    }
	}
      else if (! subset || highequal)
	{
	  low = low0;
	  high = range_predecessor (low1);
	  in_p = 1;
	  if (high == 0)
	    {
	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
	      return 0;
	    }
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  low = range_successor (high0);
	  high = high1;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* high1 > high0 but high0 has no successor.  Punt.  */
	      return 0;
	    }
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of
	 the second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_successor (high0),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    {
	      /* Canonicalize - [min, x] into - [-, x].  */
	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (low0)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (low0))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (low0,
					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
		      low0 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
			&& integer_zerop (low0))
		      low0 = 0;
		    break;
		  default:
		    break;
		  }

	      /* Canonicalize - [x, max] into - [x, -].  */
	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (high1)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (high1))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (high1,
					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
		      high1 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
						       high1, 1,
						       build_int_cst (TREE_TYPE (high1), 1),
						       1)))
		      high1 = 0;
		    break;
		  default:
		    break;
		  }

	      /* The ranges might be also adjacent between the maximum and
		 minimum values of the given type.  For
		 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
		 return + [x + 1, y - 1].  */
	      if (low0 == 0 && high1 == 0)
		{
		  low = range_successor (high0);
		  high = range_predecessor (low1);
		  if (low == 0 || high == 0)
		    return 0;

		  in_p = 1;
		}
	      else
		return 0;
	    }
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
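
/* Two illustrative merges (hypothetical bounds): including [0, 9] and
   [5, 20] overlaps without subsumption, so the code above produces the
   single included range [5, 9]; excluding both [0, 4] and [5, 9] gives
   the single excluded range [0, 9], since range_successor (4) equals 5
   and the ranges are therefore adjacent.  */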
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is also used to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (location_t loc, tree type,
				tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && (FLOAT_TYPE_P (TREE_TYPE (arg01))
	  ? real_zerop (arg01)
	  : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	     /* In the case that A is of the form X-Y, '-A' (arg2) may
		have already been folded to Y-X, check for that.  */
	  || (TREE_CODE (arg1) == MINUS_EXPR
	      && TREE_CODE (arg2) == MINUS_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0),
				  TREE_OPERAND (arg2, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg1, 1),
				  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	tem = fold_convert_loc (loc, arg1_type, arg1);
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert_loc (loc, signed_type_for
				   (TREE_TYPE (arg1)), arg1);
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert_loc (loc, signed_type_for
				   (TREE_TYPE (arg1)), arg1);
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return negate_expr (fold_convert_loc (loc, type, tem));
      default:
	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      else if (comp_code == EQ_EXPR)
	return build_zero_cst (type);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
	  || VECTOR_TYPE_P (type)
	  || (strcmp (lang_hooks.name, "GNU C++") != 0
	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
	  || ! maybe_lvalue_p (arg1)
	  || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
	{
	  comp_type = type;
	  comp_op0 = arg1;
	  comp_op1 = arg2;
	}

      switch (comp_code)
	{
	case EQ_EXPR:
	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
	case NE_EXPR:
	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
	case LE_EXPR:
	case LT_EXPR:
	case UNLE_EXPR:
	case UNLT_EXPR:
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
		    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					      fold_convert_loc (loc, type, tem));
	    }
	  break;
	case GE_EXPR:
	case GT_EXPR:
	case UNGE_EXPR:
	case UNGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
		    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					      fold_convert_loc (loc, type, tem));
	    }
	  break;
	case UNEQ_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, arg2));
	  break;
	case LTGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, arg1));
	  break;
	default:
	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	  break;
	}
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
	if (TREE_CODE (arg1) == INTEGER_CST)
	  break;
	/* We can replace A with C1 in this case.  */
	arg1 = fold_convert_loc (loc, type, arg01);
	return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
	/* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
	   MIN_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));
	  }
	break;

      case LE_EXPR:
	/* If C1 is C2 - 1, this is min(A, C2), with the same care
	   as above.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));
	  }
	break;

      case GT_EXPR:
	/* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
	   MAX_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	  }
	break;

      case GE_EXPR:
	/* If C1 is C2 + 1, this is max(A, C2), with the same care as above.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	  }
	break;

      case NE_EXPR:
	break;

      default:
	gcc_unreachable ();
      }

  return NULL_TREE;
}
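
/* A hypothetical source fragment showing two of the transformations
   above (illustration only; the first requires that signed zeros need
   not be honored, e.g. under -ffast-math):

     double f (double a) { return a > 0 ? a : -a; }  // folds to abs (a)
     int    g (int x)    { return x < 5 ? x : 5; }   // folds to MIN (x, 5)
*/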
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
		false) >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
	       || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree lhs, rhs, tem;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when simplifying range test");

  if (!INTEGRAL_TYPE_P (type))
    return 0;

  lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (loc, type,
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue_loc (loc, tem) : tem;
    }

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
	   && lhs != 0 && rhs != 0
	   && (code == TRUTH_ANDIF_EXPR
	       || code == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build2_loc (loc, code == TRUTH_ANDIF_EXPR
			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			   type, op0, op1);

      else if (!lang_hooks.decls.global_bindings_p ()
	       && !CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (loc, type, common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (loc, type, common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (warnmsg,
				       WARN_STRICT_OVERFLOW_COMPARISON);
	      return build2_loc (loc, code == TRUTH_ANDIF_EXPR
				 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
				 type, lhs, rhs);
	    }
	}
    }

  return 0;
}
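
/* E.g. the classic range test

     ch >= '0' && ch <= '9'

   has both sides turned into ranges ([48, unbounded] and
   [unbounded, 57]), merge_ranges combines them into [48, 57], and
   build_range_check then emits the equivalent of

     (unsigned char) (ch - 48) <= 9

   (an illustration; the exact form depends on the type of `ch').  */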
/* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is sign-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask));
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
}
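
/* A numeric illustration (hypothetical values, no MASK): with P = 4 in
   an 8-bit mode, C = 0x07 has the field's sign bit (bit 3) clear, so
   TEMP is zero and C is returned unchanged; C = 0x0f has bit 3 set, so
   bit 3 is moved to bit 7, arithmetic-shifted back down to fill bits
   4..7 (giving 0xf0), and XORed with C to produce 0xff -- exactly the
   sign-extension of the 4-bit value.  */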
/* For an expression that has the form
     (A && B) || ~B
   or
     (A || B) && ~B,
   we can drop one of the inner expressions and simplify to
     A || ~B
   or
     A && ~B.
   LOC is the location of the resulting expression.  OP is the inner
   logical operation; the left-hand side in the examples above, while CMPOP
   is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
   removing a condition that guards another, as in
     (A != NULL && A->...) || A == NULL
   which we must not transform.  If RHS_ONLY is true, only eliminate the
   right-most operand of the inner logical operation.  */

static tree
merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
				 bool rhs_only)
{
  tree type = TREE_TYPE (cmpop);
  enum tree_code code = TREE_CODE (cmpop);
  enum tree_code truthop_code = TREE_CODE (op);
  tree lhs = TREE_OPERAND (op, 0);
  tree rhs = TREE_OPERAND (op, 1);
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code rhs_code = TREE_CODE (rhs);
  enum tree_code lhs_code = TREE_CODE (lhs);
  enum tree_code inv_code;

  if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
    return NULL_TREE;

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return NULL_TREE;

  if (rhs_code == truthop_code)
    {
      tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
      if (newrhs != NULL_TREE)
	{
	  rhs = newrhs;
	  rhs_code = TREE_CODE (rhs);
	}
    }
  if (lhs_code == truthop_code && !rhs_only)
    {
      tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
      if (newlhs != NULL_TREE)
	{
	  lhs = newlhs;
	  lhs_code = TREE_CODE (lhs);
	}
    }

  inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
  if (inv_code == rhs_code
      && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return lhs;
  if (!rhs_only && inv_code == lhs_code
      && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return rhs;
  if (rhs != orig_rhs || lhs != orig_lhs)
    return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
			    lhs, rhs);
  return NULL_TREE;
}
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
		    tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  HOST_WIDE_INT first_bit, end_bit;
  int volatilep;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
		    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
		    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
	  && operand_equal_p (lr_arg, rr_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
	       && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  */

  if (BRANCH_COST (optimize_function_for_speed_p (cfun),
		   false) >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, NE_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, EQ_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (loc, ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask,
				     &ll_and_mask);
  lr_inner = decode_field_reference (loc, lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask,
				     &lr_and_mask);
  rl_inner = decode_field_reference (loc, rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask,
				     &rl_and_mask);
  rr_inner = decode_field_reference (loc, rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask,
				     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
			 size_int (xll_bitpos));
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
			 size_int (xrl_bitpos));

  if (l_const)
    {
      l_const = fold_convert_loc (loc, lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, ll_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  if (r_const)
    {
      r_const = fold_convert_loc (loc, lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, rl_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, lr_mask),
			     size_int (xlr_bitpos));
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, rr_mask),
			     size_int (xrr_bitpos));

      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
	{
	  lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	{
	  tree type;

	  lhs = make_bit_field_ref (loc, ll_inner, lntype,
				    ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
	  rhs = make_bit_field_ref (loc, lr_inner, rntype,
				    lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)));
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)));

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = fold_convert_loc (loc, rntype, lhs);
		  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = fold_convert_loc (loc, lntype, rhs);
		  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
			   const_binop (BIT_AND_EXPR, result, r_const)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);
	}
      else
	{
	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);

  return build2_loc (loc, wanted_code, truth_type, result,
		     const_binop (BIT_IOR_EXPR, l_const, r_const));
}
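
/* A concrete sketch (hypothetical layout, little-endian bit numbering):
   if `a->b' and `a->c' are 2-bit bit-fields at bit positions 0 and 2 of
   the same word, then

     a->b == 2 && a->c == 1

   is foldable by the constant path above into a single load and compare,
   conceptually

     (word & 0x0f) == 0x06

   where 0x0f is the OR of the shifted per-field masks and 0x06 the OR
   of the shifted constants.  */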
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
			    tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
	tree tem
	  = optimize_minmax_comparison (loc,
					invert_tree_comparison (code, false),
					type, op0, op1);
	if (tem)
	  return invert_truthvalue_loc (loc, tem);
	return NULL_TREE;
      }

    case GE_EXPR:
      return
	fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
			 optimize_minmax_comparison
			 (loc, EQ_EXPR, type, arg0, comp_const),
			 optimize_minmax_comparison
			 (loc, GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5   */
	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand_loc (loc, type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

    default:
      return NULL_TREE;
    }
}
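
/* E.g. `MAX (x, 0) > 5' becomes `x > 5' via the GT_EXPR case above
   (consts_lt holds, since 0 < 5), and `MIN (x, 0) == 5' becomes constant
   false, since the minimum can never exceed 0.  (Constants chosen to
   mirror the per-case comments.)  */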
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.

   We set *STRICT_OVERFLOW_P to true if the return value depends on
   signed overflow being undefined.  Otherwise we do not change
   *STRICT_OVERFLOW_P.  */

static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
		bool *strict_overflow_p)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
  depth--;

  return ret;
}

static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
		  bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;
  bool sub_strict_overflow_p;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
	return const_binop (code, fold_convert (ctype, t),
			    fold_convert (ctype, c));
      break;

    CASE_CONVERT: case NON_LVALUE_EXPR:
      /* If op0 is an expression ...  */
      if ((COMPARISON_CLASS_P (op0)
	   || UNARY_CLASS_P (op0)
	   || BINARY_CLASS_P (op0)
	   || VL_EXP_CLASS_P (op0)
	   || EXPRESSION_CLASS_P (op0))
	  /* ... and has wrapping overflow, and its type is smaller
	     than ctype, then we cannot pass through as widening.  */
	  && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
	       && (TYPE_PRECISION (ctype)
		   > TYPE_PRECISION (TREE_TYPE (op0))))
	      /* ... or this is a truncation (t is narrower than op0),
		 then we cannot pass through this narrowing.  */
	      || (TYPE_PRECISION (type)
		  < TYPE_PRECISION (TREE_TYPE (op0)))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TYPE_UNSIGNED (ctype)
		      != TYPE_UNSIGNED (TREE_TYPE (op0))))
	      /* ... or has undefined overflow while the converted to
		 type has not, we cannot do the operation in the inner type
		 as that would introduce undefined overflow.  */
	      || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
		  && !TYPE_OVERFLOW_UNDEFINED (type))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && !TREE_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 code == MULT_EXPR
					 ? ctype : NULL_TREE,
					 strict_overflow_p))))
	return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
	 must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
	{
	  tree cstype = (*signed_type_for) (ctype);
	  if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
	      != 0)
	    {
	      t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
	      return fold_convert (ctype, t1);
	    }
	  break;
	}
      /* If the constant is negative, we cannot simplify this.  */
      if (tree_int_cst_sgn (c) == -1)
	break;
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      /* For division and modulus, type can't be unsigned, as e.g.
	 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
	 For signed types, even with wrapping overflow, this is fine.  */
      if (code != MULT_EXPR && TYPE_UNSIGNED (type))
	break;
      if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
	  != 0)
	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      sub_strict_overflow_p = false;
      if ((t1 = extract_muldiv (op0, c, code, wide_type,
				&sub_strict_overflow_p)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type,
				   &sub_strict_overflow_p)) != 0)
	{
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  Signed
	 left-shift overflow is implementation-defined rather than
	 undefined in C90, so do not convert signed left shift into
	 multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
	  && 0 != (t1 = fold_convert (ctype,
				      const_binop (LSHIFT_EXPR,
						   size_one_node,
						   op1)))
	  && !TREE_OVERFLOW (t1))
	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
				       ? MULT_EXPR : FLOOR_DIV_EXPR,
				       ctype,
				       fold_convert (ctype, op0),
				       t1),
			       c, code, wide_type, strict_overflow_p);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      sub_strict_overflow_p = false;
      t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
      t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	{
	  tcode = PLUS_EXPR, op1 = negate_expr (op1);
	  /* If OP1 was not easily negatable, the constant may be OP0.  */
	  if (TREE_CODE (op0) == INTEGER_CST)
	    {
	      tree tem = op0;
	      op0 = op1;
	      op1 = tem;
	      tem = t1;
	      t1 = t2;
	      t2 = tem;
	    }
	}

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c));
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
	    break;
	}
      else
	break;

      /* If we have an unsigned type, we cannot widen the operation since it
	 will change the result if the original computation overflowed.  */
      if (TYPE_UNSIGNED (ctype) && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow
	 and overflow is defined.  With undefined overflow
	 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.  */
      if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
	return fold_build2 (tcode, ctype,
			    fold_build2 (code, ctype,
					 fold_convert (ctype, op0),
					 fold_convert (ctype, c)),
			    op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  /* If the multiplication can overflow we cannot optimize this.  */
	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
	{
	  *strict_overflow_p = true;
	  return omit_one_operand (type, integer_zero_node, op0);
	}

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type,
				   strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type,
					strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
			    fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code)
	{
	  bool overflow_p = false;
	  bool overflow_mul_p;
	  signop sign = TYPE_SIGN (ctype);
	  wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
	  overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
	  if (overflow_mul_p
	      && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
	    overflow_p = true;
	  if (!overflow_p)
	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				wide_int_to_tree (ctype, mul));
	}

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with either an
	 operation of CODE or TCODE.

	 If we have an unsigned type, we cannot do this since it will change
	 the result if the original computation overflowed.  */
      if (TYPE_OVERFLOW_UNDEFINED (ctype)
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
		  && code != MULT_EXPR)))
	{
	  if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     op1, c)));
	    }
	  else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     c, op1)));
	    }
	}
      break;

    default:
      break;
    }

  return 0;
}
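
/* For instance (the example from the comment above extract_muldiv):
   dividing `x * 8 + y * 16' by 4 walks the PLUS_EXPR case, extracts the
   division from both MULT_EXPR operands, and rebuilds

     x * 2 + y * 4

   provided overflow is undefined (or cannot happen) for the type.  */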
/* Return a node which has the indicated constant VALUE (either 0 or
   1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
   and is of the indicated TYPE.  */

tree
constant_boolean_node (bool value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else if (TREE_CODE (type) == VECTOR_TYPE)
    return build_vector_from_val (type,
				  build_int_cst (TREE_TYPE (type),
						 value ? -1 : 0));
  else
    return fold_convert (type, value ? integer_one_node : integer_zero_node);
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (location_t loc,
				     enum tree_code code,
				     tree type, tree op0, tree op1,
				     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;
  enum tree_code cond_code = COND_EXPR;

  if (TREE_CODE (cond) == COND_EXPR
      || TREE_CODE (cond) == VEC_COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an exception (and so has void type), then
	 it does not make sense to try to perform a logical or arithmetic
	 operation involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
    cond_code = VEC_COND_EXPR;

  /* This transformation is only worthwhile if we don't have to wrap ARG
     in a SAVE_EXPR and the operation can be simplified without recursing
     on at least one of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg)
      && (TREE_SIDE_EFFECTS (arg)
	  || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
	  || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
    return NULL_TREE;

  arg = fold_convert_loc (loc, arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert_loc (loc, cond_type, true_value);
      if (cond_first_p)
	lhs = fold_build2_loc (loc, code, type, true_value, arg);
      else
	lhs = fold_build2_loc (loc, code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert_loc (loc, cond_type, false_value);
      if (cond_first_p)
	rhs = fold_build2_loc (loc, code, type, false_value, arg);
      else
	rhs = fold_build2_loc (loc, code, type, arg, false_value);
    }

  /* Check that we have simplified at least one of the branches.  */
  if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
    return NULL_TREE;

  return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
}
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

bool
fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* In a vector or complex, we would need to check the sign of all zeros.  */
  if (TREE_CODE (addend) != REAL_CST)
    return false;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (location_t loc,
		     enum built_in_function fcode, enum tree_code code,
		     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = CALL_EXPR_ARG (arg0, 0);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
	{
	  /* sqrt(x) < y is always false, if y is negative.  */
	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg);

	  /* sqrt(x) > y is always true, if y is negative and we
	     don't care about NaNs, i.e. negative values of x.  */
	  if (code == NE_EXPR || !HONOR_NANS (mode))
	    return omit_one_operand_loc (loc, type, integer_one_node, arg);

	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
	  return fold_build2_loc (loc, GE_EXPR, type, arg,
				  build_real (TREE_TYPE (arg), dconst0));
	}
      else if (code == GT_EXPR || code == GE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
	      if (HONOR_INFINITIES (mode))
		return fold_build2_loc (loc, EQ_EXPR, type, arg,
					build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) > y is always false, when y is very large
		 and we don't care about infinities.  */
	      return omit_one_operand_loc (loc, type, integer_zero_node, arg);
	    }

	  /* sqrt(x) > c is the same as x > c*c.  */
	  return fold_build2_loc (loc, code, type, arg,
				  build_real (TREE_TYPE (arg), c2));
	}
      else if (code == LT_EXPR || code == LE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) < y is always true, when y is a very large
		 value and we don't care about NaNs or Infinities.  */
	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
		return omit_one_operand_loc (loc, type, integer_one_node, arg);

	      /* sqrt(x) < y is x != +Inf when y is very large and we
		 don't care about NaNs.  */
	      if (! HONOR_NANS (mode))
		return fold_build2_loc (loc, NE_EXPR, type, arg,
					build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) < y is x >= 0 when y is very large and we
		 don't care about Infinities.  */
	      if (! HONOR_INFINITIES (mode))
		return fold_build2_loc (loc, GE_EXPR, type, arg,
					build_real (TREE_TYPE (arg), dconst0));

	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
	      arg = save_expr (arg);
	      return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				      fold_build2_loc (loc, GE_EXPR, type, arg,
						       build_real (TREE_TYPE (arg),
								   dconst0)),
				      fold_build2_loc (loc, NE_EXPR, type, arg,
						       build_real (TREE_TYPE (arg),
								   c2)));
	    }

	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
	  if (! HONOR_NANS (mode))
	    return fold_build2_loc (loc, code, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
	  arg = save_expr (arg);
	  return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				  fold_build2_loc (loc, GE_EXPR, type, arg,
						   build_real (TREE_TYPE (arg),
							       dconst0)),
				  fold_build2_loc (loc, code, type, arg,
						   build_real (TREE_TYPE (arg),
							       c2)));
	}
    }

  return NULL_TREE;
}
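/* For example, sqrt(x) > 2.0 folds to x > 4.0 through the GT_EXPR
   branch above (c2 = 4.0 is finite), and with NaNs honored
   sqrt(x) < 2.0 folds to x >= 0 && x < 4.0 via the final LT_EXPR
   case.  */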
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
	return NULL_TREE;
      return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      arg0 = save_expr (arg0);
      return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
	return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
				arg0, build_real (TREE_TYPE (arg0), max));

      temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}
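/* For example, for IEEE double x, x < +Inf becomes x <= DBL_MAX, and
   x != +Inf becomes !(x > DBL_MAX) when NaNs must be honored.  */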
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X / C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (location_t loc,
		  enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  signop sign = TYPE_SIGN (TREE_TYPE (arg0));
  bool neg_overflow = false;
  bool overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1);  */
  wide_int val = wi::mul (arg01, arg1, sign, &overflow);
  prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
  neg_overflow = false;

  if (sign == UNSIGNED)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp).  */
      val = wi::add (prod, tmp, sign, &overflow);
      hi = force_fit_type (TREE_TYPE (arg00), val,
			   -1, overflow | TREE_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  neg_overflow = true;
	  lo = int_const_binop (MINUS_EXPR, prod, tmp);
	  hi = prod;
	  break;

	case 0:
	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
	  hi = tmp;
	  break;

	case 1:
	  hi = int_const_binop (PLUS_EXPR, prod, tmp);
	  lo = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  hi = int_const_binop (MINUS_EXPR, prod, tmp);
	  lo = prod;
	  break;

	case 0:
	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
	  lo = tmp;
	  break;

	case 1:
	  neg_overflow = true;
	  lo = int_const_binop (PLUS_EXPR, prod, tmp);
	  hi = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
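/* Worked example for the unsigned path above: folding X / 3 == 2
   computes prod = 6, tmp = 2, lo = 6 and hi = 8, so the comparison
   becomes the range check 6 <= X && X <= 8.  */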
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */

static tree
fold_single_bit_test_into_sign_test (location_t loc,
				     enum tree_code code, tree arg0, tree arg1,
				     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
	  /* This is only a win if casting to a signed type is cheap,
	     i.e. when arg00's type is not a partial mode.  */
	  && TYPE_PRECISION (TREE_TYPE (arg00))
	     == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
	{
	  tree stype = signed_type_for (TREE_TYPE (arg00));
	  return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				  result_type,
				  fold_convert_loc (loc, stype, arg00),
				  build_int_cst (stype, 0));
	}
    }

  return NULL_TREE;
}
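/* For example, for 32-bit A, (A & 0x80000000) != 0 is folded here to
   (int) A < 0, and (A & 0x80000000) == 0 to (int) A >= 0, since the
   mask is exactly the sign bit of A's type.  */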
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (location_t loc, enum tree_code code,
		      tree arg0, tree arg1, tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
	 test.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
						 result_type);
      if (tem)
	return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
	 Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && bitnum < TYPE_PRECISION (type)
	  && wi::ltu_p (TREE_OPERAND (inner, 1),
			TYPE_PRECISION (type) - bitnum))
	{
	  bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
		      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert_loc (loc, intermediate_type, inner);

      if (bitnum != 0)
	inner = build2 (RSHIFT_EXPR, intermediate_type,
			inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      if (code == EQ_EXPR)
	inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type,
				 inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert_loc (loc, result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
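/* For example, (A & 8) != 0 becomes ((unsigned) A >> 3) & 1, and
   (A & 8) == 0 becomes (((unsigned) A >> 3) ^ 1) & 1; the AND is
   emitted last so that it can combine with surrounding code.  */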
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (const_tree arg0, const_tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
	 && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == FIXED_CST)
    return 0;
  if (TREE_CODE (arg0) == FIXED_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (optimize_function_for_size_p (cfun))
    return 0;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  /* It is preferable to swap two SSA_NAME to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  /* Put SSA_NAMEs last.  */
  if (TREE_CODE (arg1) == SSA_NAME)
    return 0;
  if (TREE_CODE (arg0) == SSA_NAME)
    return 1;

  /* Put variables last.  */
  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  return 0;
}
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */

static tree
fold_widened_comparison (location_t loc, enum tree_code code,
			 tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (shorter_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, NULL_TREE);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
	  || ((TYPE_PRECISION (shorter_type)
	       >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
	      && (TYPE_UNSIGNED (shorter_type)
		  == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
	  || (TREE_CODE (arg1_unw) == INTEGER_CST
	      && (TREE_CODE (shorter_type) == INTEGER_TYPE
		  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
	      && int_fits_type_p (arg1_unw, shorter_type))))
    return fold_build2_loc (loc, code, type, arg0_unw,
			    fold_convert_loc (loc, shorter_type, arg1_unw));

  if (TREE_CODE (arg1_unw) != INTEGER_CST
      || TREE_CODE (shorter_type) != INTEGER_TYPE
      || !int_fits_type_p (arg1_unw, shorter_type))
    return NULL_TREE;

  /* If we are comparing with the integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      else if (below)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case GT_EXPR:
    case GE_EXPR:
      if (above)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      else if (below)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
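/* For example, if s has type short, (int) s == 100000 is folded here
   to constant false, since 100000 lies above the upper bound of short
   and the comparison result is therefore known.  */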
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
			      tree arg0, tree arg1)
{
  tree arg0_inner;
  tree inner_type, outer_type;

  if (!CONVERT_EXPR_P (arg0))
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) != INTEGER_CST
      && !(CONVERT_EXPR_P (arg1)
	   && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
			   TREE_OVERFLOW (arg1));
  else
    arg1 = fold_convert_loc (loc, inner_type, arg1);

  return fold_build2_loc (loc, code, type, arg0_inner, arg1);
}
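/* For example, with int x, (unsigned int) x == 5u is rewritten here
   as x == 5: equality is unaffected by the signedness change because
   both types have the same precision.  */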
/* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
   step of the array.  Reconstructs s and delta in the case of s *
   delta being an integer constant (and thus already folded).  ADDR is
   the address.  MULT is the multiplicative expression.  If the
   function succeeds, the new address expression is returned.
   Otherwise NULL_TREE is returned.  LOC is the location of the
   resulting expression.  */

static tree
try_move_mult_to_index (location_t loc, tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;
  bool mdim = false;

  /*  Strip the nops that might be added when converting op1 to sizetype. */
  STRIP_NOPS (op1);

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  s = arg0;
	  delta = arg1;
	}
      else if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  s = arg1;
	  delta = arg0;
	}
      else
	return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Simulate we are delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  /* Handle &x.array the same as we would handle &x.array[0].  */
  if (TREE_CODE (ref) == COMPONENT_REF
      && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
    {
      tree domain;

      /* Remember if this was a multi-dimensional array.  */
      if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
	mdim = true;

      domain = TYPE_DOMAIN (TREE_TYPE (ref));
      if (! domain)
	goto cont;
      itype = TREE_TYPE (domain);

      step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
      if (TREE_CODE (step) != INTEGER_CST)
	goto cont;

      if (s)
	{
	  if (! tree_int_cst_equal (step, s))
	    goto cont;
	}
      else
	{
	  /* Try if delta is a multiple of step.  */
	  tree tmp = div_if_zero_remainder (op1, step);
	  if (! tmp)
	    goto cont;
	  delta = tmp;
	}

      /* Only fold here if we can verify we do not overflow one
	 dimension of a multi-dimensional array.  */
      if (mdim)
	{
	  tree tmp;

	  if (!TYPE_MIN_VALUE (domain)
	      || !TYPE_MAX_VALUE (domain)
	      || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
	    goto cont;

	  tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
				 fold_convert_loc (loc, itype,
						   TYPE_MIN_VALUE (domain)),
				 fold_convert_loc (loc, itype, delta));
	  if (TREE_CODE (tmp) != INTEGER_CST
	      || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
	    goto cont;
	}

      /* We found a suitable component reference.  */

      pref = TREE_OPERAND (addr, 0);
      ret = copy_node (pref);
      SET_EXPR_LOCATION (ret, loc);

      ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
			fold_build2_loc
			  (loc, PLUS_EXPR, itype,
			   fold_convert_loc (loc, itype,
					     TYPE_MIN_VALUE
					       (TYPE_DOMAIN (TREE_TYPE (ref)))),
			   fold_convert_loc (loc, itype, delta)),
			NULL_TREE, NULL_TREE);
      return build_fold_addr_expr_loc (loc, ret);
    }

cont:

  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
	{
	  tree domain;

	  /* Remember if this was a multi-dimensional array.  */
	  if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
	    mdim = true;

	  domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
	  if (! domain)
	    continue;
	  itype = TREE_TYPE (domain);

	  step = array_ref_element_size (ref);
	  if (TREE_CODE (step) != INTEGER_CST)
	    continue;

	  if (s)
	    {
	      if (! tree_int_cst_equal (step, s))
		continue;
	    }
	  else
	    {
	      /* Try if delta is a multiple of step.  */
	      tree tmp = div_if_zero_remainder (op1, step);
	      if (! tmp)
		continue;
	      delta = tmp;
	    }

	  /* Only fold here if we can verify we do not overflow one
	     dimension of a multi-dimensional array.  */
	  if (mdim)
	    {
	      tree tmp;

	      if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
		  || !TYPE_MAX_VALUE (domain)
		  || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
		continue;

	      tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
				     fold_convert_loc (loc, itype,
						       TREE_OPERAND (ref, 1)),
				     fold_convert_loc (loc, itype, delta));
	      if (!tmp
		  || TREE_CODE (tmp) != INTEGER_CST
		  || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
		continue;
	    }

	  break;
	}
      else
	mdim = false;

      if (!handled_component_p (ref))
	return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  SET_EXPR_LOCATION (ret, loc);
  pos = ret;

  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  TREE_OPERAND (pos, 1)
    = fold_build2_loc (loc, PLUS_EXPR, itype,
		       fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
		       fold_convert_loc (loc, itype, delta));
  return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
}
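/* For example, for int a[10], &a[1] p+ 4 * (sizetype) d is rewritten
   here as &a[1 + d], since the multiplier matches the 4-byte element
   size of the array.  */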
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  if (POINTER_TYPE_P (typea))
    {
      /* Convert the pointer types into integer before taking the difference.  */
      tree ta = fold_convert_loc (loc, ssizetype, a);
      tree ta1 = fold_convert_loc (loc, ssizetype, a1);
      diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
    }
  else
    diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);

  if (!diff || !integer_onep (diff))
    return NULL_TREE;

  return fold_build2_loc (loc, GE_EXPR, type, a, y);
}
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else if (TREE_CODE (arg0) == INTEGER_CST)
    {
      arg00 = build_one_cst (type);
      arg01 = arg0;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      arg10 = build_one_cst (type);
      /* As we canonicalize A - 2 to A + -2 get rid of that sign for
	 the purpose of this canonicalization.  */
      if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
	  && negate_expr_p (arg1)
	  && code == PLUS_EXPR)
	{
	  arg11 = negate_expr (arg1);
	  code = MINUS_EXPR;
	}
      else
	arg11 = arg1;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (tree_fits_shwi_p (arg01)
	   && tree_fits_shwi_p (arg11))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = tree_to_shwi (arg01);
      int11 = tree_to_shwi (arg11);

      /* Move min of absolute values to int11.  */
      if (absu_hwi (int01) < absu_hwi (int11))
	{
	  tmp = int01, int01 = int11, int11 = tmp;
	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
	  maybe_same = alt0;
	  swap = true;
	}
      else
	maybe_same = arg11;

      if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
	  /* The remainder should not be a constant, otherwise we
	     end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
	     increased the number of multiplications necessary.  */
	  && TREE_CODE (arg10) != INTEGER_CST)
	{
	  alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
				  build_int_cst (TREE_TYPE (arg00),
						 int01 / int11));
	  alt1 = arg10;
	  same = maybe_same;
	  if (swap)
	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
	}
    }

  if (same)
    return fold_build2_loc (loc, MULT_EXPR, type,
			    fold_build2_loc (loc, code, type,
					     fold_convert_loc (loc, type, alt0),
					     fold_convert_loc (loc, type, alt1)),
			    fold_convert_loc (loc, type, same));

  return NULL_TREE;
}
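/* For example, i * 12 + j * 4 has no identical multiplicand, but 4 is
   a power of two dividing 12, so the sum is refactored through the
   power-of-two path above into (i * 3 + j) * 4.  */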
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
	 number of bytes.  */
      value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);

      if (total_bytes > UNITS_PER_WORD)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
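/* For example, on a little-endian target whose words are at least
   four bytes wide, the 32-bit INTEGER_CST 0x01020304 is emitted into
   PTR as the byte sequence 04 03 02 01.  */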
/* Subroutine of native_encode_expr.  Encode the FIXED_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  FIXED_VALUE_TYPE value;
  tree i_value, i_type;

  if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return 0;

  i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);

  if (NULL_TREE == i_type
      || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
    return 0;

  value = TREE_FIXED_CST (expr);
  i_value = double_int_to_tree (i_type, value.data);

  return native_encode_int (i_value, ptr, len);
}
/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words, bitpos;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if (total_bytes > len)
    return 0;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
    }
  return total_bytes;
}
/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_complex (const_tree expr, unsigned char *ptr, int len)
{
  int rsize, isize;
  tree part;

  part = TREE_REALPART (expr);
  rsize = native_encode_expr (part, ptr, len);
  if (rsize == 0)
    return 0;
  part = TREE_IMAGPART (expr);
  isize = native_encode_expr (part, ptr+rsize, len-rsize);
  if (rsize != isize)
    return 0;
  return rsize + isize;
}
/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_vector (const_tree expr, unsigned char *ptr, int len)
{
  int i, size, offset, count;
  tree itype, elem;

  offset = 0;
  count = VECTOR_CST_NELTS (expr);
  itype = TREE_TYPE (TREE_TYPE (expr));
  size = GET_MODE_SIZE (TYPE_MODE (itype));
  for (i = 0; i < count; i++)
    {
      elem = VECTOR_CST_ELT (expr, i);
      if (native_encode_expr (elem, ptr+offset, len-offset) != size)
	return 0;
      offset += size;
    }
  return offset;
}
/* Subroutine of native_encode_expr.  Encode the STRING_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_string (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  HOST_WIDE_INT total_bytes;

  if (TREE_CODE (type) != ARRAY_TYPE
      || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
      || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
      || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
    return 0;
  total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
  if (total_bytes > len)
    return 0;
  if (TREE_STRING_LENGTH (expr) < total_bytes)
    {
      memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
      memset (ptr + TREE_STRING_LENGTH (expr), 0,
	      total_bytes - TREE_STRING_LENGTH (expr));
    }
  else
    memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
  return total_bytes;
}
/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
   REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
   buffer PTR of length LEN bytes.  Return the number of bytes
   placed in the buffer, or zero upon failure.  */

int
native_encode_expr (const_tree expr, unsigned char *ptr, int len)
{
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return native_encode_int (expr, ptr, len);

    case REAL_CST:
      return native_encode_real (expr, ptr, len);

    case FIXED_CST:
      return native_encode_fixed (expr, ptr, len);

    case COMPLEX_CST:
      return native_encode_complex (expr, ptr, len);

    case VECTOR_CST:
      return native_encode_vector (expr, ptr, len);

    case STRING_CST:
      return native_encode_string (expr, ptr, len);

    default:
      return 0;
    }
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));

  if (total_bytes > len
      || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return NULL_TREE;

  wide_int result = wi::from_buffer (ptr, total_bytes);

  return wide_int_to_tree (type, result);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a FIXED_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_fixed (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  double_int result;
  FIXED_VALUE_TYPE fixed_value;

  if (total_bytes > len
      || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
    return NULL_TREE;

  result = double_int::from_buffer (ptr, total_bytes);
  fixed_value = fixed_from_double_int (result, TYPE_MODE (type));

  return build_fixed (type, fixed_value);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  int byte, offset, word, words, bitpos;
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_complex (tree type, const unsigned char *ptr, int len)
{
  tree etype, rpart, ipart;
  int size;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  if (size * 2 > len)
    return NULL_TREE;
  rpart = native_interpret_expr (etype, ptr, size);
  if (!rpart)
    return NULL_TREE;
  ipart = native_interpret_expr (etype, ptr+size, size);
  if (!ipart)
    return NULL_TREE;
  return build_complex (type, rpart, ipart);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_vector (tree type, const unsigned char *ptr, int len)
{
  tree etype, elem;
  int i, size, count;
  tree *elements;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  count = TYPE_VECTOR_SUBPARTS (type);
  if (size * count > len)
    return NULL_TREE;

  elements = XALLOCAVEC (tree, count);
  for (i = count - 1; i >= 0; i--)
    {
      elem = native_interpret_expr (etype, ptr+(i*size), size);
      if (!elem)
	return NULL_TREE;
      elements[i] = elem;
    }
  return build_vector (type, elements);
}
/* Subroutine of fold_view_convert_expr.  Interpret the contents of
   the buffer PTR of length LEN as a constant of type TYPE.  For
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
   return NULL_TREE.  */

tree
native_interpret_expr (tree type, const unsigned char *ptr, int len)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      return native_interpret_int (type, ptr, len);

    case REAL_TYPE:
      return native_interpret_real (type, ptr, len);

    case FIXED_POINT_TYPE:
      return native_interpret_fixed (type, ptr, len);

    case COMPLEX_TYPE:
      return native_interpret_complex (type, ptr, len);

    case VECTOR_TYPE:
      return native_interpret_vector (type, ptr, len);

    default:
      return NULL_TREE;
    }
}
/* Returns true if we can interpret the contents of a native encoding
   as TYPE.  */

static bool
can_native_interpret_type_p (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case FIXED_POINT_TYPE:
    case REAL_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
      return true;
    default:
      return false;
    }
}
/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */

static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}
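/* For example, on a target using IEEE single precision,
   VIEW_CONVERT_EXPR<int>(1.0f) folds here to the INTEGER_CST
   1065353216 (0x3f800000): the REAL_CST is encoded into the buffer
   and the bytes are reinterpreted as an integer.  */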
/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
	t = build1_loc (loc, NOP_EXPR, ptrtype, t);
    }
  else if (TREE_CODE (t) == MEM_REF
	   && integer_zerop (TREE_OPERAND (t, 1)))
    return TREE_OPERAND (t, 0);
  else if (TREE_CODE (t) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
    return fold_binary (POINTER_PLUS_EXPR, ptrtype,
			TREE_OPERAND (t, 0),
			convert_to_ptrofftype (TREE_OPERAND (t, 1)));
  else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
    {
      t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));

      if (TREE_TYPE (t) != ptrtype)
	t = fold_convert_loc (loc, ptrtype, t);
    }
  else
    t = build1_loc (loc, ADDR_EXPR, ptrtype, t);

  return t;
}
/* Build an expression for the address of T.  */

tree
build_fold_addr_expr_loc (location_t loc, tree t)
{
  tree ptrtype = build_pointer_type (TREE_TYPE (t));

  return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
}
7744 static bool vec_cst_ctor_to_array (tree
, tree
*);
7746 /* Fold a unary expression of code CODE and type TYPE with operand
7747 OP0. Return the folded expression if folding is successful.
7748 Otherwise, return NULL_TREE. */
7751 fold_unary_loc (location_t loc
, enum tree_code code
, tree type
, tree op0
)
7755 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
7757 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
7758 && TREE_CODE_LENGTH (code
) == 1);
7763 if (CONVERT_EXPR_CODE_P (code
)
7764 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
|| code
== NEGATE_EXPR
)
7766 /* Don't use STRIP_NOPS, because signedness of argument type
7768 STRIP_SIGN_NOPS (arg0
);
7772 /* Strip any conversions that don't change the mode. This
7773 is safe for every expression, except for a comparison
7774 expression because its signedness is derived from its
7777 Note that this is done as an internal manipulation within
7778 the constant folder, in order to find the simplest
7779 representation of the arguments so that their form can be
7780 studied. In any cases, the appropriate type conversions
7781 should be put back in the tree that will get out of the
7787 if (TREE_CODE_CLASS (code
) == tcc_unary
)
7789 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
7790 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7791 fold_build1_loc (loc
, code
, type
,
7792 fold_convert_loc (loc
, TREE_TYPE (op0
),
7793 TREE_OPERAND (arg0
, 1))));
7794 else if (TREE_CODE (arg0
) == COND_EXPR
)
7796 tree arg01
= TREE_OPERAND (arg0
, 1);
7797 tree arg02
= TREE_OPERAND (arg0
, 2);
7798 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
7799 arg01
= fold_build1_loc (loc
, code
, type
,
7800 fold_convert_loc (loc
,
7801 TREE_TYPE (op0
), arg01
));
7802 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
7803 arg02
= fold_build1_loc (loc
, code
, type
,
7804 fold_convert_loc (loc
,
7805 TREE_TYPE (op0
), arg02
));
7806 tem
= fold_build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7809 /* If this was a conversion, and all we did was to move into
7810 inside the COND_EXPR, bring it back out. But leave it if
7811 it is a conversion from integer to integer and the
7812 result precision is no wider than a word since such a
7813 conversion is cheap and may be optimized away by combine,
7814 while it couldn't if it were outside the COND_EXPR. Then return
7815 so we don't get into an infinite recursion loop taking the
7816 conversion out and then back in. */
7818 if ((CONVERT_EXPR_CODE_P (code
)
7819 || code
== NON_LVALUE_EXPR
)
7820 && TREE_CODE (tem
) == COND_EXPR
7821 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
7822 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
7823 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
7824 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
7825 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
7826 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
7827 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7829 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
7830 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
7831 || flag_syntax_only
))
7832 tem
= build1_loc (loc
, code
, type
,
7834 TREE_TYPE (TREE_OPERAND
7835 (TREE_OPERAND (tem
, 1), 0)),
7836 TREE_OPERAND (tem
, 0),
7837 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
7838 TREE_OPERAND (TREE_OPERAND (tem
, 2),
7847 /* Re-association barriers around constants and other re-association
7848 barriers can be removed. */
7849 if (CONSTANT_CLASS_P (op0
)
7850 || TREE_CODE (op0
) == PAREN_EXPR
)
7851 return fold_convert_loc (loc
, type
, op0
);
7854 case NON_LVALUE_EXPR
:
7855 if (!maybe_lvalue_p (op0
))
7856 return fold_convert_loc (loc
, type
, op0
);
7861 case FIX_TRUNC_EXPR
:
7862 if (TREE_TYPE (op0
) == type
)
7865 if (COMPARISON_CLASS_P (op0
))
7867 /* If we have (type) (a CMP b) and type is an integral type, return
7868 new expression involving the new type. Canonicalize
7869 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7871 Do not fold the result as that would not simplify further, also
7872 folding again results in recursions. */
7873 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
7874 return build2_loc (loc
, TREE_CODE (op0
), type
,
7875 TREE_OPERAND (op0
, 0),
7876 TREE_OPERAND (op0
, 1));
7877 else if (!INTEGRAL_TYPE_P (type
) && !VOID_TYPE_P (type
)
7878 && TREE_CODE (type
) != VECTOR_TYPE
)
7879 return build3_loc (loc
, COND_EXPR
, type
, op0
,
7880 constant_boolean_node (true, type
),
7881 constant_boolean_node (false, type
));
7884 /* Handle cases of two conversions in a row. */
7885 if (CONVERT_EXPR_P (op0
))
7887 tree inside_type
= TREE_TYPE (TREE_OPERAND (op0
, 0));
7888 tree inter_type
= TREE_TYPE (op0
);
7889 int inside_int
= INTEGRAL_TYPE_P (inside_type
);
7890 int inside_ptr
= POINTER_TYPE_P (inside_type
);
7891 int inside_float
= FLOAT_TYPE_P (inside_type
);
7892 int inside_vec
= TREE_CODE (inside_type
) == VECTOR_TYPE
;
7893 unsigned int inside_prec
= TYPE_PRECISION (inside_type
);
7894 int inside_unsignedp
= TYPE_UNSIGNED (inside_type
);
7895 int inter_int
= INTEGRAL_TYPE_P (inter_type
);
7896 int inter_ptr
= POINTER_TYPE_P (inter_type
);
7897 int inter_float
= FLOAT_TYPE_P (inter_type
);
7898 int inter_vec
= TREE_CODE (inter_type
) == VECTOR_TYPE
;
7899 unsigned int inter_prec
= TYPE_PRECISION (inter_type
);
7900 int inter_unsignedp
= TYPE_UNSIGNED (inter_type
);
7901 int final_int
= INTEGRAL_TYPE_P (type
);
7902 int final_ptr
= POINTER_TYPE_P (type
);
7903 int final_float
= FLOAT_TYPE_P (type
);
7904 int final_vec
= TREE_CODE (type
) == VECTOR_TYPE
;
7905 unsigned int final_prec
= TYPE_PRECISION (type
);
7906 int final_unsignedp
= TYPE_UNSIGNED (type
);
7908 /* In addition to the cases of two conversions in a row
7909 handled below, if we are converting something to its own
7910 type via an object of identical or wider precision, neither
7911 conversion is needed. */
7912 if (TYPE_MAIN_VARIANT (inside_type
) == TYPE_MAIN_VARIANT (type
)
7913 && (((inter_int
|| inter_ptr
) && final_int
)
7914 || (inter_float
&& final_float
))
7915 && inter_prec
>= final_prec
)
7916 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7918 /* Likewise, if the intermediate and initial types are either both
7919 float or both integer, we don't need the middle conversion if the
7920 former is wider than the latter and doesn't change the signedness
7921 (for integers). Avoid this if the final type is a pointer since
7922 then we sometimes need the middle conversion. Likewise if the
7923 final type has a precision not equal to the size of its mode. */
7924 if (((inter_int
&& inside_int
)
7925 || (inter_float
&& inside_float
)
7926 || (inter_vec
&& inside_vec
))
7927 && inter_prec
>= inside_prec
7928 && (inter_float
|| inter_vec
7929 || inter_unsignedp
== inside_unsignedp
)
7930 && ! (final_prec
!= GET_MODE_PRECISION (TYPE_MODE (type
))
7931 && TYPE_MODE (type
) == TYPE_MODE (inter_type
))
7933 && (! final_vec
|| inter_prec
== inside_prec
))
7934 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7936 /* If we have a sign-extension of a zero-extended value, we can
7937 replace that by a single zero-extension. Likewise if the
7938 final conversion does not change precision we can drop the
7939 intermediate conversion. */
7940 if (inside_int
&& inter_int
&& final_int
7941 && ((inside_prec
< inter_prec
&& inter_prec
< final_prec
7942 && inside_unsignedp
&& !inter_unsignedp
)
7943 || final_prec
== inter_prec
))
7944 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7946 /* Two conversions in a row are not needed unless:
7947 - some conversion is floating-point (overstrict for now), or
7948 - some conversion is a vector (overstrict for now), or
7949 - the intermediate type is narrower than both initial and
7951 - the intermediate type and innermost type differ in signedness,
7952 and the outermost type is wider than the intermediate, or
7953 - the initial type is a pointer type and the precisions of the
7954 intermediate and final types differ, or
7955 - the final type is a pointer type and the precisions of the
7956 initial and intermediate types differ. */
7957 if (! inside_float
&& ! inter_float
&& ! final_float
7958 && ! inside_vec
&& ! inter_vec
&& ! final_vec
7959 && (inter_prec
>= inside_prec
|| inter_prec
>= final_prec
)
7960 && ! (inside_int
&& inter_int
7961 && inter_unsignedp
!= inside_unsignedp
7962 && inter_prec
< final_prec
)
7963 && ((inter_unsignedp
&& inter_prec
> inside_prec
)
7964 == (final_unsignedp
&& final_prec
> inter_prec
))
7965 && ! (inside_ptr
&& inter_prec
!= final_prec
)
7966 && ! (final_ptr
&& inside_prec
!= inter_prec
)
7967 && ! (final_prec
!= GET_MODE_PRECISION (TYPE_MODE (type
))
7968 && TYPE_MODE (type
) == TYPE_MODE (inter_type
)))
7969 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7972 /* Handle (T *)&A.B.C for A being of type T and B and C
7973 living at offset zero. This occurs frequently in
7974 C++ upcasting and then accessing the base. */
7975 if (TREE_CODE (op0
) == ADDR_EXPR
7976 && POINTER_TYPE_P (type
)
7977 && handled_component_p (TREE_OPERAND (op0
, 0)))
7979 HOST_WIDE_INT bitsize
, bitpos
;
7981 enum machine_mode mode
;
7982 int unsignedp
, volatilep
;
7983 tree base
= TREE_OPERAND (op0
, 0);
7984 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
,
7985 &mode
, &unsignedp
, &volatilep
, false);
7986 /* If the reference was to a (constant) zero offset, we can use
7987 the address of the base if it has the same base type
7988 as the result type and the pointer type is unqualified. */
7989 if (! offset
&& bitpos
== 0
7990 && (TYPE_MAIN_VARIANT (TREE_TYPE (type
))
7991 == TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
7992 && TYPE_QUALS (type
) == TYPE_UNQUALIFIED
)
7993 return fold_convert_loc (loc
, type
,
7994 build_fold_addr_expr_loc (loc
, base
));
7997 if (TREE_CODE (op0
) == MODIFY_EXPR
7998 && TREE_CONSTANT (TREE_OPERAND (op0
, 1))
7999 /* Detect assigning a bitfield. */
8000 && !(TREE_CODE (TREE_OPERAND (op0
, 0)) == COMPONENT_REF
8002 (TREE_OPERAND (TREE_OPERAND (op0
, 0), 1))))
8004 /* Don't leave an assignment inside a conversion
8005 unless assigning a bitfield. */
8006 tem
= fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 1));
8007 /* First do the assignment, then return converted constant. */
8008 tem
= build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (tem
), op0
, tem
);
8009 TREE_NO_WARNING (tem
) = 1;
8010 TREE_USED (tem
) = 1;
      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
	 constant (if x has signed type, the sign bit cannot be set
	 in c).  This folds extension into the BIT_AND_EXPR.
	 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
	 very likely don't have maximal range for their precision and this
	 transformation effectively doesn't preserve non-maximal ranges.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
	{
	  tree and_expr = op0;
	  tree and0 = TREE_OPERAND (and_expr, 0);
	  tree and1 = TREE_OPERAND (and_expr, 1);
	  int change = 0;

	  if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
	      || (TYPE_PRECISION (type)
		  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
	    change = 1;
	  else if (TYPE_PRECISION (TREE_TYPE (and1)) <= HOST_BITS_PER_WIDE_INT
		   && tree_fits_uhwi_p (and1))
	    {
	      unsigned HOST_WIDE_INT cst;

	      cst = tree_to_uhwi (and1);
	      cst &= HOST_WIDE_INT_M1U
		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
	      change = (cst == 0);
#ifdef LOAD_EXTEND_OP
	      if (change
		  && !flag_syntax_only
		  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
		      == ZERO_EXTEND))
		{
		  tree uns = unsigned_type_for (TREE_TYPE (and0));
		  and0 = fold_convert_loc (loc, uns, and0);
		  and1 = fold_convert_loc (loc, uns, and1);
		}
#endif
	    }
	  if (change)
	    {
	      tem = force_fit_type (type, wi::to_widest (and1), 0,
				    TREE_OVERFLOW (and1));
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_convert_loc (loc, type, and0), tem);
	    }
	}
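      /* Illustrative note (not from the original source): with a
	 signed char c, (unsigned int)(c & 0x7f) becomes
	 (unsigned int)c & 0x7f -- the mask already clears the sign
	 bit, so the widening conversion folds into the AND.  */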
      /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
	 when one of the new casts will fold away.  Conservatively we assume
	 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST.  */
      if (POINTER_TYPE_P (type)
	  && TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
	  && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
	      || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);

	  return fold_build_pointer_plus_loc
		   (loc, fold_convert_loc (loc, type, arg00), arg01);
	}
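      /* Illustrative note (not from the original source): for
	 char *p, (int *)(p p+ 4) becomes ((int *)p) p+ 4; if p is
	 itself a cast of an int pointer, the new inner cast then
	 folds away entirely.  */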
      /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
	 of the same precision, and X is an integer type not narrower than
	 types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == BIT_NOT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
	      && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
	    return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				    fold_convert_loc (loc, type, tem));
	}
      /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
	 type of X and Y (integer types only).  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == MULT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  /* Be careful not to introduce new overflows.  */
	  tree mult_type;
	  if (TYPE_OVERFLOW_WRAPS (type))
	    mult_type = type;
	  else
	    mult_type = unsigned_type_for (type);

	  if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
	    {
	      tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
				     fold_convert_loc (loc, mult_type,
						       TREE_OPERAND (op0, 0)),
				     fold_convert_loc (loc, mult_type,
						       TREE_OPERAND (op0, 1)));
	      return fold_convert_loc (loc, type, tem);
	    }
	}
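      /* Illustrative note (not from the original source): for long
	 lx, ly on an LP64 target, (int)(lx * ly) becomes
	 (int)((unsigned int)lx * (unsigned int)ly) -- the narrower
	 multiply is done in the unsigned type so no new signed
	 overflow is introduced.  */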
      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;
    case ADDR_SPACE_CONVERT_EXPR:
      if (integer_zerop (arg0))
	return fold_convert_const (code, type, arg0);
      return NULL_TREE;
    case FIXED_CONVERT_EXPR:
      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;
    case VIEW_CONVERT_EXPR:
      if (TREE_TYPE (op0) == type)
	return op0;
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				type, TREE_OPERAND (op0, 0));
      if (TREE_CODE (op0) == MEM_REF)
	return fold_build2_loc (loc, MEM_REF, type,
				TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));

      /* For integral conversions with the same precision or pointer
	 conversions use a NOP_EXPR instead.  */
      if ((INTEGRAL_TYPE_P (type)
	   || POINTER_TYPE_P (type))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
	      || POINTER_TYPE_P (TREE_TYPE (op0)))
	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
	return fold_convert_loc (loc, type, op0);

      /* Strip inner integral conversions that do not change the precision.  */
      if (CONVERT_EXPR_P (op0)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
	      || POINTER_TYPE_P (TREE_TYPE (op0)))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
	      || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
	  && (TYPE_PRECISION (TREE_TYPE (op0))
	      == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				type, TREE_OPERAND (op0, 0));

      return fold_view_convert_expr (type, op0);
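      /* Illustrative note (not from the original source): a
	 VIEW_CONVERT_EXPR from int to unsigned int reinterprets the
	 same bits at the same precision, so the integral/pointer
	 case above emits it as a plain NOP_EXPR conversion.  */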
    case NEGATE_EXPR:
      tem = fold_negate_expr (loc, arg0);
      if (tem)
	return fold_convert_loc (loc, type, tem);
      return NULL_TREE;
    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
	       && TREE_CODE (type) == REAL_TYPE)
	{
	  tree targ0 = strip_float_extensions (arg0);
	  if (targ0 != arg0)
	    return fold_convert_loc (loc, type,
				     fold_build1_loc (loc, ABS_EXPR,
						      TREE_TYPE (targ0),
						      targ0));
	}
      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
      else if (TREE_CODE (arg0) == ABS_EXPR)
	return arg0;
      else if (tree_expr_nonnegative_p (arg0))
	return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = fold_strip_sign_ops (arg0);
	  if (tem)
	    return fold_build1_loc (loc, ABS_EXPR, type,
				    fold_convert_loc (loc, type, tem));
	}
      return NULL_TREE;
    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
	  tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
				  negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
	  tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
	  return build_complex (type, rpart, negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      return NULL_TREE;
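      /* Illustrative note (not from the original source):
	 conj(a + b*I) is a - b*I, so CONJ_EXPR of a COMPLEX_EXPR or
	 COMPLEX_CST just negates the imaginary part, and
	 conj(conj(z)) collapses to z.  */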
    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				build_int_cst (type, 1));
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
	       && ((TREE_CODE (arg0) == MINUS_EXPR
		    && integer_onep (TREE_OPERAND (arg0, 1)))
		   || (TREE_CODE (arg0) == PLUS_EXPR
		       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
	return fold_build1_loc (loc, NEGATE_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
					 fold_convert_loc (loc, type,
							   TREE_OPERAND (arg0, 0)))))
	return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
					 fold_convert_loc (loc, type,
							   TREE_OPERAND (arg0, 1)))))
	return fold_build2_loc (loc, BIT_XOR_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)), tem);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree *elements;
	  tree elem;
	  unsigned count = VECTOR_CST_NELTS (arg0), i;

	  elements = XALLOCAVEC (tree, count);
	  for (i = 0; i < count; i++)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements[i] = elem;
	    }
	  if (i == count)
	    return build_vector (type, elements);
	}
      else if (COMPARISON_CLASS_P (arg0)
	       && (VECTOR_TYPE_P (type)
		   || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
	{
	  tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
	  enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
				     HONOR_NANS (TYPE_MODE (op_type)));
	  if (subcode != ERROR_MARK)
	    return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
			       TREE_OPERAND (arg0, 1));
	}
      return NULL_TREE;
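      /* Illustrative note (not from the original source): the two's
	 complement identity ~A == -A - 1 underlies the folds above,
	 e.g. ~(-x) becomes x - 1 and ~(x - 1) becomes -x.  */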
    case TRUTH_NOT_EXPR:
      /* Note that the operand of this must be an int
	 and its values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (loc, arg0);
      if (!tem)
	return NULL_TREE;
      return fold_convert_loc (loc, type, tem);
    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg0, 1));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_REALPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
				 fold_build1_loc (loc, REALPART_EXPR, itype,
						  TREE_OPERAND (arg0, 0)),
				 fold_build1_loc (loc, REALPART_EXPR, itype,
						  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, REALPART_EXPR, itype,
				 TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_COS);
		if (fn)
		  return build_call_expr_loc (loc, fn, 1,
					      CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;
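      /* Illustrative note (not from the original source): since
	 cexpi(x) computes cos(x) + sin(x)*I, the CALL_EXPR case
	 above rewrites crealf(cexpif(x)) as cosf(x).  */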
    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return build_zero_cst (type);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
				     TREE_OPERAND (arg0, 0));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
				 fold_build1_loc (loc, IMAGPART_EXPR, itype,
						  TREE_OPERAND (arg0, 0)),
				 fold_build1_loc (loc, IMAGPART_EXPR, itype,
						  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, IMAGPART_EXPR, itype,
				 TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, negate_expr (tem));
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_SIN);
		if (fn)
		  return build_call_expr_loc (loc, fn, 1,
					      CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;
    case INDIRECT_REF:
      /* Fold *&X to X if X is an lvalue.  */
      if (TREE_CODE (op0) == ADDR_EXPR)
	{
	  tree op00 = TREE_OPERAND (op0, 0);
	  if ((TREE_CODE (op00) == VAR_DECL
	       || TREE_CODE (op00) == PARM_DECL
	       || TREE_CODE (op00) == RESULT_DECL)
	      && !TREE_READONLY (op00))
	    return op00;
	}
      return NULL_TREE;
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;
	enum tree_code subcode;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 2);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
	  elts += nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else
	  subcode = FLOAT_EXPR;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }
    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;
	enum tree_code subcode;

	if (TREE_CODE (op0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (op0, elts))
	  return NULL_TREE;

	switch (code)
	  {
	  case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
	  case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
	  case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
	  default: gcc_unreachable ();
	  }

	for (i = 1; i < nelts; i++)
	  {
	    elts[0] = const_binop (subcode, elts[0], elts[i]);
	    if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
	      return NULL_TREE;
	    elts[i] = build_zero_cst (TREE_TYPE (type));
	  }

	return build_vector (type, elts);
      }

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* If the operation was a conversion do _not_ mark a resulting constant
   with TREE_OVERFLOW if the original constant was not.  These conversions
   have implementation defined behavior and retaining the TREE_OVERFLOW
   flag here would confuse later passes such as VRP.  */

tree
fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
				tree type, tree op0)
{
  tree res = fold_unary_loc (loc, code, type, op0);
  if (res
      && TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (op0) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (code))
    TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);

  return res;
}
/* Fold a binary bitwise/truth expression of code CODE and type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting expression.
   ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_truth_andor (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1, tree op0, tree op1)
{
  tree tem;
  /* We only do these simplifications if we are optimizing.  */
  if (!optimize)
    return NULL_TREE;
  /* Check for things like (A || B) && (A || C).  We can convert this
     to A || (B && C).  Note that either operator can be any of the four
     truth and/or operations and the transformation will still be
     valid.  Also note that we only care about order for the
     ANDIF and ORIF operators.  If B contains side effects, this
     might change the truth-value of A.  */
  if (TREE_CODE (arg0) == TREE_CODE (arg1)
      && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_AND_EXPR
	  || TREE_CODE (arg0) == TRUTH_OR_EXPR)
      && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
    {
      tree a00 = TREE_OPERAND (arg0, 0);
      tree a01 = TREE_OPERAND (arg0, 1);
      tree a10 = TREE_OPERAND (arg1, 0);
      tree a11 = TREE_OPERAND (arg1, 1);
      int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			  || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			 && (code == TRUTH_AND_EXPR
			     || code == TRUTH_OR_EXPR));

      if (operand_equal_p (a00, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				fold_build2_loc (loc, code, type, a01, a11));
      else if (commutative && operand_equal_p (a00, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				fold_build2_loc (loc, code, type, a01, a10));
      else if (commutative && operand_equal_p (a01, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
				fold_build2_loc (loc, code, type, a00, a11));

      /* This case is tricky because we must either have commutative
	 operators or else A10 must not have side-effects.  */

      else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
	       && operand_equal_p (a01, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type,
				fold_build2_loc (loc, code, type, a00, a10),
				a01);
    }
  /* See if we can build a range comparison.  */
  if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
    return tem;
  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
      if (tem)
	return fold_build2_loc (loc, code, type, tem, arg1);
    }

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
      if (tem)
	return fold_build2_loc (loc, code, type, arg0, tem);
    }
  /* Check for the possibility of merging component references.  If our
     lhs is another similar operation, try to merge its rhs with our
     rhs.  Then try to merge our lhs and rhs.  */
  if (TREE_CODE (arg0) == code
      && 0 != (tem = fold_truth_andor_1 (loc, code, type,
					 TREE_OPERAND (arg0, 1), arg1)))
    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

  if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
    return tem;
  if (LOGICAL_OP_NON_SHORT_CIRCUIT
      && (code == TRUTH_AND_EXPR
	  || code == TRUTH_ANDIF_EXPR
	  || code == TRUTH_OR_EXPR
	  || code == TRUTH_ORIF_EXPR))
    {
      enum tree_code ncode, icode;

      ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
	      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
      icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;

      /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
	 We don't want to pack more than two leafs to a non-IF AND/OR
	 expression.
	 If tree-code of left-hand operand isn't an AND/OR-IF code and not
	 equal to IF-CODE, then we don't want to add right-hand operand.
	 If the inner right-hand side of left-hand operand has
	 side-effects, or isn't simple, then we can't add to it,
	 as otherwise we might destroy if-sequence.  */
      if (TREE_CODE (arg0) == icode
	  && simple_operand_p_2 (arg1)
	  /* Needed for sequence points to handle trappings, and
	     side-effects.  */
	  && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
				 arg1);
	  return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
				  tem);
	}
      /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
	 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
      else if (TREE_CODE (arg1) == icode
	       && simple_operand_p_2 (arg0)
	       /* Needed for sequence points to handle trappings, and
		  side-effects.  */
	       && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_build2_loc (loc, ncode, type,
				 arg0, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, icode, type, tem,
				  TREE_OPERAND (arg1, 1));
	}
      /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
	 into (A OR B).
	 For sequence point consistency, we need to check for trapping,
	 and side-effects.  */
      else if (code == icode && simple_operand_p_2 (arg0)
	       && simple_operand_p_2 (arg1))
	return fold_build2_loc (loc, ncode, type, arg0, arg1);
    }

  return NULL_TREE;
}
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code compl_code;

  if (code == MIN_EXPR)
    compl_code = MAX_EXPR;
  else if (code == MAX_EXPR)
    compl_code = MIN_EXPR;
  else
    gcc_unreachable ();

  /* MIN (MAX (a, b), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));

  /* MIN (MAX (b, a), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));

  /* MIN (a, MAX (a, b)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));

  /* MIN (a, MAX (b, a)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));

  return NULL_TREE;
}
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
				 tree arg0, tree arg1,
				 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;
  bool swap = false;

  /* Match A +- CST code arg1 and CST code arg1.  We can change the
     first form only if overflow is undefined.  */
  if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
	 /* In principle pointers also have undefined overflow behavior,
	    but that causes problems elsewhere.  */
	 && !POINTER_TYPE_P (TREE_TYPE (arg0))
	 && (code0 == MINUS_EXPR
	     || code0 == PLUS_EXPR)
	 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	|| code0 == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  if (code0 == INTEGER_CST)
    cst0 = arg0;
  else
    cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  if (code0 == INTEGER_CST)
    {
      /* CST <= arg1  ->  CST-1 < arg1.  */
      if (code == LE_EXPR && sgn0 == 1)
	code = LT_EXPR;
      /* -CST < arg1  ->  -CST-1 <= arg1.  */
      else if (code == LT_EXPR && sgn0 == -1)
	code = LE_EXPR;
      /* CST > arg1  ->  CST-1 >= arg1.  */
      else if (code == GT_EXPR && sgn0 == 1)
	code = GE_EXPR;
      /* -CST >= arg1  ->  -CST-1 > arg1.  */
      else if (code == GE_EXPR && sgn0 == -1)
	code = GT_EXPR;
      else
	return NULL_TREE;
      /* arg1 code' CST' might be more canonical.  */
      swap = true;
    }
  else
    {
      /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
      if (code == LT_EXPR
	  && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = LE_EXPR;
      /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
      else if (code == GT_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = GE_EXPR;
      /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
      else if (code == LE_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = LT_EXPR;
      /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
      else if (code == GE_EXPR
	       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = GT_EXPR;
      else
	return NULL_TREE;
      *strict_overflow_p = true;
    }

  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its type's range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
      && ((sgn0 == 1
	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
	  || (sgn0 == -1
	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
    /* We cannot swap the comparison here as that would cause us to
       endlessly recurse.  */
    return NULL_TREE;

  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
		       cst0, build_int_cst (TREE_TYPE (cst0), 1));
  if (code0 != INTEGER_CST)
    t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
  t = fold_convert (TREE_TYPE (arg1), t);

  /* If swapping might yield to a more canonical form, do so.  */
  if (swap)
    return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
  else
    return fold_build2_loc (loc, code, type, t, arg1);
}
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
			       tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when reducing constant in comparison");

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
				       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
				       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}
/* Return whether BASE + OFFSET + BITPOS may wrap around the address
   space.  This is used to avoid issuing overflow warnings for
   expressions like &p->x which can not wrap.  */

static bool
pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
{
  if (!POINTER_TYPE_P (TREE_TYPE (base)))
    return true;

  if (bitpos < 0)
    return true;

  wide_int wi_offset;
  int precision = TYPE_PRECISION (TREE_TYPE (base));
  if (offset == NULL_TREE)
    wi_offset = wi::zero (precision);
  else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
    return true;
  else
    wi_offset = offset;

  bool overflow;
  wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
  wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
  if (overflow)
    return true;

  if (!wi::fits_uhwi_p (total))
    return true;

  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
  if (size <= 0)
    return true;

  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
     array.  */
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      HOST_WIDE_INT base_size;

      base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
      if (base_size > 0 && size < base_size)
	size = base_size;
    }

  return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
}
/* Return the HOST_WIDE_INT least significant bits of T, a sizetype
   kind INTEGER_CST.  This makes sure to properly sign-extend the
   constant.  */

static HOST_WIDE_INT
size_low_cst (const_tree t)
{
  HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
  int prec = TYPE_PRECISION (TREE_TYPE (t));
  if (prec < HOST_BITS_PER_WIDE_INT)
    return sext_hwi (w, prec);
  return w;
}
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should call fold_binary.  Fold a comparison with
   tree code CODE and type TYPE with operands OP0 and OP1.  Return
   the folded comparison or NULL_TREE.  */

static tree
fold_comparison (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  tem = fold_relational_const (code, type, arg0, arg1);
  if (tem != NULL_TREE)
    return tem;

  /* If one arg is a real or integer constant, put it last.  */
  if (tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && TREE_CODE (arg1) == INTEGER_CST
      && !TREE_OVERFLOW (arg1))
    {
      const enum tree_code
	reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
      tree variable = TREE_OPERAND (arg0, 0);
      tree new_const = int_const_binop (reverse_op, const2, const1);

      /* If the constant operation overflowed this can be
	 simplified as a comparison against INT_MAX/INT_MIN.  */
      if (TREE_OVERFLOW (new_const))
	{
	  int const1_sgn = tree_int_cst_sgn (const1);
	  enum tree_code code2 = code;

	  /* Get the sign of the constant on the lhs if the
	     operation were VARIABLE + CONST1.  */
	  if (TREE_CODE (arg0) == MINUS_EXPR)
	    const1_sgn = -const1_sgn;

	  /* The sign of the constant determines if we overflowed
	     INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
	     Canonicalize to the INT_MIN overflow by swapping the comparison
	     if necessary.  */
	  if (const1_sgn == -1)
	    code2 = swap_tree_comparison (code);

	  /* We now can look at the canonicalized case
	       VARIABLE + 1  CODE2  INT_MIN
	     and decide on the result.  */
	  switch (code2)
	    {
	    case EQ_EXPR:
	    case LT_EXPR:
	    case LE_EXPR:
	      return
		omit_one_operand_loc (loc, type, boolean_false_node, variable);

	    case NE_EXPR:
	    case GE_EXPR:
	    case GT_EXPR:
	      return
		omit_one_operand_loc (loc, type, boolean_true_node, variable);

	    default:
	      gcc_unreachable ();
	    }
	}
      else
	{
	  if (!equality_code)
	    fold_overflow_warning ("assuming signed overflow does not occur "
				   "when changing X +- C1 cmp C2 to "
				   "X cmp C2 -+ C1",
				   WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type, variable, new_const);
	}
    }
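  /* Illustrative note (not from the original source): for signed
     int x, "x + 10 < 20" becomes "x < 10" here; if computing
     C2 -+ C1 overflows, as in "x - 1 < INT_MAX", the result is
     decided outright from the canonicalized INT_MIN form above.  */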
  /* Transform comparisons of the form X - Y CMP 0 to X CMP Y.  */
  if (TREE_CODE (arg0) == MINUS_EXPR
      && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
      && integer_zerop (arg1))
    {
      if (!equality_code)
	fold_overflow_warning ("assuming signed overflow does not occur "
			       "when changing X - Y cmp 0 to X cmp Y",
			       WARN_STRICT_OVERFLOW_COMPARISON);
      return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg0, 1));
    }
  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
	  || TREE_CODE (arg1) == ADDR_EXPR
	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      enum machine_mode mode;
      int volatilep, unsignedp;
      bool indirect_base0 = false, indirect_base1 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
	 get_inner_reference, but put it back by stripping INDIRECT_REF
	 off the base object if possible.  indirect_baseN will be true
	 if baseN is not an address but refers to the object itself.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
				       &bitsize, &bitpos0, &offset0, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base0) == INDIRECT_REF)
	    base0 = TREE_OPERAND (base0, 0);
	  else
	    indirect_base0 = true;
	}
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  base0 = TREE_OPERAND (arg0, 0);
	  STRIP_SIGN_NOPS (base0);
	  if (TREE_CODE (base0) == ADDR_EXPR)
	    {
	      base0 = TREE_OPERAND (base0, 0);
	      indirect_base0 = true;
	    }
	  offset0 = TREE_OPERAND (arg0, 1);
	  if (tree_fits_shwi_p (offset0))
	    {
	      HOST_WIDE_INT off = size_low_cst (offset0);
	      if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
				   * BITS_PER_UNIT)
		  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
		{
		  bitpos0 = off * BITS_PER_UNIT;
		  offset0 = NULL_TREE;
		}
	    }
	}

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
	{
	  base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
				       &bitsize, &bitpos1, &offset1, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base1) == INDIRECT_REF)
	    base1 = TREE_OPERAND (base1, 0);
	  else
	    indirect_base1 = true;
	}
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	{
	  base1 = TREE_OPERAND (arg1, 0);
	  STRIP_SIGN_NOPS (base1);
	  if (TREE_CODE (base1) == ADDR_EXPR)
	    {
	      base1 = TREE_OPERAND (base1, 0);
	      indirect_base1 = true;
	    }
	  offset1 = TREE_OPERAND (arg1, 1);
	  if (tree_fits_shwi_p (offset1))
	    {
	      HOST_WIDE_INT off = size_low_cst (offset1);
	      if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
				   * BITS_PER_UNIT)
		  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
		{
		  bitpos1 = off * BITS_PER_UNIT;
		  offset1 = NULL_TREE;
		}
	    }
	}

      /* A local variable can never be pointed to by
	 the default SSA name of an incoming parameter.  */
      if ((TREE_CODE (arg0) == ADDR_EXPR
	   && indirect_base0
	   && TREE_CODE (base0) == VAR_DECL
	   && auto_var_in_fn_p (base0, current_function_decl)
	   && !indirect_base1
	   && TREE_CODE (base1) == SSA_NAME
	   && SSA_NAME_IS_DEFAULT_DEF (base1)
	   && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
	  || (TREE_CODE (arg1) == ADDR_EXPR
	      && indirect_base1
	      && TREE_CODE (base1) == VAR_DECL
	      && auto_var_in_fn_p (base1, current_function_decl)
	      && !indirect_base0
	      && TREE_CODE (base0) == SSA_NAME
	      && SSA_NAME_IS_DEFAULT_DEF (base0)
	      && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
	{
	  if (code == NE_EXPR)
	    return constant_boolean_node (1, type);
	  else if (code == EQ_EXPR)
	    return constant_boolean_node (0, type);
	}
      /* If we have equivalent bases we might be able to simplify.  */
      else if (indirect_base0 == indirect_base1
	       && operand_equal_p (base0, base1, 0))
	{
	  /* We can fold this expression to a constant if the non-constant
	     offset parts are equal.  */
	  if ((offset0 == offset1
	       || (offset0 && offset1
		   && operand_equal_p (offset0, offset1, 0)))
	      && (equality_code
		  || (indirect_base0 && DECL_P (base0))
		  || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      if (!equality_code
		  && bitpos0 != bitpos1
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_CONDITIONAL);

	      switch (code)
		{
		case EQ_EXPR:
		  return constant_boolean_node (bitpos0 == bitpos1, type);
		case NE_EXPR:
		  return constant_boolean_node (bitpos0 != bitpos1, type);
		case LT_EXPR:
		  return constant_boolean_node (bitpos0 < bitpos1, type);
		case LE_EXPR:
		  return constant_boolean_node (bitpos0 <= bitpos1, type);
		case GE_EXPR:
		  return constant_boolean_node (bitpos0 >= bitpos1, type);
		case GT_EXPR:
		  return constant_boolean_node (bitpos0 > bitpos1, type);
		default:;
		}
	    }
	  /* We can simplify the comparison to a comparison of the variable
	     offset parts if the constant offset parts are equal.
	     Be careful to use signed sizetype here because otherwise we
	     mess with array offsets in the wrong way.  This is possible
	     because pointer arithmetic is restricted to retain within an
	     object and overflow on pointer differences is undefined as of
	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
	  else if (bitpos0 == bitpos1
		   && (equality_code
		       || (indirect_base0 && DECL_P (base0))
		       || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      /* By converting to signed sizetype we cover middle-end pointer
		 arithmetic which operates on unsigned pointer types of size
		 type size and ARRAY_REF offsets which are properly sign or
		 zero extended from their type in case it is narrower than
		 sizetype.  */
	      if (offset0 == NULL_TREE)
		offset0 = build_int_cst (ssizetype, 0);
	      else
		offset0 = fold_convert_loc (loc, ssizetype, offset0);
	      if (offset1 == NULL_TREE)
		offset1 = build_int_cst (ssizetype, 0);
	      else
		offset1 = fold_convert_loc (loc, ssizetype, offset1);

	      if (!equality_code
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_COMPARISON);

	      return fold_build2_loc (loc, code, type, offset0, offset1);
	    }
	}
      /* For non-equal bases we can simplify if they are addresses
	 of local binding decls or constants.  */
      else if (indirect_base0 && indirect_base1
	       /* We know that !operand_equal_p (base0, base1, 0)
		  because the if condition was false.  But make
		  sure two decls are not the same.  */
	       && base0 != base1
	       && TREE_CODE (arg0) == ADDR_EXPR
	       && TREE_CODE (arg1) == ADDR_EXPR
	       && (((TREE_CODE (base0) == VAR_DECL
		     || TREE_CODE (base0) == PARM_DECL)
		    && (targetm.binds_local_p (base0)
			|| CONSTANT_CLASS_P (base1)))
		   || CONSTANT_CLASS_P (base0))
	       && (((TREE_CODE (base1) == VAR_DECL
		     || TREE_CODE (base1) == PARM_DECL)
		    && (targetm.binds_local_p (base1)
			|| CONSTANT_CLASS_P (base0)))
		   || CONSTANT_CLASS_P (base1)))
	{
	  if (code == EQ_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_false_node,
					  arg0, arg1);
	  else if (code == NE_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_true_node,
					  arg0, arg1);
	}
      /* For equal offsets we can simplify to a comparison of the
	 base addresses.  */
      else if (bitpos0 == bitpos1
	       && (indirect_base0
		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
	       && (indirect_base1
		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
	       && ((offset0 == offset1)
		   || (offset0 && offset1
		       && operand_equal_p (offset0, offset1, 0))))
	{
	  if (indirect_base0)
	    base0 = build_fold_addr_expr_loc (loc, base0);
	  if (indirect_base1)
	    base1 = build_fold_addr_expr_loc (loc, base1);
	  return fold_build2_loc (loc, code, type, base0, base1);
	}
    }
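  /* Illustrative note (not from the original source): for int a[10],
     the test &a[2] < &a[6] has equal bases and constant byte
     offsets, so it folds to a constant via the bitpos comparison
     above.  */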
  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one and has the same sign.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
				      "occur when combining constants around "
				      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
	 of lower absolute value and of same sign than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const2, const1);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  variable1,
				  fold_build2_loc (loc, TREE_CODE (arg1),
						   TREE_TYPE (arg1),
						   variable2, cst));
	}

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const1, const2);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  fold_build2_loc (loc, TREE_CODE (arg0),
						   TREE_TYPE (arg0),
						   variable1, cst),
				  variable2);
	}
    }
  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
     signed arithmetic case.  That form is created by the compiler
     often enough for folding it to be of value.  One example is in
     computing loop trip counts after Operator Strength Reduction.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && TREE_CODE (arg0) == MULT_EXPR
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && integer_zerop (arg1))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;			/* zero */
      tree variable1 = TREE_OPERAND (arg0, 0);
      enum tree_code cmp_code = code;

      /* Handle unfolded multiplication by zero.  */
      if (integer_zerop (const1))
	return fold_build2_loc (loc, cmp_code, type, const1, const2);

      fold_overflow_warning (("assuming signed overflow does not occur when "
			      "eliminating multiplication in comparison "
			      "with zero"),
			     WARN_STRICT_OVERFLOW_COMPARISON);

      /* If const1 is negative we swap the sense of the comparison.  */
      if (tree_int_cst_sgn (const1) < 0)
	cmp_code = swap_tree_comparison (cmp_code);

      return fold_build2_loc (loc, cmp_code, type, variable1, const2);
    }
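  /* Illustrative note (not from the original source): for signed
     int x, "x * 4 > 0" becomes "x > 0", and "x * -4 > 0" swaps the
     sense to "x < 0" -- valid only because signed overflow is
     assumed not to happen.  */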
  tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
  if (tem)
    return tem;
)))
9331 tree targ0
= strip_float_extensions (arg0
);
9332 tree targ1
= strip_float_extensions (arg1
);
9333 tree newtype
= TREE_TYPE (targ0
);
9335 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
9336 newtype
= TREE_TYPE (targ1
);
9338 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9339 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
9340 return fold_build2_loc (loc
, code
, type
,
9341 fold_convert_loc (loc
, newtype
, targ0
),
9342 fold_convert_loc (loc
, newtype
, targ1
));
9344 /* (-a) CMP (-b) -> b CMP a */
9345 if (TREE_CODE (arg0
) == NEGATE_EXPR
9346 && TREE_CODE (arg1
) == NEGATE_EXPR
)
9347 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg1
, 0),
9348 TREE_OPERAND (arg0
, 0));
9350 if (TREE_CODE (arg1
) == REAL_CST
)
9352 REAL_VALUE_TYPE cst
;
9353 cst
= TREE_REAL_CST (arg1
);
9355 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9356 if (TREE_CODE (arg0
) == NEGATE_EXPR
)
9357 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
,
9358 TREE_OPERAND (arg0
, 0),
9359 build_real (TREE_TYPE (arg1
),
9360 real_value_negate (&cst
)));
9362 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9363 /* a CMP (-0) -> a CMP 0 */
9364 if (REAL_VALUE_MINUS_ZERO (cst
))
9365 return fold_build2_loc (loc
, code
, type
, arg0
,
9366 build_real (TREE_TYPE (arg1
), dconst0
));
9368 /* x != NaN is always true, other ops are always false. */
9369 if (REAL_VALUE_ISNAN (cst
)
9370 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1
))))
9372 tem
= (code
== NE_EXPR
) ? integer_one_node
: integer_zero_node
;
9373 return omit_one_operand_loc (loc
, type
, tem
, arg0
);
9376 /* Fold comparisons against infinity. */
9377 if (REAL_VALUE_ISINF (cst
)
9378 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1
))))
9380 tem
= fold_inf_compare (loc
, code
, type
, arg0
, arg1
);
9381 if (tem
!= NULL_TREE
)
9386 /* If this is a comparison of a real constant with a PLUS_EXPR
9387 or a MINUS_EXPR of a real constant, we can convert it into a
9388 comparison with a revised real constant as long as no overflow
9389 occurs when unsafe_math_optimizations are enabled. */
9390 if (flag_unsafe_math_optimizations
9391 && TREE_CODE (arg1
) == REAL_CST
9392 && (TREE_CODE (arg0
) == PLUS_EXPR
9393 || TREE_CODE (arg0
) == MINUS_EXPR
)
9394 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
9395 && 0 != (tem
= const_binop (TREE_CODE (arg0
) == PLUS_EXPR
9396 ? MINUS_EXPR
: PLUS_EXPR
,
9397 arg1
, TREE_OPERAND (arg0
, 1)))
9398 && !TREE_OVERFLOW (tem
))
9399 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
9401 /* Likewise, we can simplify a comparison of a real constant with
9402 a MINUS_EXPR whose first operand is also a real constant, i.e.
9403 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9404 floating-point types only if -fassociative-math is set. */
9405 if (flag_associative_math
9406 && TREE_CODE (arg1
) == REAL_CST
9407 && TREE_CODE (arg0
) == MINUS_EXPR
9408 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == REAL_CST
9409 && 0 != (tem
= const_binop (MINUS_EXPR
, TREE_OPERAND (arg0
, 0),
9411 && !TREE_OVERFLOW (tem
))
9412 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
,
9413 TREE_OPERAND (arg0
, 1), tem
);
9415 /* Fold comparisons against built-in math functions. */
9416 if (TREE_CODE (arg1
) == REAL_CST
9417 && flag_unsafe_math_optimizations
9418 && ! flag_errno_math
)
9420 enum built_in_function fcode
= builtin_mathfn_code (arg0
);
9422 if (fcode
!= END_BUILTINS
)
9424 tem
= fold_mathfn_compare (loc
, fcode
, code
, type
, arg0
, arg1
);
9425 if (tem
!= NULL_TREE
)
  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
      && CONVERT_EXPR_P (arg0))
    {
      /* If we are widening one operand of an integer comparison,
	 see if the other operand is similarly being widened.  Perhaps we
	 can do the comparison in the narrower type.  */
      tem = fold_widened_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;

      /* Or if we are changing signedness.  */
      tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;
    }
  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
	  || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (loc, code, type, op0, op1);
      if (tem)
	return tem;
    }
  /* Simplify comparison of something with itself.  (For IEEE
     floating-point, we can only do some of these simplifications.)  */
  if (operand_equal_p (arg0, arg1, 0))
    {
      switch (code)
	{
	case EQ_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  break;

	case GE_EXPR:
	case LE_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);

	case NE_EXPR:
	  /* For NE, we can only do this simplification if integer
	     or we don't honor IEEE floating point NaNs.  */
	  if (FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    break;
	  /* ... fall through ...  */
	case GT_EXPR:
	case LT_EXPR:
	  return constant_boolean_node (0, type);
	default:
	  gcc_unreachable ();
	}
    }
  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like  ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	  /* Don't handle degenerate cases here; they should already
	     have been handled anyway.  */
	  && cval1 != 0 && cval2 != 0
	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	{
	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	  /* We can't just pass T to eval_subst in case cval1 or cval2
	     was the same as ARG1.  */

	  tree high_result
	    = fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, maxval,
					   cval2, minval),
			       arg1);
	  tree equal_result
	    = fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, maxval,
					   cval2, maxval),
			       arg1);
	  tree low_result
	    = fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, minval,
					   cval2, maxval),
			       arg1);

	  /* All three of these results should be 0 or 1.  Confirm they are.
	     Then use those values to select the proper code to use.  */

	  if (TREE_CODE (high_result) == INTEGER_CST
	      && TREE_CODE (equal_result) == INTEGER_CST
	      && TREE_CODE (low_result) == INTEGER_CST)
	    {
	      /* Make a 3-bit mask with the high-order bit being the
		 value for `>', the next for '=', and the low for '<'.  */
	      switch ((integer_onep (high_result) * 4)
		      + (integer_onep (equal_result) * 2)
		      + integer_onep (low_result))
		{
		case 0:
		  /* Always false.  */
		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
		case 1:
		  code = LT_EXPR;
		  break;
		case 2:
		  code = EQ_EXPR;
		  break;
		case 3:
		  code = LE_EXPR;
		  break;
		case 4:
		  code = GT_EXPR;
		  break;
		case 5:
		  code = NE_EXPR;
		  break;
		case 6:
		  code = GE_EXPR;
		  break;
		case 7:
		  /* Always true.  */
		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
		}

	      if (save_p)
		{
		  tem = save_expr (build2 (code, type, cval1, cval2));
		  SET_EXPR_LOCATION (tem, loc);
		  return tem;
		}
	      return fold_build2_loc (loc, code, type, cval1, cval2);
	    }
	}
    }
  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (loc, code, type, arg0, arg1);
      if (tem != NULL_TREE)
	return tem;
    }

  /* Fold ~X op ~Y as Y op X.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == BIT_NOT_EXPR)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, code, type,
			      fold_convert_loc (loc, cmp_type,
						TREE_OPERAND (arg1, 0)),
			      TREE_OPERAND (arg0, 0));
    }

  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, swap_tree_comparison (code), type,
			      TREE_OPERAND (arg0, 0),
			      fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
					       fold_convert_loc (loc, cmp_type,
								 arg1)));
    }

  return NULL_TREE;
}
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (location_t loc, tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
      ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2_loc (loc, PLUS_EXPR, itype,
			 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
			 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
  return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
			  build_zero_cst (itype));
}
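/* Illustrative note (not from the original source): for z = a + b*I,
   z * conj(z) = (a + b*I) * (a - b*I) = a*a + b*b, with the imaginary
   part exactly zero -- which is the COMPLEX_EXPR built above.  */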
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
   power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
   guarantees that P and N have the same least significant log2(M) bits.
   N is not otherwise constrained.  In particular, N is not normalized to
   0 <= N < M as is common.  In general, the precise value of P is unknown.
   M is chosen as large as possible such that constant N can be determined.

   Returns M and sets *RESIDUE to N.

   If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
   account.  This is not always possible due to PR 35705.  */

static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
				 bool allow_func_align)
{
  enum tree_code code;

  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      unsigned int bitalign;
      get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
      *residue /= BITS_PER_UNIT;
      return bitalign / BITS_PER_UNIT;
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue,
						 allow_func_align);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
	{
	  *residue += TREE_INT_CST_LOW (op1);
	  return modulus;
	}
      else if (inner_code == MULT_EXPR)
	{
	  op1 = TREE_OPERAND (op1, 1);
	  if (TREE_CODE (op1) == INTEGER_CST)
	    {
	      unsigned HOST_WIDE_INT align;

	      /* Compute the greatest power-of-2 divisor of op1.  */
	      align = TREE_INT_CST_LOW (op1);
	      align &= -align;

	      /* If align is non-zero and less than *modulus, replace
		 *modulus with align.  If align is 0, then either op1 is 0
		 or the greatest power-of-2 divisor of op1 doesn't fit in an
		 unsigned HOST_WIDE_INT.  In either case, no additional
		 constraint is imposed.  */
	      if (align)
		modulus = MIN (modulus, align);

	      return modulus;
	    }
	}
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
}
/* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
   CONSTRUCTOR ARG into array ELTS and return true if successful.  */

static bool
vec_cst_ctor_to_array (tree arg, tree *elts)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;

  if (TREE_CODE (arg) == VECTOR_CST)
    {
      for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
	elts[i] = VECTOR_CST_ELT (arg, i);
    }
  else if (TREE_CODE (arg) == CONSTRUCTOR)
    {
      constructor_elt *elt;

      FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
	if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
	  return false;
	else
	  elts[i] = elt->value;
    }
  else
    return false;

  for (; i < nelts; i++)
    elts[i]
      = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);

  return true;
}
/* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
   selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
   NULL_TREE otherwise.  */

static tree
fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
  tree *elts;
  bool need_ctor = false;

  gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
	      && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
  if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
      || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
    return NULL_TREE;

  elts = XALLOCAVEC (tree, nelts * 3);
  if (!vec_cst_ctor_to_array (arg0, elts)
      || !vec_cst_ctor_to_array (arg1, elts + nelts))
    return NULL_TREE;

  for (i = 0; i < nelts; i++)
    {
      if (!CONSTANT_CLASS_P (elts[sel[i]]))
	need_ctor = true;
      elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
    }

  if (need_ctor)
    {
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nelts);
      for (i = 0; i < nelts; i++)
	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
      return build_constructor (type, v);
    }
  else
    return build_vector (type, &elts[2 * nelts]);
}
/* Try to fold a pointer difference of type TYPE between two address
   expressions of array references AREF0 and AREF1 using location LOC.
   Return a simplified expression for the difference or NULL_TREE.  */

static tree
fold_addr_of_array_ref_difference (location_t loc, tree type,
				   tree aref0, tree aref1)
{
  tree base0 = TREE_OPERAND (aref0, 0);
  tree base1 = TREE_OPERAND (aref1, 0);
  tree base_offset = build_int_cst (type, 0);

  /* If the bases are array references as well, recurse.  If the bases
     are pointer indirections compute the difference of the pointers.
     If the bases are equal, we are set.  */
  if ((TREE_CODE (base0) == ARRAY_REF
       && TREE_CODE (base1) == ARRAY_REF
       && (base_offset
	   = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
      || (INDIRECT_REF_P (base0)
	  && INDIRECT_REF_P (base1)
	  && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
					     TREE_OPERAND (base0, 0),
					     TREE_OPERAND (base1, 0))))
      || operand_equal_p (base0, base1, 0))
    {
      tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
      tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
      tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
      tree diff = build2 (MINUS_EXPR, type, op0, op1);
      return fold_build2_loc (loc, PLUS_EXPR, type,
			      base_offset,
			      fold_build2_loc (loc, MULT_EXPR, type,
					       diff, esz));
    }
  return NULL_TREE;
}
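/* Illustration (hypothetical values): for int a[10], the index difference
   of &a[7] and &a[2] is 7 - 2 == 5, scaled by the element size to give
   the byte offset 20; equal bases make base_offset zero.  */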
/* If the real or vector real constant CST of type TYPE has an exact
   inverse, return it, else return NULL.  */

static tree
exact_inverse (tree type, tree cst)
{
  REAL_VALUE_TYPE r;
  tree unit_type, *elts;
  enum machine_mode mode;
  unsigned vec_nelts, i;

  switch (TREE_CODE (cst))
    {
    case REAL_CST:
      r = TREE_REAL_CST (cst);

      if (exact_real_inverse (TYPE_MODE (type), &r))
	return build_real (type, r);

      return NULL_TREE;

    case VECTOR_CST:
      vec_nelts = VECTOR_CST_NELTS (cst);
      elts = XALLOCAVEC (tree, vec_nelts);
      unit_type = TREE_TYPE (type);
      mode = TYPE_MODE (unit_type);

      for (i = 0; i < vec_nelts; i++)
	{
	  r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
	  if (!exact_real_inverse (mode, &r))
	    return NULL_TREE;
	  elts[i] = build_real (unit_type, r);
	}

      return build_vector (type, elts);

    default:
      return NULL_TREE;
    }
}
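/* Illustration: 4.0 has the exact binary inverse 0.25, so a division by
   4.0 can become a multiplication by 0.25; 3.0 has no exact inverse, so
   NULL_TREE is returned and the division is left alone.  */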
/* Mask out the tz least significant bits of X of type TYPE where
   tz is the number of trailing zeroes in Y.  */
static wide_int
mask_with_tz (tree type, const wide_int &x, const wide_int &y)
{
  int tz = wi::ctz (y);
  if (tz > 0)
    return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
  return x;
}
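/* Worked example: if Y is 8 (binary 1000), it has three trailing zeroes,
   so for X = 10111 (binary) the result is 10000: the three least
   significant bits of X are cleared.  */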
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					strict_overflow_p);

    case CALL_EXPR:
      {
	tree fndecl = get_callee_fndecl (t);
	if (!fndecl) return false;
	if (flag_delete_null_pointer_checks && !flag_check_new
	    && DECL_IS_OPERATOR_NEW (fndecl)
	    && !TREE_NOTHROW (fndecl))
	  return true;
	if (flag_delete_null_pointer_checks
	    && lookup_attribute ("returns_nonnull",
				 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
	  return true;
	return alloca_call_p (t);
      }

    default:
      break;
    }
  return false;
}
/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-zero"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
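/* Illustration: the address of a declared object, e.g. &x for an ordinary
   variable x, is typically known to be nonzero here (via the
   tcc_declaration path), which lets comparisons such as &x != 0 fold.  */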
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  LOC is the location of the resulting expression.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

tree
fold_binary_loc (location_t loc,
		 enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;
  bool strict_overflow_p;

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 2
	      && op0 != NULL_TREE
	      && op1 != NULL_TREE);

  arg0 = op0;
  arg1 = op1;

  /* Strip any conversions that don't change the mode.  This is
     safe for every expression, except for a comparison expression
     because its signedness is derived from its operands.  So, in
     the latter case, only strip conversions that don't change the
     signedness.  MIN_EXPR/MAX_EXPR also need signedness of arguments
     preserved.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     case, the appropriate type conversions should be put back in
     the tree that will get out of the constant folder.  */

  if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
    {
      STRIP_SIGN_NOPS (arg0);
      STRIP_SIGN_NOPS (arg1);
    }
  else
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }

  /* Note that TREE_CONSTANT isn't enough: static var addresses are
     constant but we can't do arithmetic on them.  */

  if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
    {
      if (kind == tcc_binary)
	{
	  /* Make sure type and arg0 have the same saturating flag.  */
	  gcc_assert (TYPE_SATURATING (type)
		      == TYPE_SATURATING (TREE_TYPE (arg0)));
	  tem = const_binop (code, arg0, arg1);
	}
      else if (kind == tcc_comparison)
	tem = fold_relational_const (code, type, arg0, arg1);
      else
	tem = NULL_TREE;

      if (tem != NULL_TREE)
	{
	  if (TREE_TYPE (tem) != type)
	    tem = fold_convert_loc (loc, type, tem);
	  return tem;
	}
    }

  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, code, type, op1, op0);
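  /* Illustration: two INTEGER_CST operands such as 2 + 3 are folded right
     here through const_binop to the constant 5, while constant comparisons
     such as 2 < 3 go through fold_relational_const instead.  */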
  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
     where one of the operands is a comparison and the other is a
     comparison, a BIT_AND_EXPR with the constant 1, or a truth value.
     In that case, the code below would make the expression more complex.
     Change it to a TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar
     NE_EXPR to TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a
     TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && TREE_CODE (type) != VECTOR_TYPE
      && ((truth_value_p (TREE_CODE (arg0))
	   && (truth_value_p (TREE_CODE (arg1))
	       || (TREE_CODE (arg1) == BIT_AND_EXPR
		   && integer_onep (TREE_OPERAND (arg1, 1)))))
	  || (truth_value_p (TREE_CODE (arg1))
	      && (truth_value_p (TREE_CODE (arg0))
		  || (TREE_CODE (arg0) == BIT_AND_EXPR
		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
			     : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
			     : TRUTH_XOR_EXPR,
			     boolean_type_node,
			     fold_convert_loc (loc, boolean_type_node, arg0),
			     fold_convert_loc (loc, boolean_type_node, arg1));

      if (code == EQ_EXPR)
	tem = invert_truthvalue_loc (loc, tem);

      return fold_convert_loc (loc, type, tem);
    }
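  /* Illustration: for (a == b) & (c < d), both operands are truth values,
     so the BIT_AND_EXPR is rewritten as a TRUTH_AND_EXPR of the two
     comparisons; the EQ_EXPR form becomes an inverted TRUTH_XOR_EXPR.  */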
  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	{
	  tem = fold_build2_loc (loc, code, type,
				 fold_convert_loc (loc, TREE_TYPE (op0),
						   TREE_OPERAND (arg0, 1)), op1);
	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
			     tem);
	}
      if (TREE_CODE (arg1) == COMPOUND_EXPR
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_build2_loc (loc, code, type, op0,
				 fold_convert_loc (loc, TREE_TYPE (op1),
						   TREE_OPERAND (arg1, 1)));
	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
			     tem);
	}

      if (TREE_CODE (arg0) == COND_EXPR
	  || TREE_CODE (arg0) == VEC_COND_EXPR
	  || COMPARISON_CLASS_P (arg0))
	{
	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
						     arg0, arg1,
						     /*cond_first_p=*/1);
	  if (tem != NULL_TREE)
	    return tem;
	}

      if (TREE_CODE (arg1) == COND_EXPR
	  || TREE_CODE (arg1) == VEC_COND_EXPR
	  || COMPARISON_CLASS_P (arg1))
	{
	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
						     arg1, arg0,
						     /*cond_first_p=*/0);
	  if (tem != NULL_TREE)
	    return tem;
	}
    }
  switch (code)
    {
    case MEM_REF:
      /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
	{
	  tree iref = TREE_OPERAND (arg0, 0);
	  return fold_build2 (MEM_REF, type,
			      TREE_OPERAND (iref, 0),
			      int_const_binop (PLUS_EXPR, arg1,
					       TREE_OPERAND (iref, 1)));
	}

      /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && handled_component_p (TREE_OPERAND (arg0, 0)))
	{
	  tree base;
	  HOST_WIDE_INT coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
						&coffset);
	  if (!base)
	    return NULL_TREE;
	  return fold_build2 (MEM_REF, type,
			      build_fold_addr_expr (base),
			      int_const_binop (PLUS_EXPR, arg1,
					       size_int (coffset)));
	}

      return NULL_TREE;
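      /* Illustration (hypothetical offsets): MEM[&MEM[p, 4], 8] becomes
	 MEM[p, 12] above, merging the two constant offsets.  */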
    case POINTER_PLUS_EXPR:
      /* 0 +p index -> (type)index */
      if (integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

      /* PTR +p 0 -> PTR */
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
	return fold_convert_loc (loc, type,
				 fold_build2_loc (loc, PLUS_EXPR, sizetype,
						  fold_convert_loc (loc, sizetype,
								    arg1),
						  fold_convert_loc (loc, sizetype,
								    arg0)));

      /* (PTR +p B) +p A -> PTR +p (B + A) */
      if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  tree inner;
	  tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
				   arg01, fold_convert_loc (loc, sizetype, arg1));
	  return fold_convert_loc (loc, type,
				   fold_build_pointer_plus_loc (loc,
								arg00, inner));
	}

      /* PTR_CST +p CST -> CST1 */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
				fold_convert_loc (loc, type, arg1));

      /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
	 of the array.  The loop optimizer sometimes produces this kind of
	 expression.  */
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tem = try_move_mult_to_index (loc, arg0,
					fold_convert_loc (loc,
							  ssizetype, arg1));
	  if (tem)
	    return fold_convert_loc (loc, type, tem);
	}

      return NULL_TREE;
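      /* Illustration (hypothetical offsets): (p p+ 4) p+ 8 is reassociated
	 above into p p+ 12, keeping a single POINTER_PLUS_EXPR.  */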
    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR
	  && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1)
	  && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type, arg1),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));

      if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
	{
	  /* Convert ~A + 1 to -A.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && integer_onep (arg1))
	    return fold_build1_loc (loc, NEGATE_EXPR, type,
				    fold_convert_loc (loc, type,
						      TREE_OPERAND (arg0, 0)));

	  /* ~X + X is -1.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && !TYPE_OVERFLOW_TRAPS (type))
	    {
	      tree tem = TREE_OPERAND (arg0, 0);

	      STRIP_NOPS (tem);
	      if (operand_equal_p (tem, arg1, 0))
		{
		  t1 = build_all_ones_cst (type);
		  return omit_one_operand_loc (loc, type, t1, arg1);
		}
	    }

	  /* X + ~X is -1.  */
	  if (TREE_CODE (arg1) == BIT_NOT_EXPR
	      && !TYPE_OVERFLOW_TRAPS (type))
	    {
	      tree tem = TREE_OPERAND (arg1, 0);

	      STRIP_NOPS (tem);
	      if (operand_equal_p (arg0, tem, 0))
		{
		  t1 = build_all_ones_cst (type);
		  return omit_one_operand_loc (loc, type, t1, arg0);
		}
	    }

	  /* X + (X / CST) * -CST is X % CST.  */
	  if (TREE_CODE (arg1) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
	      && operand_equal_p (arg0,
				  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
	    {
	      tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
	      tree cst1 = TREE_OPERAND (arg1, 1);
	      tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
					  cst1, cst0);
	      if (sum && integer_zerop (sum))
		return fold_convert_loc (loc, type,
					 fold_build2_loc (loc, TRUNC_MOD_EXPR,
							  TREE_TYPE (arg0), arg0,
							  cst0));
	    }
	}
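      /* Worked example for the fold above: with X = 17 and CST = 5,
	 X + (X / 5) * -5 == 17 - 15 == 2, which is exactly 17 % 5.  */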
      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
	 one.  Make sure the type is not saturating and has the signedness of
	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
	   || TREE_CODE (arg1) == MULT_EXPR)
	  && !TYPE_SATURATING (type)
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
	{
	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}

      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
	     with a constant, and the two constants have no bits in common,
	     we should treat this as a BIT_IOR_EXPR since this may produce more
	     simplifications.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	      && wi::bit_and (TREE_OPERAND (arg0, 1),
			      TREE_OPERAND (arg1, 1)) == 0)
	    {
	      code = BIT_IOR_EXPR;
	      goto bit_ior;
	    }
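	  /* Worked example: (x & 4) + (x & 3) masks with 100 and 011
	     (binary), which share no bits, so from here on it is handled
	     exactly like (x & 4) | (x & 3).  */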
	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
	     (plus (plus (mult) (mult)) (foo)) so that we can
	     take advantage of the factoring cases below.  */
	  if (TYPE_OVERFLOW_WRAPS (type)
	      && (((TREE_CODE (arg0) == PLUS_EXPR
		    || TREE_CODE (arg0) == MINUS_EXPR)
		   && TREE_CODE (arg1) == MULT_EXPR)
		  || ((TREE_CODE (arg1) == PLUS_EXPR
		       || TREE_CODE (arg1) == MINUS_EXPR)
		      && TREE_CODE (arg0) == MULT_EXPR)))
	    {
	      tree parg0, parg1, parg, marg;
	      enum tree_code pcode;

	      if (TREE_CODE (arg1) == MULT_EXPR)
		parg = arg0, marg = arg1;
	      else
		parg = arg1, marg = arg0;
	      pcode = TREE_CODE (parg);
	      parg0 = TREE_OPERAND (parg, 0);
	      parg1 = TREE_OPERAND (parg, 1);
	      STRIP_NOPS (parg0);
	      STRIP_NOPS (parg1);

	      if (TREE_CODE (parg0) == MULT_EXPR
		  && TREE_CODE (parg1) != MULT_EXPR)
		return fold_build2_loc (loc, pcode, type,
					fold_build2_loc (loc, PLUS_EXPR, type,
							 fold_convert_loc (loc, type,
									   parg0),
							 fold_convert_loc (loc, type,
									   marg)),
					fold_convert_loc (loc, type, parg1));
	      if (TREE_CODE (parg0) != MULT_EXPR
		  && TREE_CODE (parg1) == MULT_EXPR)
		return
		  fold_build2_loc (loc, PLUS_EXPR, type,
				   fold_convert_loc (loc, type, parg0),
				   fold_build2_loc (loc, pcode, type,
						    fold_convert_loc (loc, type, marg),
						    fold_convert_loc (loc, type,
								      parg1)));
	    }
	}
      else
	{
	  /* See if ARG1 is zero and X + ARG1 reduces to X.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* Likewise if the operands are reversed.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

	  /* Convert X + -C into X - C.  */
	  if (TREE_CODE (arg1) == REAL_CST
	      && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
	    {
	      tem = fold_negate_const (arg1, type);
	      if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
		return fold_build2_loc (loc, MINUS_EXPR, type,
					fold_convert_loc (loc, type, arg0),
					fold_convert_loc (loc, type, tem));
	    }

	  /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
	     to __complex__ ( x, y ).  This is not the same for SNaNs or
	     if signed zeros are involved.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	      tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
	      tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
	      bool arg0rz = false, arg0iz = false;
	      if ((arg0r && (arg0rz = real_zerop (arg0r)))
		  || (arg0i && (arg0iz = real_zerop (arg0i))))
		{
		  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
		  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
		  if (arg0rz && arg1i && real_zerop (arg1i))
		    {
		      tree rp = arg1r ? arg1r
			: build1 (REALPART_EXPR, rtype, arg1);
		      tree ip = arg0i ? arg0i
			: build1 (IMAGPART_EXPR, rtype, arg0);
		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		    }
		  else if (arg0iz && arg1r && real_zerop (arg1r))
		    {
		      tree rp = arg0r ? arg0r
			: build1 (REALPART_EXPR, rtype, arg0);
		      tree ip = arg1i ? arg1i
			: build1 (IMAGPART_EXPR, rtype, arg1);
		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		    }
		}
	    }
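	  /* Illustration: X + -3.0 is rewritten above as X - 3.0, provided
	     the negated constant did not overflow or trapping math is
	     disabled.  */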
	  if (flag_unsafe_math_optimizations
	      && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
	      && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
	      && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
	    return tem;

	  /* Convert x+x into x*2.0.  */
	  if (operand_equal_p (arg0, arg1, 0)
	      && SCALAR_FLOAT_TYPE_P (type))
	    return fold_build2_loc (loc, MULT_EXPR, type, arg0,
				    build_real (type, dconst2));

	  /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
	     We associate floats only if the user has specified
	     -fassociative-math.  */
	  if (flag_associative_math
	      && TREE_CODE (arg1) == PLUS_EXPR
	      && TREE_CODE (arg0) != MULT_EXPR)
	    {
	      tree tree10 = TREE_OPERAND (arg1, 0);
	      tree tree11 = TREE_OPERAND (arg1, 1);
	      if (TREE_CODE (tree11) == MULT_EXPR
		  && TREE_CODE (tree10) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
		  return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
		}
	    }
	  /* Convert (b*c + d*e) + a into b*c + (d*e + a).
	     We associate floats only if the user has specified
	     -fassociative-math.  */
	  if (flag_associative_math
	      && TREE_CODE (arg0) == PLUS_EXPR
	      && TREE_CODE (arg1) != MULT_EXPR)
	    {
	      tree tree00 = TREE_OPERAND (arg0, 0);
	      tree tree01 = TREE_OPERAND (arg0, 1);
	      if (TREE_CODE (tree01) == MULT_EXPR
		  && TREE_CODE (tree00) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
		  return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
		}
	    }
	}
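      /* Illustration: under -fassociative-math, a + (b*c + d*e) is
	 rebalanced above into (a + b*c) + d*e so that the multiply-add
	 chains can be recombined.  */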
    bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
	 is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
	 is a rotate of A by B bits.  */
      {
	enum tree_code code0, code1;
	tree rtype;
	code0 = TREE_CODE (arg0);
	code1 = TREE_CODE (arg1);
	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
	    && operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 0), 0)
	    && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
		TYPE_UNSIGNED (rtype))
	    /* Only create rotates in complete modes.  Other cases are not
	       expanded properly.  */
	    && (element_precision (rtype)
		== element_precision (TYPE_MODE (rtype))))
	  {
	    tree tree01, tree11;
	    enum tree_code code01, code11;

	    tree01 = TREE_OPERAND (arg0, 1);
	    tree11 = TREE_OPERAND (arg1, 1);
	    STRIP_NOPS (tree01);
	    STRIP_NOPS (tree11);
	    code01 = TREE_CODE (tree01);
	    code11 = TREE_CODE (tree11);
	    if (code01 == INTEGER_CST
		&& code11 == INTEGER_CST
		&& (wi::to_widest (tree01) + wi::to_widest (tree11)
		    == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
	      {
		tem = build2_loc (loc, LROTATE_EXPR,
				  TREE_TYPE (TREE_OPERAND (arg0, 0)),
				  TREE_OPERAND (arg0, 0),
				  code0 == LSHIFT_EXPR ? tree01 : tree11);
		return fold_convert_loc (loc, type, tem);
	      }
	    else if (code11 == MINUS_EXPR)
	      {
		tree tree110, tree111;
		tree110 = TREE_OPERAND (tree11, 0);
		tree111 = TREE_OPERAND (tree11, 1);
		STRIP_NOPS (tree110);
		STRIP_NOPS (tree111);
		if (TREE_CODE (tree110) == INTEGER_CST
		    && 0 == compare_tree_int (tree110,
					      element_precision
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree01, tree111, 0))
		  return
		    fold_convert_loc (loc, type,
				      build2 ((code0 == LSHIFT_EXPR
					       ? LROTATE_EXPR
					       : RROTATE_EXPR),
					      TREE_TYPE (TREE_OPERAND (arg0, 0)),
					      TREE_OPERAND (arg0, 0), tree01));
	      }
	    else if (code01 == MINUS_EXPR)
	      {
		tree tree010, tree011;
		tree010 = TREE_OPERAND (tree01, 0);
		tree011 = TREE_OPERAND (tree01, 1);
		STRIP_NOPS (tree010);
		STRIP_NOPS (tree011);
		if (TREE_CODE (tree010) == INTEGER_CST
		    && 0 == compare_tree_int (tree010,
					      element_precision
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree11, tree011, 0))
		  return fold_convert_loc
		    (loc, type,
		     build2 ((code0 != LSHIFT_EXPR
			      ? LROTATE_EXPR
			      : RROTATE_EXPR),
			     TREE_TYPE (TREE_OPERAND (arg0, 0)),
			     TREE_OPERAND (arg0, 0), tree11));
	      }
	  }
      }
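      /* Worked example: for a 32-bit unsigned x, (x << 3) + (x >> 29)
	 satisfies 3 + 29 == 32, so it is folded into a left rotate of x
	 by 3 bits; the (Z - B) forms catch variable shift counts.  */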
    associate:
      /* In most languages, can't associate operations on floats through
	 parentheses.  Rather than remember where the parentheses were, we
	 don't associate floats at all, unless the user has specified
	 -fassociative-math.
	 And, we need to make sure type is not saturating.  */

      if ((! FLOAT_TYPE_P (type) || flag_associative_math)
	  && !TYPE_SATURATING (type))
	{
	  tree var0, con0, lit0, minus_lit0;
	  tree var1, con1, lit1, minus_lit1;
	  tree atype = type;
	  bool ok = true;

	  /* Split both trees into variables, constants, and literals.  Then
	     associate each group together, the constants with literals,
	     then the result with variables.  This increases the chances of
	     literals being recombined later and of generating relocatable
	     expressions for the sum of a constant and literal.  */
	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
			     code == MINUS_EXPR);

	  /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
	  if (code == MINUS_EXPR)
	    code = PLUS_EXPR;

	  /* With undefined overflow prefer doing association in a type
	     which wraps on overflow, if that is one of the operand types.  */
	  if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
	      || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
	    {
	      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
		  && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
		atype = TREE_TYPE (arg0);
	      else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
		       && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
		atype = TREE_TYPE (arg1);
	      gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
	    }

	  /* With undefined overflow we can only associate constants with one
	     variable, and constants whose association doesn't overflow.  */
	  if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
	      || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
	    {
	      if (var0 && var1)
		{
		  tree tmp0 = var0;
		  tree tmp1 = var1;

		  if (TREE_CODE (tmp0) == NEGATE_EXPR)
		    tmp0 = TREE_OPERAND (tmp0, 0);
		  if (CONVERT_EXPR_P (tmp0)
		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
			  <= TYPE_PRECISION (atype)))
		    tmp0 = TREE_OPERAND (tmp0, 0);
		  if (TREE_CODE (tmp1) == NEGATE_EXPR)
		    tmp1 = TREE_OPERAND (tmp1, 0);
		  if (CONVERT_EXPR_P (tmp1)
		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
			  <= TYPE_PRECISION (atype)))
		    tmp1 = TREE_OPERAND (tmp1, 0);
		  /* The only case we can still associate with two variables
		     is if they are the same, modulo negation and bit-pattern
		     preserving conversions.  */
		  if (!operand_equal_p (tmp0, tmp1, 0))
		    ok = false;
		}
	    }

	  /* Only do something if we found more than two objects.  Otherwise,
	     nothing has changed and we risk infinite recursion.  */
	  if (ok
	      && (2 < ((var0 != 0) + (var1 != 0)
		       + (con0 != 0) + (con1 != 0)
		       + (lit0 != 0) + (lit1 != 0)
		       + (minus_lit0 != 0) + (minus_lit1 != 0))))
	    {
	      bool any_overflows = false;
	      if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
	      if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
	      if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
	      if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
	      var0 = associate_trees (loc, var0, var1, code, atype);
	      con0 = associate_trees (loc, con0, con1, code, atype);
	      lit0 = associate_trees (loc, lit0, lit1, code, atype);
	      minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
					    code, atype);

	      /* Preserve the MINUS_EXPR if the negative part of the literal is
		 greater than the positive part.  Otherwise, the multiplicative
		 folding code (i.e. extract_muldiv) may be fooled in case
		 unsigned constants are subtracted, like in the following
		 example: ((X*2 + 4) - 8U)/2.  */
	      if (minus_lit0 && lit0)
		{
		  if (TREE_CODE (lit0) == INTEGER_CST
		      && TREE_CODE (minus_lit0) == INTEGER_CST
		      && tree_int_cst_lt (lit0, minus_lit0))
		    {
		      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
						    MINUS_EXPR, atype);
		      lit0 = 0;
		    }
		  else
		    {
		      lit0 = associate_trees (loc, lit0, minus_lit0,
					      MINUS_EXPR, atype);
		      minus_lit0 = 0;
		    }
		}

	      /* Don't introduce overflows through reassociation.  */
	      if (!any_overflows
		  && ((lit0 && TREE_OVERFLOW (lit0))
		      || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
		return NULL_TREE;

	      if (minus_lit0)
		{
		  if (con0 == 0)
		    return
		      fold_convert_loc (loc, type,
					associate_trees (loc, var0, minus_lit0,
							 MINUS_EXPR, atype));
		  else
		    {
		      con0 = associate_trees (loc, con0, minus_lit0,
					      MINUS_EXPR, atype);
		      return
			fold_convert_loc (loc, type,
					  associate_trees (loc, var0, con0,
							   PLUS_EXPR, atype));
		    }
		}

	      con0 = associate_trees (loc, con0, lit0, code, atype);
	      return
		fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
							      code, atype));
	    }
	}

      return NULL_TREE;
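      /* Illustration: the association code splits (x + 1) + (y + 2) into
	 variables { x, y } and literals { 1, 2 } and recombines them as
	 (x + y) + 3.  */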
    case MINUS_EXPR:
      /* Pointer simplifications for subtraction, simple reassociations.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
	{
	  /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
	  if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
	      && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	    {
	      tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	      tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	      tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	      tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	      return fold_build2_loc (loc, PLUS_EXPR, type,
				      fold_build2_loc (loc, MINUS_EXPR, type,
						       arg00, arg10),
				      fold_build2_loc (loc, MINUS_EXPR, type,
						       arg01, arg11));
	    }
	  /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1
	     simplifies.  */
	  else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	    {
	      tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	      tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	      tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
					  fold_convert_loc (loc, type, arg1));
	      if (tmp)
		return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
	    }
	}
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, PLUS_EXPR, type, op0,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && negate_expr_p (arg1)
	  && reorder_operands_p (arg0, arg1))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type,
						  negate_expr (arg1)),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));
      /* Convert -A - 1 to ~A.  */
      if (TREE_CODE (type) != COMPLEX_TYPE
	  && TREE_CODE (arg0) == NEGATE_EXPR
	  && integer_onep (arg1)
	  && !TYPE_OVERFLOW_TRAPS (type))
	return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));

      /* Convert -1 - A to ~A.  */
      if (TREE_CODE (type) != COMPLEX_TYPE
	  && integer_all_onesp (arg0))
	return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);

      /* X - (X / Y) * Y is X % Y.  */
      if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
	  && operand_equal_p (arg0,
			      TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
			      TREE_OPERAND (arg1, 1), 0))
	return
	  fold_convert_loc (loc, type,
			    fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
					     arg0, TREE_OPERAND (arg1, 1)));
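      /* Worked example for the fold above: with X = 17 and Y = 5,
	 X - (X / Y) * Y == 17 - 15 == 2, which is exactly 17 % 5.  */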
      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg0))
	    return negate_expr (fold_convert_loc (loc, type, arg1));
	  if (integer_zerop (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* Fold A - (A & B) into ~B & A.  */
	  if (!TREE_SIDE_EFFECTS (arg0)
	      && TREE_CODE (arg1) == BIT_AND_EXPR)
	    {
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
		{
		  tree arg10 = fold_convert_loc (loc, type,
						 TREE_OPERAND (arg1, 0));
		  return fold_build2_loc (loc, BIT_AND_EXPR, type,
					  fold_build1_loc (loc, BIT_NOT_EXPR,
							   type, arg10),
					  fold_convert_loc (loc, type, arg0));
		}
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
		{
		  tree arg11 = fold_convert_loc (loc,
						 type, TREE_OPERAND (arg1, 1));
		  return fold_build2_loc (loc, BIT_AND_EXPR, type,
					  fold_build1_loc (loc, BIT_NOT_EXPR,
							   type, arg11),
					  fold_convert_loc (loc, type, arg0));
		}
	    }

	  /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
	     any power of 2 minus 1.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0))
	    {
	      tree mask0 = TREE_OPERAND (arg0, 1);
	      tree mask1 = TREE_OPERAND (arg1, 1);
	      tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);

	      if (operand_equal_p (tem, mask1, 0))
		{
		  tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
					 TREE_OPERAND (arg0, 0), mask1);
		  return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
		}
	    }
	}

      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
	 ARG0 is zero and X + ARG0 reduces to X, since that would mean
	 (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	return negate_expr (fold_convert_loc (loc, type, arg1));

      /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
	 __complex__ ( x, -y ).  This is not the same for SNaNs or if
	 signed zeros are involved.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	{
	  tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	  tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
	  tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
	  bool arg0rz = false, arg0iz = false;
	  if ((arg0r && (arg0rz = real_zerop (arg0r)))
	      || (arg0i && (arg0iz = real_zerop (arg0i))))
	    {
	      tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
	      tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
	      if (arg0rz && arg1i && real_zerop (arg1i))
		{
		  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
					     arg1r ? arg1r
					     : build1 (REALPART_EXPR, rtype, arg1));
		  tree ip = arg0i ? arg0i
		    : build1 (IMAGPART_EXPR, rtype, arg0);
		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		}
	      else if (arg0iz && arg1r && real_zerop (arg1r))
		{
		  tree rp = arg0r ? arg0r
		    : build1 (REALPART_EXPR, rtype, arg0);
		  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
					     arg1i ? arg1i
					     : build1 (IMAGPART_EXPR, rtype, arg1));
		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		}
	    }
	}

      /* Fold &x - &x.  This can happen from &x.foo - &x.
	 This is unsafe for certain floats even in non-IEEE formats.
	 In IEEE, it is unsafe because it does wrong for NaNs.
	 Also note that operand_equal_p is always false if an operand
	 is volatile.  */

      if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
	  && operand_equal_p (arg0, arg1, 0))
	return build_zero_cst (type);

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (negate_expr_p (arg1)
	  && ((FLOAT_TYPE_P (type)
	       /* Avoid this transformation if B is a positive REAL_CST.  */
	       && (TREE_CODE (arg1) != REAL_CST
		   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
	      || INTEGRAL_TYPE_P (type)))
	return fold_build2_loc (loc, PLUS_EXPR, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  negate_expr (arg1)));

      /* Try folding difference of addresses.  */
      {
	HOST_WIDE_INT diff;

	if ((TREE_CODE (arg0) == ADDR_EXPR
	     || TREE_CODE (arg1) == ADDR_EXPR)
	    && ptr_difference_const (arg0, arg1, &diff))
	  return build_int_cst_type (type, diff);
      }

      /* Fold &a[i] - &a[j] to i-j.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
	{
	  tree tem = fold_addr_of_array_ref_difference (loc, type,
							TREE_OPERAND (arg0, 0),
							TREE_OPERAND (arg1, 0));
	  if (tem)
	    return tem;
	}

      if (FLOAT_TYPE_P (type)
	  && flag_unsafe_math_optimizations
	  && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
	  && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
	  && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
	return tem;

      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
	 one.  Make sure the type is not saturating and has the signedness of
	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
	   || TREE_CODE (arg1) == MULT_EXPR)
	  && !TYPE_SATURATING (type)
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
	{
	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}

      goto associate;
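      /* Illustration: fold_plusminus_mult_expr factors x*3 - y*3 above
	 into (x - y) * 3 when the types permit re-association.  */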
    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2_loc (loc, MULT_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				fold_convert_loc (loc, type,
						  negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2_loc (loc, MULT_EXPR, type,
				fold_convert_loc (loc, type,
						  negate_expr (arg0)),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return omit_one_operand_loc (loc, type, arg1, arg0);
	  if (integer_onep (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
	  /* Transform x * -1 into -x.  Make sure to do the negation
	     on the original operand with conversions not stripped
	     because we can only strip non-sign-changing conversions.  */
	  if (integer_minus_onep (arg1))
	    return fold_convert_loc (loc, type, negate_expr (op0));
	  /* Transform x * -C into -x * C if x is easily negatable.  */
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && tree_int_cst_sgn (arg1) == -1
	      && negate_expr_p (arg0)
	      && (tem = negate_expr (arg1)) != arg1
	      && !TREE_OVERFLOW (tem))
	    return fold_build2_loc (loc, MULT_EXPR, type,
				    fold_convert_loc (loc, type,
						      negate_expr (arg0)),
				    tem);

	  /* (a * (1 << b)) is (a << b)  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg1, 0)))
	    return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
				    TREE_OPERAND (arg1, 1));
	  if (TREE_CODE (arg0) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg0, 0)))
	    return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
				    TREE_OPERAND (arg0, 1));

	  /* (A + A) * C -> A * 2 * C  */
	  if (TREE_CODE (arg0) == PLUS_EXPR
	      && TREE_CODE (arg1) == INTEGER_CST
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg0, 1), 0))
	    return fold_build2_loc (loc, MULT_EXPR, type,
				    omit_one_operand_loc (loc, type,
							  TREE_OPERAND (arg0, 0),
							  TREE_OPERAND (arg0, 1)),
				    fold_build2_loc (loc, MULT_EXPR, type,
						     build_int_cst (type, 2), arg1));

	  /* ((T) (X /[ex] C)) * C cancels out if the conversion is
	     sign-changing only.  */
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg0) == EXACT_DIV_EXPR
	      && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
	    return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

	  strict_overflow_p = false;
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					     &strict_overflow_p)))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying "
					"multiplication"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_convert_loc (loc, type, tem);
	    }

	  /* Optimize z * conj(z) for integer complex numbers.  */
	  if (TREE_CODE (arg0) == CONJ_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return fold_mult_zconjz (loc, type, arg1);
	  if (TREE_CODE (arg1) == CONJ_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return fold_mult_zconjz (loc, type, arg0);
	}
      else
	{
	  /* Maybe fold x * 0 to 0.  The expressions aren't the same
	     when x is NaN, since x * 0 is also NaN.  Nor are they the
	     same in modes with signed zeros, since multiplying a
	     negative value by 0 gives -0, not +0.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_zerop (arg1))
	    return omit_one_operand_loc (loc, type, arg1, arg0);
	  /* In IEEE floating point, x*1 is not equivalent to x for snans.
	     Likewise for complex arithmetic with signed zeros.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
		  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	      && real_onep (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* Transform x * -1.0 into -x.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
		  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	      && real_minus_onep (arg1))
	    return fold_convert_loc (loc, type, negate_expr (arg0));

	  /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
	     the result for floating point types due to rounding so it is
	     applied only if -fassociative-math was specified.  */
	  if (flag_associative_math
	      && TREE_CODE (arg0) == RDIV_EXPR
	      && TREE_CODE (arg1) == REAL_CST
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
	    {
	      tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
				      arg1);
	      if (tem)
		return fold_build2_loc (loc, RDIV_EXPR, type, tem,
					TREE_OPERAND (arg0, 1));
	    }

	  /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
	  if (operand_equal_p (arg0, arg1, 0))
	    {
	      tree tem = fold_strip_sign_ops (arg0);
	      if (tem != NULL_TREE)
		{
		  tem = fold_convert_loc (loc, type, tem);
		  return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
		}
	    }

	  /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
	     This is not the same for NaNs or if signed zeros are
	     involved.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && TREE_CODE (arg1) == COMPLEX_CST
	      && real_zerop (TREE_REALPART (arg1)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	      if (real_onep (TREE_IMAGPART (arg1)))
		return
		  fold_build2_loc (loc, COMPLEX_EXPR, type,
				   negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
								 rtype, arg0)),
				   fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
	      else if (real_minus_onep (TREE_IMAGPART (arg1)))
		return
		  fold_build2_loc (loc, COMPLEX_EXPR, type,
				   fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
				   negate_expr (fold_build1_loc (loc, REALPART_EXPR,
								 rtype, arg0)));
	    }

	  /* Optimize z * conj(z) for floating point complex numbers.
	     Guarded by flag_unsafe_math_optimizations as non-finite
	     imaginary components don't produce scalar results.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == CONJ_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return fold_mult_zconjz (loc, type, arg1);
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == CONJ_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return fold_mult_zconjz (loc, type, arg0);

	  if (flag_unsafe_math_optimizations)
	    {
	      enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	      enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	      /* Optimizations of root(...)*root(...).  */
	      if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
		{
		  tree rootfn, arg;
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);

		  /* Optimize sqrt(x)*sqrt(x) as x.  */
		  if (BUILTIN_SQRT_P (fcode0)
		      && operand_equal_p (arg00, arg10, 0)
		      && ! HONOR_SNANS (TYPE_MODE (type)))
		    return arg00;

		  /* Optimize root(x)*root(y) as root(x*y).  */
		  rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
		  return build_call_expr_loc (loc, rootfn, 1, arg);
		}

	      /* Optimize expN(x)*expN(y) as expN(x+y).  */
	      if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
		{
		  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
					      CALL_EXPR_ARG (arg0, 0),
					      CALL_EXPR_ARG (arg1, 0));
		  return build_call_expr_loc (loc, expfn, 1, arg);
		}

	      /* Optimizations of pow(...)*pow(...).  */
	      if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
		  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
		  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
		{
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg01 = CALL_EXPR_ARG (arg0, 1);
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
		  tree arg11 = CALL_EXPR_ARG (arg1, 1);

		  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
		  if (operand_equal_p (arg01, arg11, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      tree arg = fold_build2_loc (loc, MULT_EXPR, type,
						  arg00, arg10);
		      return build_call_expr_loc (loc, powfn, 2, arg, arg01);
		    }

		  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
		  if (operand_equal_p (arg00, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
						  arg01, arg11);
		      return build_call_expr_loc (loc, powfn, 2, arg00, arg);
		    }
		}

	      /* Optimize tan(x)*cos(x) as sin(x).  */
	      if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
		   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
		   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
		   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
		   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
		   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
		  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				      CALL_EXPR_ARG (arg1, 0), 0))
		{
		  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

		  if (sinfn != NULL_TREE)
		    return build_call_expr_loc (loc, sinfn, 1,
						CALL_EXPR_ARG (arg0, 0));
		}

	      /* Optimize x*pow(x,c) as pow(x,c+1).  */
	      if (fcode1 == BUILT_IN_POW
		  || fcode1 == BUILT_IN_POWF
		  || fcode1 == BUILT_IN_POWL)
		{
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
		  tree arg11 = CALL_EXPR_ARG (arg1, 1);
		  if (TREE_CODE (arg11) == REAL_CST
		      && !TREE_OVERFLOW (arg11)
		      && operand_equal_p (arg0, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
		      REAL_VALUE_TYPE c;
		      tree arg;

		      c = TREE_REAL_CST (arg11);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
		    }
		}

	      /* Optimize pow(x,c)*x as pow(x,c+1).  */
	      if (fcode0 == BUILT_IN_POW
		  || fcode0 == BUILT_IN_POWF
		  || fcode0 == BUILT_IN_POWL)
		{
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg01 = CALL_EXPR_ARG (arg0, 1);
		  if (TREE_CODE (arg01) == REAL_CST
		      && !TREE_OVERFLOW (arg01)
		      && operand_equal_p (arg1, arg00, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      REAL_VALUE_TYPE c;
		      tree arg;

		      c = TREE_REAL_CST (arg01);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      return build_call_expr_loc (loc, powfn, 2, arg1, arg);
		    }
		}

	      /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x.  */
	      if (!in_gimple_form
		  && optimize
		  && operand_equal_p (arg0, arg1, 0))
		{
		  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

		  if (powfn)
		    {
		      tree arg = build_real (type, dconst2);
		      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
		    }
		}
	    }
	}
      goto associate;
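      /* Illustration: under -funsafe-math-optimizations, pow (x, 2.0) * x
	 becomes pow (x, 3.0) above, and tan (x) * cos (x) becomes
	 sin (x).  */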
    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = build_zero_cst (type);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg1);
	}

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = build_zero_cst (type);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg0);
	}

      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  int width = TYPE_PRECISION (type), w;
	  wide_int c1 = TREE_OPERAND (arg0, 1);
	  wide_int c2 = arg1;

	  /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
	  if ((c1 & c2) == c1)
	    return omit_one_operand_loc (loc, type, arg1,
					 TREE_OPERAND (arg0, 0));

	  wide_int msk = wi::mask (width, false,
				   TYPE_PRECISION (TREE_TYPE (arg1)));

	  /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
	  if (msk.and_not (c1 | c2) == 0)
	    return fold_build2_loc (loc, BIT_IOR_EXPR, type,
				    TREE_OPERAND (arg0, 0), arg1);

	  /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
	     unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
	     mode which allows further optimizations.  */
	  c1 &= msk;
	  c2 &= msk;
	  wide_int c3 = c1.and_not (c2);
	  for (w = BITS_PER_UNIT; w <= width; w <<= 1)
	    {
	      wide_int mask = wi::mask (w, false,
					TYPE_PRECISION (type));
	      if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
		{
		  c3 = mask;
		  break;
		}
	    }

	  if (c3 != c1)
	    return fold_build2_loc (loc, BIT_IOR_EXPR, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, type,
						     TREE_OPERAND (arg0, 0),
						     wide_int_to_tree (type,
								       c3)),
				    arg1);
	}

      /* (X & Y) | Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X & Y) | X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X | (X & Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X | (Y & X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));

      /* (X & ~Y) | (~X & Y) is X ^ Y  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR)
	{
	  tree a0, a1, l0, l1, n0, n1;

	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

	  n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
	  n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);

	  if ((operand_equal_p (n0, a0, 0)
	       && operand_equal_p (n1, a1, 0))
	      || (operand_equal_p (n0, a1, 0)
		  && operand_equal_p (n1, a0, 0)))
	    return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
	}

      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;

      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

	 This results in more efficient code for machines without a NAND
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NAND instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return
	    fold_build1_loc (loc, BIT_NOT_EXPR, type,
			     build2 (BIT_AND_EXPR, type,
				     fold_convert_loc (loc, type,
						       TREE_OPERAND (arg0, 0)),
				     fold_convert_loc (loc, type,
						       TREE_OPERAND (arg1, 0))));
	}

      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;
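      /* Worked example for the canonicalization above: (X & 0xF0) | 0x3C
	 drops the C1 bits already covered by C2, giving
	 (X & 0xC0) | 0x3C.  */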
    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_all_onesp (arg1))
	return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = build_zero_cst (type);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg1);
	}

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = build_zero_cst (type);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg0);
	}

      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
	 with a constant, and the two constants have no bits in common,
	 we should treat this as a BIT_IOR_EXPR since this may produce more
	 simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && wi::bit_and (TREE_OPERAND (arg0, 1),
			  TREE_OPERAND (arg1, 1)) == 0)
	{
	  code = BIT_IOR_EXPR;
	  goto bit_ior;
	}

      /* (X | Y) ^ X -> Y & ~X  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  tree t2 = TREE_OPERAND (arg0, 1);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
				arg1);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				fold_convert_loc (loc, type, t2),
				fold_convert_loc (loc, type, t1));
	  return t1;
	}

      /* (Y | X) ^ X -> Y & ~X  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tree t2 = TREE_OPERAND (arg0, 0);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
				arg1);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				fold_convert_loc (loc, type, t2),
				fold_convert_loc (loc, type, t1));
	  return t1;
	}

      /* X ^ (X | Y) -> Y & ~X  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
	{
	  tree t2 = TREE_OPERAND (arg1, 1);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
				arg0);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				fold_convert_loc (loc, type, t2),
				fold_convert_loc (loc, type, t1));
	  return t1;
	}

      /* X ^ (Y | X) -> Y & ~X  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
	{
	  tree t2 = TREE_OPERAND (arg1, 0);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
				arg0);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				fold_convert_loc (loc, type, t2),
				fold_convert_loc (loc, type, t1));
	  return t1;
	}

      /* Convert ~X ^ ~Y to X ^ Y.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));

      /* Convert ~X ^ C to X ^ ~C.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));

      /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_onep (arg1))
	return fold_build2_loc (loc, EQ_EXPR, type, arg0,
				build_zero_cst (TREE_TYPE (arg0)));

      /* Fold (X & Y) ^ Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}
      /* Fold (X & Y) ^ X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}
      /* Fold X ^ (X & Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_convert_loc (loc, type, arg0),
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
	}
      /* Fold X ^ (Y & X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg0));
	}

      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;
11624 if (integer_all_onesp (arg1
))
11625 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11626 if (integer_zerop (arg1
))
11627 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
11628 if (operand_equal_p (arg0
, arg1
, 0))
11629 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));

      /* ~X & X, (X == 0) & X, and !X & X are always zero.  */
      if ((TREE_CODE (arg0) == BIT_NOT_EXPR
	   || TREE_CODE (arg0) == TRUTH_NOT_EXPR
	   || (TREE_CODE (arg0) == EQ_EXPR
	       && integer_zerop (TREE_OPERAND (arg0, 1))))
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X & ~X, X & (X == 0), and X & !X are always zero.  */
      if ((TREE_CODE (arg1) == BIT_NOT_EXPR
	   || TREE_CODE (arg1) == TRUTH_NOT_EXPR
	   || (TREE_CODE (arg1) == EQ_EXPR
	       && integer_zerop (TREE_OPERAND (arg1, 1))))
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree tmp1 = fold_convert_loc (loc, type, arg1);
	  tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	  tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
	  tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
	  return
	    fold_convert_loc (loc, type,
			      fold_build2_loc (loc, BIT_IOR_EXPR,
					   type, tmp2, tmp3));
	}
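
      /* Example: since 0xf0 & 0x3c == 0x30, (X | 0xf0) & 0x3c becomes
	 (X & 0x3c) | 0x30, turning the buried constant into an explicit
	 IOR operand that later folds can act on.  */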

      /* (X | Y) & Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X | Y) & X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X & (X | Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X & (Y | X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));

      /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_onep (arg1))
	{
	  tree tem2;
	  tem = TREE_OPERAND (arg0, 0);
	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
				  tem, tem2);
	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
			      build_zero_cst (TREE_TYPE (tem)));
	}

      /* Fold ~X & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && integer_onep (arg1))
	{
	  tree tem2;
	  tem = TREE_OPERAND (arg0, 0);
	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
				  tem, tem2);
	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
			      build_zero_cst (TREE_TYPE (tem)));
	}
      /* Fold !X & 1 as X == 0.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && integer_onep (arg1))
	{
	  tem = TREE_OPERAND (arg0, 0);
	  return fold_build2_loc (loc, EQ_EXPR, type, tem,
			      build_zero_cst (TREE_TYPE (tem)));
	}

      /* Fold (X ^ Y) & Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
			      fold_convert_loc (loc, type, arg1));
	}
      /* Fold (X ^ Y) & X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
			      fold_convert_loc (loc, type, arg1));
	}
      /* Fold X & (X ^ Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
			      fold_convert_loc (loc, type, arg0),
			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
	}
      /* Fold X & (Y ^ X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
			      fold_convert_loc (loc, type, arg0));
	}

      /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
	 multiple of 1 << CST.  */
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  wide_int cst1 = arg1;
	  wide_int ncst1 = -cst1;
	  if ((cst1 & ncst1) == ncst1
	      && multiple_of_p (type, arg0,
				wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
	    return fold_convert_loc (loc, type, arg0);
	}
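
      /* Example: (X * 8) & -4 folds to X * 8, because -4 keeps every bit
	 above bit 1 and any multiple of 8 is also a multiple of 4, so the
	 AND cannot clear a bit that could be set.  */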

      /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
	 bits from CST2.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == MULT_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  wide_int warg1 = arg1;
	  wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));

	  if (masked == 0)
	    return omit_two_operands_loc (loc, type, build_zero_cst (type),
					  arg0, arg1);
	  else if (masked != warg1)
	    {
	      /* Avoid the transform if arg1 is a mask of some
	         mode which allows further optimizations.  */
	      int pop = wi::popcount (warg1);
	      if (!(pop >= BITS_PER_UNIT
		    && exact_log2 (pop) != -1
		    && wi::mask (pop, false, warg1.get_precision ()) == warg1))
		return fold_build2_loc (loc, code, type, op0,
					wide_int_to_tree (type, masked));
	    }
	}
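
      /* Example: X * 4 has its two low bits clear, so (X * 4) & 3 folds
	 to zero, while (X * 4) & 7 drops the known-zero bits and becomes
	 (X * 4) & 4.  */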

      /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
	 ((A & N) + B) & M -> (A + B) & M
	 Similarly if (N & M) == 0,
	 ((A | N) + B) & M -> (A + B) & M
	 and for - instead of + (or unary - instead of +)
	 and/or ^ instead of |.
	 If B is constant and (B & M) == 0, fold into A & M.  */
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  wide_int cst1 = arg1;
	  if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
	      && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	      && (TREE_CODE (arg0) == PLUS_EXPR
		  || TREE_CODE (arg0) == MINUS_EXPR
		  || TREE_CODE (arg0) == NEGATE_EXPR)
	      && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
		  || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
	    {
	      tree pmop[2];
	      int which = 0;
	      wide_int cst0;

	      /* Now we know that arg0 is (C + D) or (C - D) or
		 -C and arg1 (M) is == (1LL << cst) - 1.
		 Store C into PMOP[0] and D into PMOP[1].  */
	      pmop[0] = TREE_OPERAND (arg0, 0);
	      pmop[1] = NULL;
	      if (TREE_CODE (arg0) != NEGATE_EXPR)
		{
		  pmop[1] = TREE_OPERAND (arg0, 1);
		  which = 1;
		}

	      if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
		which = -1;

	      for (; which >= 0; which--)
		switch (TREE_CODE (pmop[which]))
		  {
		  case BIT_AND_EXPR:
		  case BIT_IOR_EXPR:
		  case BIT_XOR_EXPR:
		    if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
			!= INTEGER_CST)
		      break;
		    cst0 = TREE_OPERAND (pmop[which], 1);
		    cst0 &= cst1;
		    if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
		      {
			if (cst0 != cst1)
			  break;
		      }
		    else if (cst0 != 0)
		      break;
		    /* If C or D is of the form (A & N) where
		       (N & M) == M, or of the form (A | N) or
		       (A ^ N) where (N & M) == 0, replace it with A.  */
		    pmop[which] = TREE_OPERAND (pmop[which], 0);
		    break;
		  case INTEGER_CST:
		    /* If C or D is a N where (N & M) == 0, it can be
		       omitted (assumed 0).  */
		    if ((TREE_CODE (arg0) == PLUS_EXPR
			 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
			&& (cst1 & pmop[which]) == 0)
		      pmop[which] = NULL;
		    break;
		  default:
		    break;
		  }

	      /* Only build anything new if we optimized one or both arguments
		 above.  */
	      if (pmop[0] != TREE_OPERAND (arg0, 0)
		  || (TREE_CODE (arg0) != NEGATE_EXPR
		      && pmop[1] != TREE_OPERAND (arg0, 1)))
		{
		  tree utype = TREE_TYPE (arg0);
		  if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
		    {
		      /* Perform the operations in a type that has defined
			 overflow behavior.  */
		      utype = unsigned_type_for (TREE_TYPE (arg0));
		      if (pmop[0] != NULL)
			pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
		      if (pmop[1] != NULL)
			pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
		    }

		  if (TREE_CODE (arg0) == NEGATE_EXPR)
		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
		  else if (TREE_CODE (arg0) == PLUS_EXPR)
		    {
		      if (pmop[0] != NULL && pmop[1] != NULL)
			tem = fold_build2_loc (loc, PLUS_EXPR, utype,
					       pmop[0], pmop[1]);
		      else if (pmop[0] != NULL)
			tem = pmop[0];
		      else if (pmop[1] != NULL)
			tem = pmop[1];
		      else
			return build_int_cst (type, 0);
		    }
		  else if (pmop[0] == NULL)
		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
		  else
		    tem = fold_build2_loc (loc, MINUS_EXPR, utype,
					   pmop[0], pmop[1]);
		  /* TEM is now the new binary +, - or unary - replacement.  */
		  tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
					 fold_convert_loc (loc, utype, arg1));
		  return fold_convert_loc (loc, type, tem);
		}
	    }
	}
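
      /* Example: with M == 7 and N == 15 we have (N & M) == M, so
	 ((A & 15) + B) & 7 simplifies to (A + B) & 7; the inner AND
	 cannot affect the three bits that survive the outer mask.  */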

      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

	  wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
	  if (mask == -1)
	    return
	      fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	}

      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

	 This results in more efficient code for machines without a NOR
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NOR instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold_build1_loc (loc, BIT_NOT_EXPR, type,
			      build2 (BIT_IOR_EXPR, type,
				      fold_convert_loc (loc, type,
							TREE_OPERAND (arg0, 0)),
				      fold_convert_loc (loc, type,
							TREE_OPERAND (arg1, 0))));
	}

      /* If arg0 is derived from the address of an object or function, we may
	 be able to fold this expression using the object or function's
	 alignment.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
	{
	  unsigned HOST_WIDE_INT modulus, residue;
	  unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);

	  modulus = get_pointer_modulus_and_residue (arg0, &residue,
						     integer_onep (arg1));

	  /* This works because modulus is a power of 2.  If this weren't the
	     case, we'd have to replace it by its greatest power-of-2
	     divisor: modulus & -modulus.  */
	  if (low < modulus)
	    return build_int_cst (type, residue & low);
	}
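
      /* Example: if ARG0 is the address of an object known to be 16-byte
	 aligned, modulus is 16 and residue is 0, so ANDing that address
	 with 15 folds to the constant 0.  */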

      /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
	      (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
	 if the new mask might be further optimized.  */
      if ((TREE_CODE (arg0) == LSHIFT_EXPR
	   || TREE_CODE (arg0) == RSHIFT_EXPR)
	  && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
	  && TREE_CODE (arg1) == INTEGER_CST
	  && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
	  && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
	  && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
	      < TYPE_PRECISION (TREE_TYPE (arg0))))
	{
	  unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
	  unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
	  unsigned HOST_WIDE_INT newmask, zerobits = 0;
	  tree shift_type = TREE_TYPE (arg0);

	  if (TREE_CODE (arg0) == LSHIFT_EXPR)
	    zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
	  else if (TREE_CODE (arg0) == RSHIFT_EXPR
		   && TYPE_PRECISION (TREE_TYPE (arg0))
		      == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
	    {
	      prec = TYPE_PRECISION (TREE_TYPE (arg0));
	      tree arg00 = TREE_OPERAND (arg0, 0);
	      /* See if more bits can be proven as zero because of
		 zero extension.  */
	      if (TREE_CODE (arg00) == NOP_EXPR
		  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
		{
		  tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
		  if (TYPE_PRECISION (inner_type)
		      == GET_MODE_PRECISION (TYPE_MODE (inner_type))
		      && TYPE_PRECISION (inner_type) < prec)
		    {
		      prec = TYPE_PRECISION (inner_type);
		      /* See if we can shorten the right shift.  */
		      if (shiftc < prec)
			shift_type = inner_type;
		      /* Otherwise X >> C1 is all zeros, so we'll optimize
			 it into (X, 0) later on by making sure zerobits
			 is all ones.  */
		    }
		}
	      zerobits = ~(unsigned HOST_WIDE_INT) 0;
	      if (shiftc < prec)
		{
		  zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
		  zerobits <<= prec - shiftc;
		}
	      /* For arithmetic shift if sign bit could be set, zerobits
		 can contain actually sign bits, so no transformation is
		 possible, unless MASK masks them all away.  In that
		 case the shift needs to be converted into logical shift.  */
	      if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
		  && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
		{
		  if ((mask & zerobits) == 0)
		    shift_type = unsigned_type_for (TREE_TYPE (arg0));
		  else
		    zerobits = 0;
		}
	    }

	  /* ((X << 16) & 0xff00) is (X, 0).  */
	  if ((mask & zerobits) == mask)
	    return omit_one_operand_loc (loc, type,
					 build_int_cst (type, 0), arg0);

	  newmask = mask | zerobits;
	  if (newmask != mask && (newmask & (newmask + 1)) == 0)
	    {
	      /* Only do the transformation if NEWMASK is some integer
		 mode's mask.  */
	      for (prec = BITS_PER_UNIT;
		   prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
		if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
		  break;
	      if (prec < HOST_BITS_PER_WIDE_INT
		  || newmask == ~(unsigned HOST_WIDE_INT) 0)
		{
		  tree newmaskt;

		  if (shift_type != TREE_TYPE (arg0))
		    {
		      tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
					 fold_convert_loc (loc, shift_type,
							   TREE_OPERAND (arg0, 0)),
					 TREE_OPERAND (arg0, 1));
		      tem = fold_convert_loc (loc, type, tem);
		    }
		  else
		    tem = op0;
		  newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
		  if (!tree_int_cst_equal (newmaskt, arg1))
		    return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
		}
	    }
	}
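
      /* Example: for a 32-bit unsigned X, (X << 4) & 0xf0 widens the mask
	 with the four low bits the shift already forces to zero, giving
	 (X << 4) & 0xff; 0xff is a QImode-style mask that later passes may
	 fold further.  */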

      goto associate;

    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
	 of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
	  && real_zerop (arg1))
	return NULL_TREE;

      /* Optimize A / A to 1.0 if we don't care about
	 NaNs or Infinities.  Skip the transformation
	 for non-real operands.  */
      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
	  && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
	  && operand_equal_p (arg0, arg1, 0))
	{
	  tree r = build_real (TREE_TYPE (arg0), dconst1);

	  return omit_two_operands_loc (loc, type, r, arg0, arg1);
	}

      /* The complex version of the above A / A optimization.  */
      if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
	  && operand_equal_p (arg0, arg1, 0))
	{
	  tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
	  if (! HONOR_NANS (TYPE_MODE (elem_type))
	      && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
	    {
	      tree r = build_real (elem_type, dconst1);
	      /* omit_two_operands will call fold_convert for us.  */
	      return omit_two_operands_loc (loc, type, r, arg0, arg1);
	    }
	}

      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
			    TREE_OPERAND (arg0, 0),
			    negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2_loc (loc, RDIV_EXPR, type,
			    negate_expr (arg0),
			    TREE_OPERAND (arg1, 0));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_minus_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type,
						  negate_expr (arg0)));

      /* If ARG1 is a constant, we can convert this to a multiply by the
	 reciprocal.  This does not have the same rounding properties,
	 so only do this if -freciprocal-math.  We can actually
	 always safely do it if ARG1 is a power of two, but it's hard to
	 tell if it is or not in a portable manner.  */
      if (optimize
	  && (TREE_CODE (arg1) == REAL_CST
	      || (TREE_CODE (arg1) == COMPLEX_CST
		  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
	      || (TREE_CODE (arg1) == VECTOR_CST
		  && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
	{
	  if (flag_reciprocal_math
	      && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
	    return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
	  /* Find the reciprocal if optimizing and the result is exact.
	     TODO: Complex reciprocal not implemented.  */
	  if (TREE_CODE (arg1) != COMPLEX_CST)
	    {
	      tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);

	      if (inverse)
		return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
	    }
	}
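
      /* Example: with -freciprocal-math, X / 5.0 becomes X * (1.0 / 5.0)
	 despite the rounding difference; X / 4.0 becomes X * 0.25 even
	 without the flag, since 0.25 is an exactly representable
	 reciprocal.  */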

      /* Convert A/B/C to A/(B*C).  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg0) == RDIV_EXPR)
	return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
			    fold_build2_loc (loc, MULT_EXPR, type,
					 TREE_OPERAND (arg0, 1), arg1));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg1) == RDIV_EXPR)
	return fold_build2_loc (loc, MULT_EXPR, type,
			    fold_build2_loc (loc, RDIV_EXPR, type, arg0,
					 TREE_OPERAND (arg1, 0)),
			    TREE_OPERAND (arg1, 1));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (arg0) == REAL_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
	{
	  tree tem = const_binop (RDIV_EXPR, arg0,
				  TREE_OPERAND (arg1, 1));
	  if (tem)
	    return fold_build2_loc (loc, RDIV_EXPR, type, tem,
				TREE_OPERAND (arg1, 0));
	}

      if (flag_unsafe_math_optimizations)
	{
	  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	  enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	  /* Optimize sin(x)/cos(x) as tan(x).  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				  CALL_EXPR_ARG (arg1, 0), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
	    }

	  /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
	  if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				  CALL_EXPR_ARG (arg1, 0), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		{
		  tree tmp = build_call_expr_loc (loc, tanfn, 1,
					      CALL_EXPR_ARG (arg0, 0));
		  return fold_build2_loc (loc, RDIV_EXPR, type,
				      build_real (type, dconst1), tmp);
		}
	    }

	  /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
	     NaNs or Infinities.  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg1, 0);

	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		  && operand_equal_p (arg00, arg01, 0))
		{
		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		  if (cosfn != NULL_TREE)
		    return build_call_expr_loc (loc, cosfn, 1, arg00);
		}
	    }

	  /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
	     NaNs or Infinities.  */
	  if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg1, 0);

	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		  && operand_equal_p (arg00, arg01, 0))
		{
		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		  if (cosfn != NULL_TREE)
		    {
		      tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
		      return fold_build2_loc (loc, RDIV_EXPR, type,
					  build_real (type, dconst1),
					  tmp);
		    }
		}
	    }

	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
	  if (fcode0 == BUILT_IN_POW
	      || fcode0 == BUILT_IN_POWF
	      || fcode0 == BUILT_IN_POWL)
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      if (TREE_CODE (arg01) == REAL_CST
		  && !TREE_OVERFLOW (arg01)
		  && operand_equal_p (arg1, arg00, 0))
		{
		  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  REAL_VALUE_TYPE c;
		  tree arg;

		  c = TREE_REAL_CST (arg01);
		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
		  arg = build_real (type, c);
		  return build_call_expr_loc (loc, powfn, 2, arg1, arg);
		}
	    }
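
	  /* Example: pow (x, 3.0) / x becomes pow (x, 2.0) when the
	     divisor is the same x, trading a division for a constant
	     decrement of the exponent.  */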

	  /* Optimize a/root(b/c) into a*root(c/b).  */
	  if (BUILTIN_ROOT_P (fcode1))
	    {
	      tree rootarg = CALL_EXPR_ARG (arg1, 0);

	      if (TREE_CODE (rootarg) == RDIV_EXPR)
		{
		  tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
		  tree b = TREE_OPERAND (rootarg, 0);
		  tree c = TREE_OPERAND (rootarg, 1);

		  tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);

		  tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
		  return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
		}
	    }

	  /* Optimize x/expN(y) into x*expN(-y).  */
	  if (BUILTIN_EXPONENT_P (fcode1))
	    {
	      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
	      tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
	      arg1 = build_call_expr_loc (loc,
				      expfn, 1,
				      fold_convert_loc (loc, type, arg));
	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
	    }

	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
	  if (fcode1 == BUILT_IN_POW
	      || fcode1 == BUILT_IN_POWF
	      || fcode1 == BUILT_IN_POWL)
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
	      tree arg10 = CALL_EXPR_ARG (arg1, 0);
	      tree arg11 = CALL_EXPR_ARG (arg1, 1);
	      tree neg11 = fold_convert_loc (loc, type,
					 negate_expr (arg11));
	      arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
	    }
	}
      return NULL_TREE;

    case TRUNC_DIV_EXPR:
      /* Optimize (X & (-A)) / A where A is a power of 2,
	 to X >> log2(A).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
	  && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
	{
	  tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
				      arg1, TREE_OPERAND (arg0, 1));
	  if (sum && integer_zerop (sum)) {
	    tree pow2 = build_int_cst (integer_type_node,
				       wi::exact_log2 (arg1));
	    return fold_build2_loc (loc, RSHIFT_EXPR, type,
				    TREE_OPERAND (arg0, 0), pow2);
	  }
	}
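
      /* Example: for signed X, (X & -8) / 8 becomes X >> 3; the AND
	 guarantees that the low bits the division would discard are
	 already zero.  */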

      /* Fall through */

    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
	 a power of 2, to A >> (N + log2(B)).  */
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == LSHIFT_EXPR
	  && (TYPE_UNSIGNED (type)
	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	{
	  tree sval = TREE_OPERAND (arg1, 0);
	  if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
	    {
	      tree sh_cnt = TREE_OPERAND (arg1, 1);
	      tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
					 wi::exact_log2 (sval));

	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying A / (B << N)"),
				       WARN_STRICT_OVERFLOW_MISC);

	      sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
					sh_cnt, pow2);
	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
				      fold_convert_loc (loc, type, arg0), sh_cnt);
	    }
	}
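
      /* Example: for unsigned A, A / (4 << N) becomes A >> (N + 2),
	 since 4 << N equals 1 << (N + 2).  */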

      /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
	 TRUNC_DIV_EXPR.  Rewrite into the latter in this case.  */
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_UNSIGNED (type)
	  && code == FLOOR_DIV_EXPR)
	return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);

      /* Fall through */

    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
	return NULL_TREE;
      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && wi::eq_p (arg1, -1))
	return fold_convert_loc (loc, type, negate_expr (arg0));

      /* Convert -A / -B to A / B when the type is signed and overflow is
	 undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg0) == NEGATE_EXPR
	  && negate_expr_p (arg1))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2_loc (loc, code, type,
			      fold_convert_loc (loc, type,
						TREE_OPERAND (arg0, 0)),
			      fold_convert_loc (loc, type,
						negate_expr (arg1)));
	}
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && negate_expr_p (arg0))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2_loc (loc, code, type,
			      fold_convert_loc (loc, type,
						negate_expr (arg0)),
			      fold_convert_loc (loc, type,
						TREE_OPERAND (arg1, 0)));
	}

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	 operation, EXACT_DIV_EXPR.

	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, it's not clear if they do
	 after the last round of changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	  && multiple_of_p (type, arg0, arg1))
	return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert_loc (loc, type, tem);
	}

      return NULL_TREE;

    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % 1 is always zero, but be sure to preserve any side
	 effects in X.  */
      if (integer_onep (arg1))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % 0, return X % 0 unchanged so that we can get the
	 proper warnings and errors.  */
      if (integer_zerop (arg1))
	return NULL_TREE;

      /* 0 % X is always zero, but be sure to preserve any side
	 effects in X.  Place this after checking for X == 0.  */
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && wi::eq_p (arg1, -1))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
	  && TYPE_SIGN (type) == SIGNED
	  && TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)
	  && wi::neg_p (arg1)
	  && !TYPE_OVERFLOW_TRAPS (type)
	  /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
	  && !sign_bit_p (arg1, arg1))
	return fold_build2_loc (loc, code, type,
			    fold_convert_loc (loc, type, arg0),
			    fold_convert_loc (loc, type,
					      negate_expr (arg1)));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && !TYPE_OVERFLOW_TRAPS (type))
	return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
			    fold_convert_loc (loc, type,
					      TREE_OPERAND (arg1, 0)));

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying modulus"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert_loc (loc, type, tem);
	}

      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
	 i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
	  && (TYPE_UNSIGNED (type)
	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	{
	  tree c = arg1;
	  /* Also optimize A % (C << N) where C is a power of 2,
	     to A & ((C << N) - 1).  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR)
	    c = TREE_OPERAND (arg1, 0);

	  if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
	    {
	      tree mask
		= fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
				   build_int_cst (TREE_TYPE (arg1), 1));
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying "
					"X % (power of two)"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_convert_loc (loc, type, arg0),
				      fold_convert_loc (loc, type, mask));
	    }
	}

      return NULL_TREE;
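
      /* Example: for unsigned X, X % 16 becomes X & 15, and
	 X % (2 << N) becomes X & ((2 << N) - 1).  */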

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
	  && tree_expr_nonnegative_p (arg1))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Prefer vector1 << scalar to vector1 << vector2
	 if vector2 is uniform.  */
      if (VECTOR_TYPE_P (TREE_TYPE (arg1))
	  && (tem = uniform_vector_p (arg1)) != NULL_TREE)
	return fold_build2_loc (loc, code, type, op0, tem);

      /* Since negative shift count is not well-defined,
	 don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	return NULL_TREE;

      prec = element_precision (type);

      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
      if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
	  && tree_to_uhwi (arg1) < prec
	  && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
	  && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
	{
	  unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
			      + tree_to_uhwi (arg1));

	  /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
	     being well defined.  */
	  if (low >= prec)
	    {
	      if (code == LROTATE_EXPR || code == RROTATE_EXPR)
		low = low % prec;
	      else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
		return omit_one_operand_loc (loc, type, build_zero_cst (type),
					     TREE_OPERAND (arg0, 0));
	      else
		low = prec - 1;
	    }

	  return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				  build_int_cst (TREE_TYPE (arg1), low));
	}
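
      /* Example: (X << 3) << 5 becomes X << 8; for a 32-bit unsigned X,
	 (X << 20) << 20 shifts everything out and folds to 0.  */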

      /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
	 into x & ((unsigned)-1 >> c) for unsigned types.  */
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
	   || (TYPE_UNSIGNED (type)
	       && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
	  && tree_fits_uhwi_p (arg1)
	  && tree_to_uhwi (arg1) < prec
	  && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
	  && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
	{
	  HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
	  HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
	  tree lshift;
	  tree arg00;

	  if (low0 == low1)
	    {
	      arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

	      lshift = build_minus_one_cst (type);
	      lshift = const_binop (code, lshift, arg1);

	      return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
	    }
	}

      /* Rewrite an LROTATE_EXPR by a constant into an
	 RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
	{
	  tree tem = build_int_cst (TREE_TYPE (arg1), prec);
	  tem = const_binop (MINUS_EXPR, tem, arg1);
	  return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
	}

      /* If we have a rotate of a bit operation with the rotate count and
	 the second operand of the bit operation both constant,
	 permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == BIT_AND_EXPR
	      || TREE_CODE (arg0) == BIT_IOR_EXPR
	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, TREE_CODE (arg0), type,
			    fold_build2_loc (loc, code, type,
					 TREE_OPERAND (arg0, 0), arg1),
			    fold_build2_loc (loc, code, type,
					 TREE_OPERAND (arg0, 1), arg1));

      /* Two consecutive rotates adding up to some integer
	 multiple of the precision of the type can be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == RROTATE_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
			     prec) == 0)
	return TREE_OPERAND (arg0, 0);
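
      /* Example: rotating a 32-bit value right by 13 and then by 19
	 rotates by 32 in total, which is the identity.  */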

      /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
	      (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
	 if the latter can be further optimized.  */
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree mask = fold_build2_loc (loc, code, type,
				   fold_convert_loc (loc, type,
						     TREE_OPERAND (arg0, 1)),
				   arg1);
	  tree shift = fold_build2_loc (loc, code, type,
				    fold_convert_loc (loc, type,
						      TREE_OPERAND (arg0, 0)),
				    arg1);
	  tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
	  if (tem)
	    return tem;
	}

      return NULL_TREE;

    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
      if (tem)
	return tem;
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_MAX_VALUE (type)
	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
      if (tem)
	return tem;
      goto associate;

    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
	return fold_convert_loc (loc, type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
	 must be evaluated.  */
      if (integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
	 case will be handled here.  */
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
	 means A >= Y && A != MAX, but in this case we know that
	 A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
	  && !TREE_SIDE_EFFECTS (arg1))
	{
	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
	  if (tem && !operand_equal_p (tem, arg0, 0))
	    return fold_build2_loc (loc, code, type, tem, arg1);

	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
	  if (tem && !operand_equal_p (tem, arg1, 0))
	    return fold_build2_loc (loc, code, type, arg0, tem);
	}

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
	  != NULL_TREE)
	return tem;

      return NULL_TREE;
:
12752 /* Note that the operands of this must be ints
12753 and their values must be 0 or true.
12754 ("true" is a fixed value perhaps depending on the language.) */
12755 /* If first arg is constant true, return it. */
12756 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
12757 return fold_convert_loc (loc
, type
, arg0
);
12758 case TRUTH_OR_EXPR
:
12759 /* If either arg is constant zero, drop it. */
12760 if (TREE_CODE (arg0
) == INTEGER_CST
&& integer_zerop (arg0
))
12761 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
12762 if (TREE_CODE (arg1
) == INTEGER_CST
&& integer_zerop (arg1
)
12763 /* Preserve sequence points. */
12764 && (code
!= TRUTH_ORIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
12765 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12766 /* If second arg is constant true, result is true, but we must
12767 evaluate first arg. */
12768 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
))
12769 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
12770 /* Likewise for first arg, but note this only occurs here for
12772 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
12773 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12775 /* !X || X is always true. */
12776 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12777 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12778 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
12779 /* X || !X is always true. */
12780 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12781 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12782 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
12784 /* (X && !Y) || (!X && Y) is X ^ Y */
12785 if (TREE_CODE (arg0
) == TRUTH_AND_EXPR
12786 && TREE_CODE (arg1
) == TRUTH_AND_EXPR
)
12788 tree a0
, a1
, l0
, l1
, n0
, n1
;
12790 a0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
12791 a1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
12793 l0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
12794 l1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
12796 n0
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l0
);
12797 n1
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l1
);
12799 if ((operand_equal_p (n0
, a0
, 0)
12800 && operand_equal_p (n1
, a1
, 0))
12801 || (operand_equal_p (n0
, a1
, 0)
12802 && operand_equal_p (n1
, a0
, 0)))
12803 return fold_build2_loc (loc
, TRUTH_XOR_EXPR
, type
, l0
, n1
);
12806 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))

    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
	{
	  tem = invert_truthvalue_loc (loc, arg0);
	  return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	}
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      return NULL_TREE;

    case EQ_EXPR:
    case NE_EXPR:
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;

      /* bool_var != 0 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == NE_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var == 1 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == EQ_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var != 1 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == NE_EXPR)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
						  TREE_TYPE (arg0), arg0));

      /* bool_var == 0 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == EQ_EXPR)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
						  TREE_TYPE (arg0), arg0));

      /* !exp != 0 becomes !exp  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
	  && code == NE_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* If this is an equality comparison of the address of two non-weak,
	 unaliased symbols neither of which are extern (since we do not
	 have access to attributes for externs), then we know the result.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
	{
	  /* We know that we're looking at the address of two
	     non-weak, unaliased, static _DECL nodes.

	     It is both wasteful and incorrect to call operand_equal_p
	     to compare the two ADDR_EXPR nodes.  It is wasteful in that
	     all we need to do is test pointer equality for the arguments
	     to the two ADDR_EXPR nodes.  It is incorrect to use
	     operand_equal_p as that function is NOT equivalent to a
	     C equality test.  It can in fact return false for two
	     objects which would test as equal using the C equality
	     operator.  */
	  bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
	  return constant_boolean_node (equal
					? code == EQ_EXPR : code != EQ_EXPR,
					type);
	}

      /* Similarly for a NEGATE_EXPR.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
							arg1)))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a BIT_XOR_EXPR;  X ^ C1 == C2 is X == (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
			    fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
					 fold_convert_loc (loc,
							   TREE_TYPE (arg0),
							   arg1),
					 TREE_OPERAND (arg0, 1)));

      /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR
	   || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	   || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
									0)),
			      arg1, 0)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	      || POINTER_TYPE_P (TREE_TYPE (arg0))))
	{
	  tree val = TREE_OPERAND (arg0, 1);
	  return omit_two_operands_loc (loc, type,
					fold_build2_loc (loc, code, type,
						     val,
						     build_int_cst (TREE_TYPE (val),
								    0)),
					TREE_OPERAND (arg0, 0), arg1);
	}

      /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
									1)),
			      arg1, 0)
	  && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
	{
	  return omit_two_operands_loc (loc, type,
					code == NE_EXPR
					? boolean_true_node : boolean_false_node,
					TREE_OPERAND (arg0, 1), arg1);
	}

      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      if (TREE_CODE (arg0) == ABS_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1)))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);

      /* If this is an EQ or NE comparison with zero and ARG0 is
	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
	 two operations, but the latter can be done in one less insn
	 on machines that have only two-operand insns or on which a
	 constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_zerop (arg1))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  if (TREE_CODE (arg00) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg00, 0)))
	    {
	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
				      arg01, TREE_OPERAND (arg00, 1));
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				 build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, TREE_TYPE (arg1), tem),
				  arg1);
	    }
	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
		   && integer_onep (TREE_OPERAND (arg01, 0)))
	    {
	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
				      arg00, TREE_OPERAND (arg01, 1));
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				 build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, TREE_TYPE (arg1), tem),
				  arg1);
	    }
	}

      /* If this is an NE or EQ comparison of zero against the result of a
	 signed MOD operation whose second operand is a power of 2, make
	 the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tree newtype = unsigned_type_for (TREE_TYPE (arg0));
	  tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
				     fold_convert_loc (loc, newtype,
						       TREE_OPERAND (arg0, 0)),
				     fold_convert_loc (loc, newtype,
						       TREE_OPERAND (arg0, 1)));

	  return fold_build2_loc (loc, code, type, newmod,
			      fold_convert_loc (loc, newtype, arg1));
	}

      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
	 C1 is a valid shift constant, and C2 is a power of two, i.e.
	 a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
	     == INTEGER_CST
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && integer_zerop (arg1))
	{
	  tree itype = TREE_TYPE (arg0);
	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
	  prec = TYPE_PRECISION (itype);

	  /* Check for a valid shift count.  */
	  if (wi::ltu_p (arg001, prec))
	    {
	      tree arg01 = TREE_OPERAND (arg0, 1);
	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
	      /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
		 can be rewritten as (X & (C2 << C1)) != 0.  */
	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
		{
		  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
		  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
		  return fold_build2_loc (loc, code, type, tem,
				      fold_convert_loc (loc, itype, arg1));
		}
	      /* Otherwise, for signed (arithmetic) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
	      else if (!TYPE_UNSIGNED (itype))
		return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
				    arg000, build_int_cst (itype, 0));
	      /* Otherwise, for unsigned (logical) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
	      else
		return omit_one_operand_loc (loc, type,
					 code == EQ_EXPR ? integer_one_node
					 : integer_zero_node,
					 arg000);
	    }
	}
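
      /* Example: for a 32-bit signed X, ((X >> 28) & 8) != 0 becomes
	 (X & (8 << 28)) != 0, i.e. a test of bit 31, whereas
	 ((X >> 29) & 8) would test a replicated sign bit and so folds
	 directly to X < 0.  */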

      /* If we have (A & C) == C where C is a power of 2, convert this into
	 (A & C) != 0.  Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
			    arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
						    integer_zero_node));
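
      /* Example: (X & 4) == 4 becomes (X & 4) != 0, the canonical form
	 for a single-bit test.  */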

      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
	 bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
      if (tem)
	return tem;
13085 Similarly for NE_EXPR. */
13086 if (TREE_CODE (arg0
) == BIT_AND_EXPR
13087 && TREE_CODE (arg1
) == INTEGER_CST
13088 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
13090 tree notc
= fold_build1_loc (loc
, BIT_NOT_EXPR
,
13091 TREE_TYPE (TREE_OPERAND (arg0
, 1)),
13092 TREE_OPERAND (arg0
, 1));
13094 = fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
13095 fold_convert_loc (loc
, TREE_TYPE (arg0
), arg1
),
13097 tree rslt
= code
== EQ_EXPR
? integer_zero_node
: integer_one_node
;
13098 if (integer_nonzerop (dandnotc
))
13099 return omit_one_operand_loc (loc
, type
, rslt
, arg0
);
13102 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
13103 Similarly for NE_EXPR. */
13104 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
13105 && TREE_CODE (arg1
) == INTEGER_CST
13106 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
13108 tree notd
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg1
), arg1
);
13110 = fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
13111 TREE_OPERAND (arg0
, 1),
13112 fold_convert_loc (loc
, TREE_TYPE (arg0
), notd
));
13113 tree rslt
= code
== EQ_EXPR
? integer_zero_node
: integer_one_node
;
13114 if (integer_nonzerop (candnotd
))
13115 return omit_one_operand_loc (loc
, type
, rslt
, arg0
);

      /* If this is a comparison of a field, we may be able to simplify it.  */
      if ((TREE_CODE (arg0) == COMPONENT_REF
	   || TREE_CODE (arg0) == BIT_FIELD_REF)
	  /* Handle the constant case even without -O
	     to make sure the warnings are given.  */
	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
	{
	  t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
	  if (t1)
	    return t1;
	}

      /* Optimize comparisons of strlen vs zero to a compare of the
	 first character of the string vs zero.  To wit,
		strlen(ptr) == 0   =>  *ptr == 0
		strlen(ptr) != 0   =>  *ptr != 0
	 Other cases should reduce to one of these two (or a constant)
	 due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
	  && integer_zerop (arg1))
	{
	  tree fndecl = get_callee_fndecl (arg0);

	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
	      && call_expr_nargs (arg0) == 1
	      && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
	    {
	      tree iref = build_fold_indirect_ref_loc (loc,
						       CALL_EXPR_ARG (arg0, 0));
	      return fold_build2_loc (loc, code, type, iref,
				  build_int_cst (TREE_TYPE (iref), 0));
	    }
	}
      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
      if (TREE_CODE (arg0) == RSHIFT_EXPR
	  && integer_zerop (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree itype = TREE_TYPE (arg00);
	  if (wi::eq_p (arg01, TYPE_PRECISION (itype) - 1))
	    {
	      if (TYPE_UNSIGNED (itype))
		{
		  itype = signed_type_for (itype);
		  arg00 = fold_convert_loc (loc, itype, arg00);
		}
	      return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				      type, arg00, build_zero_cst (itype));
	    }
	}
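      /* For a 32-bit X this rewrites (X >> 31) != 0 as X < 0: after a shift
	 by precision - 1 only the sign bit remains.  Unsigned operands are
	 first converted to the corresponding signed type so that the sign
	 comparison is meaningful.  */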
      /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
      if (integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_XOR_EXPR)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg0, 1));
      /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				build_zero_cst (TREE_TYPE (arg0)));

      /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
				build_zero_cst (TREE_TYPE (arg0)));
      /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, BIT_XOR_EXPR,
						 TREE_TYPE (arg1),
						 TREE_OPERAND (arg0, 1), arg1));
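      /* For instance, (X ^ 5) == 3 becomes X == 6, since XOR by a constant
	 is its own inverse and 5 ^ 3 == 6.  */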
      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
	 (X & C) == 0 when C is a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
				 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
				 TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type, tem,
				  fold_convert_loc (loc, TREE_TYPE (arg0),
						    arg1));
	}
      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
	 constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type,
				  arg00, build_int_cst (TREE_TYPE (arg00), 0));
	}
      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
	 when C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
				 arg000, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type,
				  tem, build_int_cst (TREE_TYPE (tem), 0));
	}
      if (integer_zerop (arg1)
	  && tree_expr_nonzero_p (arg0))
	{
	  tree res = constant_boolean_node (code == NE_EXPR, type);
	  return omit_one_operand_loc (loc, type, res, arg0);
	}
      /* Fold -X op -Y as X op Y, where op is eq/ne.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, code, type,
				TREE_OPERAND (arg0, 0),
				fold_convert_loc (loc, TREE_TYPE (arg0),
						  TREE_OPERAND (arg1, 0)));
      /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR,
								      itype,
								      arg00,
								      arg10),
						     arg01),
				    build_zero_cst (itype));

	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR,
								      itype,
								      arg00,
								      arg11),
						     arg01),
				    build_zero_cst (itype));

	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR,
								      itype,
								      arg01,
								      arg10),
						     arg00),
				    build_zero_cst (itype));

	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR,
								      itype,
								      arg01,
								      arg11),
						     arg00),
				    build_zero_cst (itype));
	}
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
	     operand_equal_p guarantees no side-effects so we don't need
	     to use omit_one_operand on Z.  */
	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2_loc (loc, code, type, arg00,
				    fold_convert_loc (loc, TREE_TYPE (arg00),
						      arg10));
	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2_loc (loc, code, type, arg00,
				    fold_convert_loc (loc, TREE_TYPE (arg00),
						      arg11));
	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2_loc (loc, code, type, arg01,
				    fold_convert_loc (loc, TREE_TYPE (arg01),
						      arg10));
	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2_loc (loc, code, type, arg01,
				    fold_convert_loc (loc, TREE_TYPE (arg01),
						      arg11));

	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
	  if (TREE_CODE (arg01) == INTEGER_CST
	      && TREE_CODE (arg11) == INTEGER_CST)
	    {
	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
				     fold_convert_loc (loc, itype, arg11));
	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
	      return fold_build2_loc (loc, code, type, tem,
				      fold_convert_loc (loc, itype, arg10));
	    }
	}
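      /* For instance, (X ^ 5) == (Y ^ 3) becomes (X ^ 6) == Y: the two
	 constants combine into the single constant 5 ^ 3 == 6 on one
	 side.  */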
      /* Attempt to simplify equality/inequality comparisons of complex
	 values.  Only lower the comparison if the result is known or
	 can be simplified to a single scalar comparison.  */
      if ((TREE_CODE (arg0) == COMPLEX_EXPR
	   || TREE_CODE (arg0) == COMPLEX_CST)
	  && (TREE_CODE (arg1) == COMPLEX_EXPR
	      || TREE_CODE (arg1) == COMPLEX_CST))
	{
	  tree real0, imag0, real1, imag1;
	  tree rcond, icond;

	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
	    {
	      real0 = TREE_OPERAND (arg0, 0);
	      imag0 = TREE_OPERAND (arg0, 1);
	    }
	  else
	    {
	      real0 = TREE_REALPART (arg0);
	      imag0 = TREE_IMAGPART (arg0);
	    }

	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
	    {
	      real1 = TREE_OPERAND (arg1, 0);
	      imag1 = TREE_OPERAND (arg1, 1);
	    }
	  else
	    {
	      real1 = TREE_REALPART (arg1);
	      imag1 = TREE_IMAGPART (arg1);
	    }

	  rcond = fold_binary_loc (loc, code, type, real0, real1);
	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
	    {
	      if (integer_zerop (rcond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_false_node,
						  imag0, imag1);
		  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_true_node,
						  imag0, imag1);
		  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
		}
	    }

	  icond = fold_binary_loc (loc, code, type, imag0, imag1);
	  if (icond && TREE_CODE (icond) == INTEGER_CST)
	    {
	      if (integer_zerop (icond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_false_node,
						  real0, real1);
		  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_true_node,
						  real0, real1);
		  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
		}
	    }
	}

      break;

    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;
      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	       && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
	      || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  enum tree_code code0 = TREE_CODE (arg0);
	  int is_positive;

	  if (TREE_CODE (arg01) == REAL_CST)
	    is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
	  else
	    is_positive = tree_int_cst_sgn (arg01);

	  /* (X - c) > X becomes false.  */
	  if (code == GT_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that (X - c) > X "
					"is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Likewise (X + c) < X becomes false.  */
	  if (code == LT_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) < X is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Convert (X - c) <= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == LE_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X - c) <= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  /* Convert (X + c) >= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == GE_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) >= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  if (TREE_CODE (arg01) == INTEGER_CST)
	    {
	      /* Convert X + c > X and X - c < X to true for integers.  */
	      if (code == GT_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) > X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      if (code == LT_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) < X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      /* Convert X + c <= X and X - c >= X to false for integers.  */
	      if (code == LE_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) <= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}

	      if (code == GE_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) >= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}
	    }
	}
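      /* For instance, for signed X with undefined overflow, X + 1 > X folds
	 to 1: it could only be false if the addition wrapped, which is
	 exactly the assumption the warnings above report.  */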
      /* Comparisons with the highest or lowest possible integer of
	 the specified precision will have known values.  */
      {
	tree arg1_type = TREE_TYPE (arg1);
	unsigned int prec = TYPE_PRECISION (arg1_type);

	if (TREE_CODE (arg1) == INTEGER_CST
	    && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
	  {
	    wide_int max = wi::max_value (arg1_type);
	    wide_int signed_max = wi::max_value (prec, SIGNED);
	    wide_int min = wi::min_value (arg1_type);

	    if (wi::eq_p (arg1, max))
	      switch (code)
		{
		case GT_EXPR:
		  return omit_one_operand_loc (loc, type, integer_zero_node,
					       arg0);

		case GE_EXPR:
		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

		case LE_EXPR:
		  return omit_one_operand_loc (loc, type, integer_one_node,
					       arg0);

		case LT_EXPR:
		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

		/* The GE_EXPR and LT_EXPR cases above are not normally
		   reached because of previous transformations.  */

		default:
		  break;
		}
	    else if (wi::eq_p (arg1, max - 1))
	      switch (code)
		{
		case GT_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, EQ_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		case LE_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, NE_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		default:
		  break;
		}
	    else if (wi::eq_p (arg1, min))
	      switch (code)
		{
		case LT_EXPR:
		  return omit_one_operand_loc (loc, type, integer_zero_node,
					       arg0);

		case LE_EXPR:
		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

		case GE_EXPR:
		  return omit_one_operand_loc (loc, type, integer_one_node,
					       arg0);

		case GT_EXPR:
		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

		default:
		  break;
		}
	    else if (wi::eq_p (arg1, min + 1))
	      switch (code)
		{
		case GE_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, NE_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		case LT_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, EQ_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		default:
		  break;
		}

	    else if (wi::eq_p (arg1, signed_max)
		     && TYPE_UNSIGNED (arg1_type)
		     /* We will flip the signedness of the comparison operator
			associated with the mode of arg1, so the sign bit is
			specified by this mode.  Check that arg1 is the signed
			max associated with this sign bit.  */
		     && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
		     /* signed_type does not work on pointer types.  */
		     && INTEGRAL_TYPE_P (arg1_type))
	      {
		/* The following case also applies to X < signed_max+1
		   and X >= signed_max+1 because of previous
		   transformations.  */
		if (code == LE_EXPR || code == GT_EXPR)
		  {
		    tree st = signed_type_for (arg1_type);
		    return fold_build2_loc (loc,
					    code == LE_EXPR
					    ? GE_EXPR : LT_EXPR,
					    type,
					    fold_convert_loc (loc, st, arg0),
					    build_int_cst (st, 0));
		  }
	      }
	  }
      }
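      /* For instance, for unsigned char X: X > 255 folds to 0, X <= 255
	 folds to 1, and X <= 254 becomes X != 255 after adding 1 to the
	 constant.  */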
      /* If we are comparing an ABS_EXPR with a constant, we can
	 convert all the cases into explicit comparisons, but they may
	 well not be faster than doing the ABS and one comparison.
	 But ABS (X) <= C is a range comparison, which becomes a subtraction
	 and a comparison, and is probably faster.  */
      if (code == LE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == ABS_EXPR
	  && ! TREE_SIDE_EFFECTS (arg0)
	  && (0 != (tem = negate_expr (arg1)))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				build2 (GE_EXPR, type,
					TREE_OPERAND (arg0, 0), tem),
				build2 (LE_EXPR, type,
					TREE_OPERAND (arg0, 0), arg1));
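      /* For instance, ABS (X) <= 5 becomes X >= -5 && X <= 5, the range
	 comparison described above.  */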
      /* Convert ABS_EXPR<x> >= 0 to true.  */
      strict_overflow_p = false;
      if (code == GE_EXPR
	  && (integer_zerop (arg1)
	      || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
		  && real_zerop (arg1)))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type,
				       constant_boolean_node (true, type),
				       arg0);
	}

      /* Convert ABS_EXPR<x> < 0 to false.  */
      strict_overflow_p = false;
      if (code == LT_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type,
				       constant_boolean_node (false, type),
				       arg0);
	}
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
	 and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TREE_CODE (arg1) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (arg1, 0)))
	return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			   build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
				   TREE_OPERAND (arg1, 1)),
			   build_zero_cst (TREE_TYPE (arg0)));
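      /* For unsigned X, X < (1 << Y) says that X has no bits set at or
	 above position Y, which is exactly (X >> Y) == 0.  */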
      /* Similarly for X < (cast) (1 << Y).  But cast can't be narrowing,
	 otherwise Y might be >= # of bits in X's type and thus e.g.
	 (unsigned char) (1 << Y) for Y 15 might be 0.
	 If the cast is widening, then 1 << Y should have unsigned type,
	 otherwise if Y is number of bits in the signed shift type minus 1,
	 we can't optimize this.  E.g. (unsigned long long) (1 << Y) for Y
	 31 might be 0xffffffff80000000.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && CONVERT_EXPR_P (arg1)
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
	  && (TYPE_PRECISION (TREE_TYPE (arg1))
	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
	  && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
	      || (TYPE_PRECISION (TREE_TYPE (arg1))
		  == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
	{
	  tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
			TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
	  return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			     fold_convert_loc (loc, TREE_TYPE (arg0), tem),
			     build_zero_cst (TREE_TYPE (arg0)));
	}

      break;
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
	{
	  t1 = fold_relational_const (code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
	}
      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand_loc (loc, type, t1, arg1);
	}

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand_loc (loc, type, t1, arg0);
	}
      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
	  && !flag_trapping_math
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (0, type);
      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
	tree targ0 = strip_float_extensions (arg0);
	tree targ1 = strip_float_extensions (arg1);
	tree newtype = TREE_TYPE (targ0);

	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	  newtype = TREE_TYPE (targ1);

	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, newtype, targ0),
				  fold_convert_loc (loc, newtype, targ1));
      }

      break;
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
	 nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
				 : fold_convert_loc (loc, type, arg1);
      return pedantic_non_lvalue_loc (loc, tem);
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg0) == REAL_CST
	   && TREE_CODE (arg1) == REAL_CST)
	  || (TREE_CODE (arg0) == INTEGER_CST
	      && TREE_CODE (arg1) == INTEGER_CST))
	return build_complex (type, arg0, arg1);
      if (TREE_CODE (arg0) == REALPART_EXPR
	  && TREE_CODE (arg1) == IMAGPART_EXPR
	  && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
	  && operand_equal_p (TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg1, 0));
      return NULL_TREE;

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
	if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg0, elts)
	    || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
	  return NULL_TREE;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
					  ? NOP_EXPR : FIX_TRUNC_EXPR,
					  TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
	unsigned int out, ofs, scale;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 4);
	if (!vec_cst_ctor_to_array (arg0, elts)
	    || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
	  return NULL_TREE;

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;
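	/* For instance, VEC_WIDEN_MULT_ODD_EXPR uses scale == 1 and
	   ofs == 1, so the loop below multiplies input lanes 1, 3, 5, ...
	   ((out << 1) + 1); the LO/HI variants use scale == 0 and let ofs
	   pick which half of the double-width input is read.  */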
	for (out = 0; out < nelts; out++)
	  {
	    unsigned int in1 = (out << scale) + ofs;
	    unsigned int in2 = in1 + nelts * 2;
	    tree t1, t2;

	    t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
	    t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    elts[out] = const_binop (MULT_EXPR, t1, t2);
	    if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
   a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
   of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;

    case GOTO_EXPR:
      *walk_subtrees = 0;

      /* ... fall through ...  */

    default:
      return NULL_TREE;
    }
}

/* Return whether the sub-tree ST contains a label which is accessible from
   outside the sub-tree.  */

static bool
contains_label_p (tree st)
{
  return
   (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
}
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary_loc (location_t loc, enum tree_code code, tree type,
		  tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 3);

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any case,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */

  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  if (op2)
    {
      arg2 = op2;
      STRIP_NOPS (arg2);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx,
				    field, value)
	    if (field == arg1)
	      return value;
	}
      return NULL_TREE;
:
14032 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
14033 so all simple results must be passed through pedantic_non_lvalue. */
14034 if (TREE_CODE (arg0
) == INTEGER_CST
)
14036 tree unused_op
= integer_zerop (arg0
) ? op1
: op2
;
14037 tem
= integer_zerop (arg0
) ? op2
: op1
;
14038 /* Only optimize constant conditions when the selected branch
14039 has the same type as the COND_EXPR. This avoids optimizing
14040 away "c ? x : throw", where the throw has a void type.
14041 Avoid throwing away that operand which contains label. */
14042 if ((!TREE_SIDE_EFFECTS (unused_op
)
14043 || !contains_label_p (unused_op
))
14044 && (! VOID_TYPE_P (TREE_TYPE (tem
))
14045 || VOID_TYPE_P (type
)))
14046 return pedantic_non_lvalue_loc (loc
, tem
);
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  if (integer_all_onesp (arg0))
	    return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
	  if (integer_zerop (arg0))
	    return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);

	  if ((TREE_CODE (arg1) == VECTOR_CST
	       || TREE_CODE (arg1) == CONSTRUCTOR)
	      && (TREE_CODE (arg2) == VECTOR_CST
		  || TREE_CODE (arg2) == CONSTRUCTOR))
	    {
	      unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	      unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
	      gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
	      for (i = 0; i < nelts; i++)
		{
		  tree val = VECTOR_CST_ELT (arg0, i);
		  if (integer_all_onesp (val))
		    sel[i] = i;
		  else if (integer_zerop (val))
		    sel[i] = nelts + i;
		  else /* Currently unreachable.  */
		    return NULL_TREE;
		}
	      tree t = fold_vec_perm (type, arg1, arg2, sel);
	      if (t != NULL_TREE)
		return t;
	    }
	}

      if (operand_equal_p (arg1, op2, 0))
	return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
      /* If we have A op B ? A : C, we may be able to convert this to a
	 simpler expression, depending on the operation and the values
	 of B and C.  Signed zeros prevent all of these transformations,
	 for reasons given above each one.

	 Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     arg1, TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
	{
	  tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
	  if (tem)
	    return tem;
	}

      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     op2,
					     TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem && COMPARISON_CLASS_P (tem))
	    {
	      tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
	      if (tem)
		return tem;
	    }
	}
      /* If the second operand is simpler than the third, swap them
	 since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
	  && tree_swap_operands_p (op1, op2, false))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem)
	    return fold_build3_loc (loc, code, type, tem, op2, op1);
	}
      /* Convert A ? 1 : 0 to simply A.  */
      if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
				 : (integer_onep (op1)
				    && !VECTOR_TYPE_P (type)))
	  && integer_zerop (op2)
	  /* If we try to convert OP0 to our type, the
	     call to fold will try to move the conversion inside
	     a COND, which will recurse.  In that case, the COND_EXPR
	     is probably the best choice, so leave it alone.  */
	  && type == TREE_TYPE (arg0))
	return pedantic_non_lvalue_loc (loc, arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
	 over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
	  && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
				    : (integer_onep (op2)
				       && !VECTOR_TYPE_P (type)))
	  && truth_value_p (TREE_CODE (arg0)))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
					  invert_truthvalue_loc (loc,
								 arg0)));
14154 if (TREE_CODE (arg0
) == LT_EXPR
14155 && integer_zerop (TREE_OPERAND (arg0
, 1))
14156 && integer_zerop (op2
)
14157 && (tem
= sign_bit_p (TREE_OPERAND (arg0
, 0), arg1
)))
14159 /* sign_bit_p looks through both zero and sign extensions,
14160 but for this optimization only sign extensions are
14162 tree tem2
= TREE_OPERAND (arg0
, 0);
14163 while (tem
!= tem2
)
14165 if (TREE_CODE (tem2
) != NOP_EXPR
14166 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2
, 0))))
14171 tem2
= TREE_OPERAND (tem2
, 0);
14173 /* sign_bit_p only checks ARG1 bits within A's precision.
14174 If <sign bit of A> has wider type than A, bits outside
14175 of A's precision in <sign bit of A> need to be checked.
14176 If they are all 0, this optimization needs to be done
14177 in unsigned A's type, if they are all 1 in signed A's type,
14178 otherwise this can't be done. */
14180 && TYPE_PRECISION (TREE_TYPE (tem
))
14181 < TYPE_PRECISION (TREE_TYPE (arg1
))
14182 && TYPE_PRECISION (TREE_TYPE (tem
))
14183 < TYPE_PRECISION (type
))
14185 int inner_width
, outer_width
;
14188 inner_width
= TYPE_PRECISION (TREE_TYPE (tem
));
14189 outer_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
14190 if (outer_width
> TYPE_PRECISION (type
))
14191 outer_width
= TYPE_PRECISION (type
);
14193 wide_int mask
= wi::shifted_mask
14194 (inner_width
, outer_width
- inner_width
, false,
14195 TYPE_PRECISION (TREE_TYPE (arg1
)));
14197 wide_int common
= mask
& arg1
;
14198 if (common
== mask
)
14200 tem_type
= signed_type_for (TREE_TYPE (tem
));
14201 tem
= fold_convert_loc (loc
, tem_type
, tem
);
14203 else if (common
== 0)
14205 tem_type
= unsigned_type_for (TREE_TYPE (tem
));
14206 tem
= fold_convert_loc (loc
, tem_type
, tem
);
14214 fold_convert_loc (loc
, type
,
14215 fold_build2_loc (loc
, BIT_AND_EXPR
,
14216 TREE_TYPE (tem
), tem
,
14217 fold_convert_loc (loc
,
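      /* For instance, for signed char A, "A < 0 ? 0x80 : 0" computes A's
	 sign bit, so it folds to A & 0x80 carried out in the appropriately
	 signed type as selected above.  */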
      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
	 already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && integer_pow2p (arg1))
	{
	  tree tem = TREE_OPERAND (arg0, 0);
	  STRIP_NOPS (tem);
	  if (TREE_CODE (tem) == RSHIFT_EXPR
	      && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
	      && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
		 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
	    return fold_build2_loc (loc, BIT_AND_EXPR, type,
				    TREE_OPERAND (tem, 0), arg1);
	}
      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
	 is probably obsolete because the first operand should be a
	 truth value (that's why we have the two cases above), but let's
	 leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
	  && TREE_CODE (arg0) == NE_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_pow2p (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      arg1, OEP_ONLY_CONST))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  TREE_OPERAND (arg0,
									0)));
      /* Disable the transformations below for vectors, since
	 fold_binary_op_with_conditional_arg may undo them immediately,
	 yielding an infinite loop.  */
      if (code == VEC_COND_EXPR)
	return NULL_TREE;

      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
							   : TRUTH_ANDIF_EXPR,
				type, fold_convert_loc (loc, type, arg0),
				arg1);
      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if ((code == VEC_COND_EXPR ? integer_all_onesp (op2)
				 : integer_onep (op2))
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem)
	    return fold_build2_loc (loc, code == VEC_COND_EXPR
					 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
				    type, fold_convert_loc (loc, type, tem),
				    arg1);
	}
      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_invert_truthvalue (loc0, arg0);
	  if (tem)
	    return fold_build2_loc (loc, code == VEC_COND_EXPR
					 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
				    type, fold_convert_loc (loc, type, tem),
				    op2);
	}
      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1)
				 : integer_onep (arg1))
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2))
	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
	return fold_build2_loc (loc, code == VEC_COND_EXPR
				     ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
				type, fold_convert_loc (loc, type, arg0),
				op2);

      return NULL_TREE;

    case CALL_EXPR:
      /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
	 of fold_ternary on them.  */
      gcc_unreachable ();
    case BIT_FIELD_REF:
      if ((TREE_CODE (arg0) == VECTOR_CST
	   || (TREE_CODE (arg0) == CONSTRUCTOR
	       && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
	  && (type == TREE_TYPE (TREE_TYPE (arg0))
	      || (TREE_CODE (type) == VECTOR_TYPE
		  && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
	{
	  tree eltype = TREE_TYPE (TREE_TYPE (arg0));
	  unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
	  unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
	  unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);

	  if (n != 0
	      && (idx % width) == 0
	      && (n % width) == 0
	      && ((idx + n) / width)
		 <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
	    {
	      idx = idx / width;
	      n = n / width;

	      if (TREE_CODE (arg0) == VECTOR_CST)
		{
		  if (n == 1)
		    return VECTOR_CST_ELT (arg0, idx);

		  tree *vals = XALLOCAVEC (tree, n);
		  for (unsigned i = 0; i < n; ++i)
		    vals[i] = VECTOR_CST_ELT (arg0, idx + i);
		  return build_vector (type, vals);
		}
	      /* Constructor elements can be subvectors.  */
	      unsigned HOST_WIDE_INT k = 1;
	      if (CONSTRUCTOR_NELTS (arg0) != 0)
		{
		  tree cons_elem
		    = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
		  if (TREE_CODE (cons_elem) == VECTOR_TYPE)
		    k = TYPE_VECTOR_SUBPARTS (cons_elem);
		}

	      /* We keep an exact subset of the constructor elements.  */
	      if ((idx % k) == 0 && (n % k) == 0)
		{
		  if (CONSTRUCTOR_NELTS (arg0) == 0)
		    return build_constructor (type, NULL);
		  idx /= k;
		  n /= k;
		  if (n == 1)
		    {
		      if (idx < CONSTRUCTOR_NELTS (arg0))
			return CONSTRUCTOR_ELT (arg0, idx)->value;
		      return build_zero_cst (type);
		    }

		  vec<constructor_elt, va_gc> *vals;
		  vec_alloc (vals, n);
		  for (unsigned i = 0;
		       i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
		       ++i)
		    CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
					    CONSTRUCTOR_ELT
					      (arg0, idx + i)->value);
		  return build_constructor (type, vals);
		}
	      /* The bitfield references a single constructor element.  */
	      else if (idx + n <= (idx / k + 1) * k)
		{
		  if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
		    return build_zero_cst (type);
		  else if (n == k)
		    return CONSTRUCTOR_ELT (arg0, idx / k)->value;
		  else
		    return fold_build3_loc (loc, code, type,
		      CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
		      build_int_cst (TREE_TYPE (op2), (idx % k) * width));
		}
	    }
	}
      /* A bit-field-ref that referenced the full argument can be stripped.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	  && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
	  && integer_zerop (op2))
	return fold_convert_loc (loc, type, arg0);

      /* On constants we can use native encode/interpret to constant
	 fold (nearly) all BIT_FIELD_REFs.  */
      if (CONSTANT_CLASS_P (arg0)
	  && can_native_interpret_type_p (type)
	  && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
	  /* This limitation should not be necessary, we just need to
	     round this up to mode size.  */
	  && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
	  /* Need bit-shifting of the buffer to relax the following.  */
	  && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
	{
	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
	  unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
	  unsigned HOST_WIDE_INT clen;
	  clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
	  /* ??? We cannot tell native_encode_expr to start at
	     some random byte only.  So limit us to a reasonable amount
	     of work.  */
	  if (clen <= 4096)
	    {
	      unsigned char *b = XALLOCAVEC (unsigned char, clen);
	      unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
	      if (len > 0
		  && len * BITS_PER_UNIT >= bitpos + bitsize)
		{
		  tree v = native_interpret_expr (type,
						  b + bitpos / BITS_PER_UNIT,
						  bitsize / BITS_PER_UNIT);
		  if (v)
		    return v;
		}
	    }
	}

      return NULL_TREE;
    case FMA_EXPR:
      /* For integers we can decompose the FMA if possible.  */
      if (TREE_CODE (arg0) == INTEGER_CST
	  && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, PLUS_EXPR, type,
				const_binop (MULT_EXPR, arg0, arg1), arg2);
      if (integer_zerop (arg2))
	return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);

      return fold_fma (loc, type, arg0, arg1, arg2);
    case VEC_PERM_EXPR:
      if (TREE_CODE (arg2) == VECTOR_CST)
	{
	  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
	  unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
	  bool need_mask_canon = false;
	  bool all_in_vec0 = true;
	  bool all_in_vec1 = true;
	  bool maybe_identity = true;
	  bool single_arg = (op0 == op1);
	  bool changed = false;

	  mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
	  gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
	  for (i = 0; i < nelts; i++)
	    {
	      tree val = VECTOR_CST_ELT (arg2, i);
	      if (TREE_CODE (val) != INTEGER_CST)
		return NULL_TREE;

	      /* Make sure that the perm value is in an acceptable
		 range.  */
	      wide_int t = val;
	      if (wi::gtu_p (t, mask))
		{
		  need_mask_canon = true;
		  sel[i] = t.to_uhwi () & mask;
		}
	      else
		sel[i] = t.to_uhwi ();

	      if (sel[i] < nelts)
		all_in_vec1 = false;
	      else
		all_in_vec0 = false;

	      if ((sel[i] & (nelts - 1)) != i)
		maybe_identity = false;
	    }

	  if (maybe_identity)
	    {
	      if (all_in_vec0)
		return op0;
	      if (all_in_vec1)
		return op1;
	    }

	  if (all_in_vec0)
	    op1 = op0;
	  else if (all_in_vec1)
	    {
	      op0 = op1;
	      for (i = 0; i < nelts; i++)
		sel[i] -= nelts;
	      need_mask_canon = true;
	    }

	  if ((TREE_CODE (op0) == VECTOR_CST
	       || TREE_CODE (op0) == CONSTRUCTOR)
	      && (TREE_CODE (op1) == VECTOR_CST
		  || TREE_CODE (op1) == CONSTRUCTOR))
	    {
	      tree t = fold_vec_perm (type, op0, op1, sel);
	      if (t != NULL_TREE)
		return t;
	    }

	  if (op0 == op1 && !single_arg)
	    changed = true;

	  if (need_mask_canon && arg2 == op2)
	    {
	      tree *tsel = XALLOCAVEC (tree, nelts);
	      tree eltype = TREE_TYPE (TREE_TYPE (arg2));
	      for (i = 0; i < nelts; i++)
		tsel[i] = build_int_cst (eltype, sel[i]);
	      op2 = build_vector (TREE_TYPE (arg2), tsel);
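	      /* For instance, a selector element equal to 2 * nelts + 3 is
		 out of range; it was reduced modulo the mask above, and
		 need_mask_canon makes us rebuild op2 here so the emitted
		 VEC_PERM_EXPR carries only canonical indices.  */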
	      changed = true;
	    }

	  if (changed)
	    return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;
  location_t loc = EXPR_LOCATION (expr);

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  /* CALL_EXPR-like objects with variable numbers of operands are
     treated specially.  */
  if (kind == tcc_vl_exp)
    {
      if (code == CALL_EXPR)
	{
	  tem = fold_call_expr (loc, expr, false);
	  return tem ? tem : expr;
	}
      return expr;
    }

  if (IS_EXPR_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  op0 = TREE_OPERAND (t, 0);
	  tem = fold_unary_loc (loc, code, type, op0);
	  return tem ? tem : expr;
	case 2:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  tem = fold_binary_loc (loc, code, type, op0, op1);
	  return tem ? tem : expr;
	case 3:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  op2 = TREE_OPERAND (t, 2);
	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
	  return tem ? tem : expr;
	default:
	  break;
	}
    }

  switch (code)
    {
    case ARRAY_REF:
      {
	tree op0 = TREE_OPERAND (t, 0);
	tree op1 = TREE_OPERAND (t, 1);

	if (TREE_CODE (op1) == INTEGER_CST
	    && TREE_CODE (op0) == CONSTRUCTOR
	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
	  {
	    vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
	    unsigned HOST_WIDE_INT end = vec_safe_length (elts);
	    unsigned HOST_WIDE_INT begin = 0;

	    /* Find a matching index by means of a binary search.  */
	    while (begin != end)
	      {
		unsigned HOST_WIDE_INT middle = (begin + end) / 2;
		tree index = (*elts)[middle].index;

		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_lt (index, op1))
		  begin = middle + 1;
		else if (TREE_CODE (index) == INTEGER_CST
			 && tree_int_cst_lt (op1, index))
		  end = middle;
		else if (TREE_CODE (index) == RANGE_EXPR
			 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
		  begin = middle + 1;
		else if (TREE_CODE (index) == RANGE_EXPR
			 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
		  end = middle;
		else
		  return (*elts)[middle].value;
	      }
	  }

	return t;
      }
    case CONSTRUCTOR:
      {
	/* Return a VECTOR_CST if possible.  */
	tree type = TREE_TYPE (t);
	if (TREE_CODE (type) != VECTOR_TYPE)
	  return t;

	tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
	unsigned HOST_WIDE_INT idx, pos = 0;
	tree value;

	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
	  {
	    if (!CONSTANT_CLASS_P (value))
	      return t;
	    if (TREE_CODE (value) == VECTOR_CST)
	      {
		for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
		  vec[pos++] = VECTOR_CST_ELT (value, i);
	      }
	    else
	      vec[pos++] = value;
	  }
	for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
	  vec[pos] = build_zero_cst (TREE_TYPE (type));

	return build_vector (type, vec);
      }

    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (const_tree, struct md5_ctx *,
				hash_table <pointer_hash <const tree_node> > *);
static void fold_check_failed (const_tree, const_tree);
void print_fold_checksum (const_tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  hash_table <pointer_hash <const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before);
  ht.empty ();

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}
void
print_fold_checksum (const_tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  hash_table <pointer_hash <const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum);
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}
static void
fold_check_failed (const_tree expr ATTRIBUTE_UNUSED,
		   const_tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
static void
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
		    hash_table <pointer_hash <const tree_node> > *ht)
{
  const tree_node **slot;
  enum tree_code code;
  union tree_node buf;
  int i, len;

 recursive_label:
  if (expr == NULL)
    return;
  slot = ht->find_slot (expr, INSERT);
  if (*slot)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      SET_DECL_ASSEMBLER_NAME ((tree) &buf, NULL);
      expr = (tree) &buf;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr)
	       || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)
	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
	       || TYPE_NEXT_VARIANT (expr)))
    {
      /* Allow these fields to be modified.  */
      tree tmp;
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = tmp = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
      TYPE_POINTER_TO (tmp) = NULL;
      TYPE_REFERENCE_TO (tmp) = NULL;
      TYPE_NEXT_VARIANT (tmp) = NULL;
      if (TYPE_CACHED_VALUES_P (tmp))
	{
	  TYPE_CACHED_VALUES_P (tmp) = 0;
	  TYPE_CACHED_VALUES (tmp) = NULL;
	}
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST
      && code != SSA_NAME
      && CODE_CONTAINS_STRUCT (code, TS_COMMON))
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
	    fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  expr = TREE_CHAIN (expr);
	  goto recursive_label;
	  break;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
    case tcc_vl_exp:
      len = TREE_OPERAND_LENGTH (expr);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
	{
	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
	}

      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
	{
	  if (TREE_CODE (expr) == FUNCTION_DECL)
	    fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
	  fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
	}
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
/* Helper function for outputting the checksum of a tree T.  When
   debugging with gdb, you can "define mynext" to be "next" followed
   by "call debug_fold_checksum (op0)", then just trace down till the
   outputs differ.  */

DEBUG_FUNCTION void
debug_fold_checksum (const_tree t)
{
  int i;
  unsigned char checksum[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (t, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum);

  for (i = 0; i < 16; i++)
    fprintf (stderr, "%d ", checksum[i]);

  fprintf (stderr, "\n");
}
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  LOC is the location of the resulting expression.
   Return a folded expression if successful.  Otherwise, return a tree
   expression with code CODE of type TYPE with an operand OP0.  */

tree
fold_build1_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before);
  ht.empty ();
#endif

  tem = fold_unary_loc (loc, code, type, op0);
  if (!tem)
    tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting
   expression.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with operands
   OP0 and OP1.  */

tree
fold_build2_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0, tree op1
		      MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_after_op0[16],
		checksum_after_op1[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();
#endif

  tem = fold_binary_loc (loc, code, type, op0, op1);
  if (!tem)
    tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op1);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}
/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
		      tree op0, tree op1, tree op2 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_before_op2[16],
		checksum_after_op0[16],
		checksum_after_op1[16],
		checksum_after_op2[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  ht.empty ();
#endif

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  ht.empty ();

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op2);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
   arguments in ARGARRAY, and a null static chain.
   Return a folded expression if successful.  Otherwise, return a CALL_EXPR
   of type TYPE from the given operands as constructed by build_call_array.  */

tree
fold_build_call_array_loc (location_t loc, tree type, tree fn,
                           int nargs, tree *argarray)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_fn[16],
                checksum_before_arglist[16],
                checksum_after_fn[16],
                checksum_after_arglist[16];
  struct md5_ctx ctx;
  hash_table <pointer_hash <const tree_node> > ht (32);
  int i;

  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_fn);
  ht.empty ();

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_arglist);
  ht.empty ();
#endif

  tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_fn);
  ht.empty ();

  if (memcmp (checksum_before_fn, checksum_after_fn, 16))
    fold_check_failed (fn, tem);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_arglist);

  if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
    fold_check_failed (NULL_TREE, tem);
#endif

  return tem;
}
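/* Illustrative sketch (hypothetical FN and ARGARRAY, not from this file):
   if FN is the ADDR_EXPR of the FUNCTION_DECL for __builtin_sqrt and
   ARGARRAY[0] is the REAL_CST 4.0, then

     fold_build_call_array_loc (loc, double_type_node, fn, 1, argarray)

   can return the REAL_CST 2.0 directly instead of building a CALL_EXPR,
   since the result is exactly representable.  */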
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;

tree
fold_build1_initializer_loc (location_t loc, enum tree_code code,
                             tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1_loc (loc, code, type, op);

  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer_loc (location_t loc, enum tree_code code,
                             tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2_loc (loc, code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

tree
fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
                                       int nargs, tree *argarray)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
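/* For instance, while folding an initializer the RDIV_EXPR 1.0/0.0 may
   be folded to +Inf even under -ftrapping-math, because START_FOLD_INIT
   clears flag_trapping_math; the ordinary fold_build2_loc would have to
   keep the division, since it may trap at run time.  (Illustrative
   example, assuming the target honors infinities.)  */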
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or if we cannot easily determine that it is.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

int
multiple_of_p (tree type, const_tree top, const_tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
         a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
        return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
        {
          tree op1, t1;

          op1 = TREE_OPERAND (top, 1);
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
              && 0 != (t1 = fold_convert (type,
                                          const_binop (LSHIFT_EXPR,
                                                       size_one_node,
                                                       op1)))
              && !TREE_OVERFLOW (t1))
            return multiple_of_p (type, t1, bottom);
        }
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
          || (TYPE_PRECISION (type)
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
        return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case COND_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
              && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
          || integer_zerop (bottom)
          || (TYPE_UNSIGNED (type)
              && (tree_int_cst_sgn (top) < 0
                  || tree_int_cst_sgn (bottom) < 0)))
        return 0;
      return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
                                SIGNED);

    default:
      return 0;
    }
}
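/* For example, with TOP = I * 16 and BOTTOM = 8 (both sizetype),
   multiple_of_p returns 1 via the MULT_EXPR case, since the constant
   factor 16 is itself a multiple of 8; with TOP = I + 4 it returns 0,
   because PLUS_EXPR requires both operands to be multiples of BOTTOM.  */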
/* Return true if CODE or TYPE is known to be non-negative.  */

static bool
tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
{
  if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
      && truth_value_p (code))
    /* Truth values evaluate to 0 or 1, which is nonnegative unless we
       have a signed:1 type (where the value is -1 and 0).  */
    return true;

  return false;
}
/* Return true if (CODE OP0) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
                                bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
         ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!INTEGRAL_TYPE_P (type))
        return true;
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          *strict_overflow_p = true;
          return true;
        }
      break;

    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
                                            strict_overflow_p);

    CASE_CONVERT:
      {
        tree inner_type = TREE_TYPE (op0);
        tree outer_type = type;

        if (TREE_CODE (outer_type) == REAL_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_warnv_p (op0,
                                                    strict_overflow_p);
            if (INTEGRAL_TYPE_P (inner_type))
              {
                if (TYPE_UNSIGNED (inner_type))
                  return true;
                return tree_expr_nonnegative_warnv_p (op0,
                                                      strict_overflow_p);
              }
          }
        else if (INTEGRAL_TYPE_P (outer_type))
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_warnv_p (op0,
                                                    strict_overflow_p);
            if (INTEGRAL_TYPE_P (inner_type))
              return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
                     && TYPE_UNSIGNED (inner_type);
          }
      }
      break;

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
                                 tree op1, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type))
        return (tree_expr_nonnegative_warnv_p (op0,
                                               strict_overflow_p)
                && tree_expr_nonnegative_warnv_p (op1,
                                                  strict_overflow_p));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
         both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && TREE_CODE (op0) == NOP_EXPR
          && TREE_CODE (op1) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
            {
              unsigned int prec = MAX (TYPE_PRECISION (inner1),
                                       TYPE_PRECISION (inner2)) + 1;
              return prec < TYPE_PRECISION (type);
            }
        }
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          /* x * x is always non-negative for floating point x
             or without overflow.  */
          if (operand_equal_p (op0, op1, 0)
              || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
                  && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
            {
              if (TYPE_OVERFLOW_UNDEFINED (type))
                *strict_overflow_p = true;
              return true;
            }
        }

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
         both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
          && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
        {
          tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
            ? TREE_TYPE (TREE_OPERAND (op0, 0))
            : TREE_TYPE (op0);
          tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
            ? TREE_TYPE (TREE_OPERAND (op1, 0))
            : TREE_TYPE (op1);

          bool unsigned0 = TYPE_UNSIGNED (inner0);
          bool unsigned1 = TYPE_UNSIGNED (inner1);

          if (TREE_CODE (op0) == INTEGER_CST)
            unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;

          if (TREE_CODE (op1) == INTEGER_CST)
            unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;

          if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
              && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
            {
              unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
                ? tree_int_cst_min_precision (op0, UNSIGNED)
                : TYPE_PRECISION (inner0);

              unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
                ? tree_int_cst_min_precision (op1, UNSIGNED)
                : TYPE_PRECISION (inner1);

              return precision0 + precision1 < TYPE_PRECISION (type);
            }
        }
      return false;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
                                             strict_overflow_p)
              || tree_expr_nonnegative_warnv_p (op1,
                                                strict_overflow_p));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
                                             strict_overflow_p)
              && tree_expr_nonnegative_warnv_p (op1,
                                                strict_overflow_p));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
                                            strict_overflow_p);
    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
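/* Worked example for the PLUS_EXPR rule above: if OP0 and OP1 zero
   extend "unsigned char" values into "int", each addend is at most 255,
   so the sum is at most 510 < 2**9.  prec = 8 + 1 = 9 is smaller than
   the 32-bit precision of "int", hence the sum cannot wrap and must be
   non-negative.  */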
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                             strict_overflow_p)
              && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
                                                strict_overflow_p));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if a call to FNDECL of type TYPE with arguments ARG0 and
   ARG1 is known to be non-negative.  If the return value is based on the
   assumption that signed overflow is undefined, set *STRICT_OVERFLOW_P
   to true; otherwise, don't change *STRICT_OVERFLOW_P.  */

bool
tree_call_nonnegative_warnv_p (tree type, tree fndecl,
                               tree arg0, tree arg1, bool *strict_overflow_p)
{
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
        CASE_FLT_FN (BUILT_IN_ACOS):
        CASE_FLT_FN (BUILT_IN_ACOSH):
        CASE_FLT_FN (BUILT_IN_CABS):
        CASE_FLT_FN (BUILT_IN_COSH):
        CASE_FLT_FN (BUILT_IN_ERFC):
        CASE_FLT_FN (BUILT_IN_EXP):
        CASE_FLT_FN (BUILT_IN_EXP10):
        CASE_FLT_FN (BUILT_IN_EXP2):
        CASE_FLT_FN (BUILT_IN_FABS):
        CASE_FLT_FN (BUILT_IN_FDIM):
        CASE_FLT_FN (BUILT_IN_HYPOT):
        CASE_FLT_FN (BUILT_IN_POW10):
        CASE_INT_FN (BUILT_IN_FFS):
        CASE_INT_FN (BUILT_IN_PARITY):
        CASE_INT_FN (BUILT_IN_POPCOUNT):
        CASE_INT_FN (BUILT_IN_CLZ):
        CASE_INT_FN (BUILT_IN_CLRSB):
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
        /* Always true.  */
        return true;

        CASE_FLT_FN (BUILT_IN_SQRT):
        /* sqrt(-0.0) is -0.0.  */
        if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
          return true;
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_ASINH):
        CASE_FLT_FN (BUILT_IN_ATAN):
        CASE_FLT_FN (BUILT_IN_ATANH):
        CASE_FLT_FN (BUILT_IN_CBRT):
        CASE_FLT_FN (BUILT_IN_CEIL):
        CASE_FLT_FN (BUILT_IN_ERF):
        CASE_FLT_FN (BUILT_IN_EXPM1):
        CASE_FLT_FN (BUILT_IN_FLOOR):
        CASE_FLT_FN (BUILT_IN_FMOD):
        CASE_FLT_FN (BUILT_IN_FREXP):
        CASE_FLT_FN (BUILT_IN_ICEIL):
        CASE_FLT_FN (BUILT_IN_IFLOOR):
        CASE_FLT_FN (BUILT_IN_IRINT):
        CASE_FLT_FN (BUILT_IN_IROUND):
        CASE_FLT_FN (BUILT_IN_LCEIL):
        CASE_FLT_FN (BUILT_IN_LDEXP):
        CASE_FLT_FN (BUILT_IN_LFLOOR):
        CASE_FLT_FN (BUILT_IN_LLCEIL):
        CASE_FLT_FN (BUILT_IN_LLFLOOR):
        CASE_FLT_FN (BUILT_IN_LLRINT):
        CASE_FLT_FN (BUILT_IN_LLROUND):
        CASE_FLT_FN (BUILT_IN_LRINT):
        CASE_FLT_FN (BUILT_IN_LROUND):
        CASE_FLT_FN (BUILT_IN_MODF):
        CASE_FLT_FN (BUILT_IN_NEARBYINT):
        CASE_FLT_FN (BUILT_IN_RINT):
        CASE_FLT_FN (BUILT_IN_ROUND):
        CASE_FLT_FN (BUILT_IN_SCALB):
        CASE_FLT_FN (BUILT_IN_SCALBLN):
        CASE_FLT_FN (BUILT_IN_SCALBN):
        CASE_FLT_FN (BUILT_IN_SIGNBIT):
        CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
        CASE_FLT_FN (BUILT_IN_SINH):
        CASE_FLT_FN (BUILT_IN_TANH):
        CASE_FLT_FN (BUILT_IN_TRUNC):
        /* True if the 1st argument is nonnegative.  */
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_FMAX):
        /* True if the 1st OR 2nd arguments are nonnegative.  */
        return (tree_expr_nonnegative_warnv_p (arg0,
                                               strict_overflow_p)
                || (tree_expr_nonnegative_warnv_p (arg1,
                                                   strict_overflow_p)));

        CASE_FLT_FN (BUILT_IN_FMIN):
        /* True if the 1st AND 2nd arguments are nonnegative.  */
        return (tree_expr_nonnegative_warnv_p (arg0,
                                               strict_overflow_p)
                && (tree_expr_nonnegative_warnv_p (arg1,
                                                   strict_overflow_p)));

        CASE_FLT_FN (BUILT_IN_COPYSIGN):
        /* True if the 2nd argument is nonnegative.  */
        return tree_expr_nonnegative_warnv_p (arg1,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_POWI):
        /* True if the 1st argument is nonnegative or the second
           argument is an even integer.  */
        if (TREE_CODE (arg1) == INTEGER_CST
            && (TREE_INT_CST_LOW (arg1) & 1) == 0)
          return true;
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

        CASE_FLT_FN (BUILT_IN_POW):
        /* True if the 1st argument is nonnegative or the second
           argument is an even integer valued real.  */
        if (TREE_CODE (arg1) == REAL_CST)
          {
            REAL_VALUE_TYPE c;
            HOST_WIDE_INT n;

            c = TREE_REAL_CST (arg1);
            n = real_to_integer (&c);
            if ((n & 1) == 0)
              {
                REAL_VALUE_TYPE cint;
                real_from_integer (&cint, VOIDmode, n, SIGNED);
                if (real_identical (&c, &cint))
                  return true;
              }
          }
        return tree_expr_nonnegative_warnv_p (arg0,
                                              strict_overflow_p);

      default:
        break;
      }
  return tree_simple_nonnegative_warnv_p (CALL_EXPR,
                                          type);
}
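/* For example, pow (x, 4.0) is recognized as non-negative for any X,
   because 4.0 is an even integer valued REAL_CST, whereas pow (x, 3.0)
   falls back to asking whether X itself is non-negative.  */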
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

static bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
        tree temp = TARGET_EXPR_SLOT (t);
        t = TARGET_EXPR_INITIAL (t);

        /* If the initializer is non-void, then it's a normal expression
           that will be assigned to the slot.  */
        if (!VOID_TYPE_P (t))
          return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

        /* Otherwise, the initializer sets the slot in some way.  One common
           way is an assignment statement at the end of the initializer.  */
        while (1)
          {
            if (TREE_CODE (t) == BIND_EXPR)
              t = expr_last (BIND_EXPR_BODY (t));
            else if (TREE_CODE (t) == TRY_FINALLY_EXPR
                     || TREE_CODE (t) == TRY_CATCH_EXPR)
              t = expr_last (TREE_OPERAND (t, 0));
            else if (TREE_CODE (t) == STATEMENT_LIST)
              t = expr_last (t);
            else
              break;
          }
        if (TREE_CODE (t) == MODIFY_EXPR
            && TREE_OPERAND (t, 0) == temp)
          return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                                strict_overflow_p);

        return false;
      }

    case CALL_EXPR:
      {
        tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
        tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;

        return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
                                              get_callee_fndecl (t),
                                              arg0,
                                              arg1,
                                              strict_overflow_p);
      }
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                            strict_overflow_p);
    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
                                            strict_overflow_p);
    case SAVE_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
                                            strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code;

  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              TREE_OPERAND (t, 1),
                                              strict_overflow_p);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
                                             TREE_TYPE (t),
                                             TREE_OPERAND (t, 0),
                                             strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              TREE_OPERAND (t, 1),
                                              strict_overflow_p);
    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
                                             TREE_TYPE (t),
                                             TREE_OPERAND (t, 0),
                                             strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
    }
}
/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "non-negative"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
/* Return true when (CODE OP0) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.c.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
                            bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
                                        strict_overflow_p);

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);
        tree outer_type = type;

        return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
                && tree_expr_nonzero_warnv_p (op0,
                                              strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
                                        strict_overflow_p);

    default:
      break;
    }

  return false;
}
/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.c.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonzero_warnv_p (enum tree_code code,
                             tree type,
                             tree op0,
                             tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          /* With the presence of negative values it is hard
             to say something.  */
          sub_strict_overflow_p = false;
          if (!tree_expr_nonnegative_warnv_p (op0,
                                              &sub_strict_overflow_p)
              || !tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
            return false;
          /* One of operands must be positive and the other non-negative.  */
          /* We don't set *STRICT_OVERFLOW_P here: even if this value
             overflows, on a twos-complement machine the sum of two
             nonnegative numbers can never be zero.  */
          return (tree_expr_nonzero_warnv_p (op0,
                                             strict_overflow_p)
                  || tree_expr_nonzero_warnv_p (op1,
                                                strict_overflow_p));
        }
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          if (tree_expr_nonzero_warnv_p (op0,
                                         strict_overflow_p)
              && tree_expr_nonzero_warnv_p (op1,
                                            strict_overflow_p))
            {
              *strict_overflow_p = true;
              return true;
            }
        }
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (op1,
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
                                     &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;

          /* When both operands are nonzero, then MAX must be too.  */
          if (tree_expr_nonzero_warnv_p (op1,
                                         strict_overflow_p))
            return true;

          /* MAX where operand 0 is positive is positive.  */
          return tree_expr_nonnegative_warnv_p (op0,
                                                strict_overflow_p);
        }
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
                                          &sub_strict_overflow_p)
               && tree_expr_nonnegative_warnv_p (op1,
                                                 &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
                                         strict_overflow_p)
              || tree_expr_nonzero_warnv_p (op0,
                                            strict_overflow_p));

    default:
      break;
    }

  return false;
}
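/* For instance, the MULT_EXPR case above treats X * Y as nonzero when
   both X and Y are nonzero signed values, but only by assuming that
   signed overflow is undefined (the product could otherwise wrap to
   zero), which is why it sets *STRICT_OVERFLOW_P.  */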
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.c.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
        tree base = TREE_OPERAND (t, 0);
        if (!DECL_P (base))
          base = get_base_address (base);

        if (!base)
          return false;

        /* Weak declarations may link to NULL.  Other things may also be NULL
           so protect with -fdelete-null-pointer-checks; but not variables
           allocated on the stack.  */
        if (DECL_P (base)
            && (flag_delete_null_pointer_checks
                || (DECL_CONTEXT (base)
                    && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
                    && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
          return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);

        /* Constants are never weak.  */
        if (CONSTANT_CLASS_P (base))
          return true;

        break;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    default:
      break;
    }

  return false;
}
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
        string = string_constant (exp1, &index);
      else
        {
          tree low_bound = array_ref_low_bound (exp);
          index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

          /* Optimize the special-case of a zero lower bound.

             We convert the low_bound to sizetype to avoid some problems
             with constant folding.  (E.g. suppose the lower bound is 1,
             and its mode is QI.  Without the conversion, (ARRAY
             +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
             +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
          if (! integer_zerop (low_bound))
            index = size_diffop_loc (loc, index,
                                     fold_convert_loc (loc, sizetype,
                                                       low_bound));

          string = exp1;
        }

      if (string
          && TYPE_MODE (TREE_TYPE (exp))
             == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
          && TREE_CODE (string) == STRING_CST
          && TREE_CODE (index) == INTEGER_CST
          && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
          && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
              == MODE_INT)
          && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
        return build_int_cst_type (TREE_TYPE (exp),
                                   (TREE_STRING_POINTER (string)
                                    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        bool overflow;
        wide_int val = wi::neg (arg0, &overflow);
        t = force_fit_type (type, val, 1,
                            (overflow | TREE_OVERFLOW (arg0))
                            && !TYPE_UNSIGNED (type));
        break;
      }

    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
        FIXED_VALUE_TYPE f;
        bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
                                            &(TREE_FIXED_CST (arg0)), NULL,
                                            TYPE_SATURATING (type));
        t = build_fixed (type, f);
        /* Propagate overflow flags.  */
        if (overflow_p | TREE_OVERFLOW (arg0))
          TREE_OVERFLOW (t) = 1;
        break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        /* If the value is unsigned or non-negative, then the absolute value
           is the same as the ordinary value.  */
        if (!wi::neg_p (arg0, TYPE_SIGN (type)))
          t = arg0;

        /* If the value is negative, then the absolute value is
           its negation.  */
        else
          {
            bool overflow;
            wide_int val = wi::neg (arg0, &overflow);
            t = force_fit_type (type, val, -1,
                                overflow | TREE_OVERFLOW (arg0));
          }
      }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
        t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
        t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (const_tree arg0, tree type)
{
  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
}
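/* For example, fold_not_const on the "int" constant 5 produces -6
   (bitwise complement in two's complement), with any TREE_OVERFLOW flag
   propagated from the operand by force_fit_type.  */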
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case ORDERED_EXPR:
              result = 0;
              break;

            case NE_EXPR:
            case UNORDERED_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNEQ_EXPR:
              result = 1;
              break;

            case LT_EXPR:
            case LE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
            case LTGT_EXPR:
              if (flag_trapping_math)
                return NULL_TREE;
              result = 0;
              break;

            default:
              gcc_unreachable ();
            }

          return constant_boolean_node (result, type);
        }

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
                                          TREE_REALPART (op0),
                                          TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
                                          TREE_IMAGPART (op0),
                                          TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
        return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
        return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
        return NULL_TREE;
    }

  if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
    {
      unsigned count = VECTOR_CST_NELTS (op0);
      tree *elts = XALLOCAVEC (tree, count);
      gcc_assert (VECTOR_CST_NELTS (op1) == count
                  && TYPE_VECTOR_SUBPARTS (type) == count);

      for (unsigned i = 0; i < count; i++)
        {
          tree elem_type = TREE_TYPE (type);
          tree elem0 = VECTOR_CST_ELT (op0, i);
          tree elem1 = VECTOR_CST_ELT (op1, i);

          tree tem = fold_relational_const (code, elem_type,
                                            elem0, elem1);

          if (tem == NULL_TREE)
            return NULL_TREE;

          elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
        }

      return build_vector (type, elts);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     Otherwise return T.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else
        result = tree_int_cst_lt (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside the
     return, or the right hand side of the modify expression inside the
     return, has side effects.  If either has none, we don't need to wrap
     the expression in a cleanup point expression.  Note we don't check the
     left hand side of the modify because it should always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
        return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
        return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

static tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype)
               && (!in_gimple_form
                   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          if (in_gimple_form
              && TREE_CODE (min_val) != INTEGER_CST)
            return NULL_TREE;
          return build4_loc (loc, ARRAY_REF, type, op, min_val,
                             NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && type == TREE_TYPE (optype))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
                                  index);
        }
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
        {
          tree op00type;
          op00 = TREE_OPERAND (op00, 0);
          op00type = TREE_TYPE (op00);

          /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
          if (TREE_CODE (op00type) == VECTOR_TYPE
              && type == TREE_TYPE (op00type))
            {
              HOST_WIDE_INT offset = tree_to_shwi (op01);
              tree part_width = TYPE_SIZE (type);
              unsigned HOST_WIDE_INT part_widthi
                = tree_to_shwi (part_width) / BITS_PER_UNIT;
              unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
              tree index = bitsize_int (indexi);

              if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
                return fold_build3_loc (loc,
                                        BIT_FIELD_REF, type, op00,
                                        part_width, index);
            }
          /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
          else if (TREE_CODE (op00type) == COMPLEX_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree size = TYPE_SIZE_UNIT (type);
              if (tree_int_cst_equal (size, op01))
                return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
            }
          /* ((foo *)&fooarray)[1] => fooarray[1] */
          else if (TREE_CODE (op00type) == ARRAY_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree type_domain = TYPE_DOMAIN (op00type);
              tree min_val = size_zero_node;
              if (type_domain && TYPE_MIN_VALUE (type_domain))
                min_val = TYPE_MIN_VALUE (type_domain);
              op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
                                     TYPE_SIZE_UNIT (type));
              op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
              return build4_loc (loc, ARRAY_REF, type, op00, op01,
                                 NULL_TREE, NULL_TREE);
            }
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
          || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
          && TREE_CODE (min_val) != INTEGER_CST)
        return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
                         NULL_TREE);
    }

  return NULL_TREE;
}
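/* For example, for a vector "v4si v", *(int *)&v simplifies to
   BIT_FIELD_REF <v, 32, 0> and ((int *)&v)[1] (a POINTER_PLUS_EXPR with
   byte offset 4) to BIT_FIELD_REF <v, 32, 32>, per the vector cases
   annotated above.  */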
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}
/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.  */

tree
round_up_loc (location_t loc, tree value, unsigned int divisor)
{
  tree div = NULL_TREE;

  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          wide_int val = value;
          bool overflow_p;

          if ((val & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          val &= ~(divisor - 1);
          val += divisor;
          if (val == 0)
            overflow_p = true;

          return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop_loc (loc, PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
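/* Worked example: rounding 21 up to a multiple of 8 masks down to 16 and
   adds 8, giving 24 (symbolically, (VALUE + 7) & -8 in the non-constant
   path); for the non-power-of-two divisor 6, CEIL_DIV_EXPR (21, 6) * 6
   = 4 * 6 = 24.  */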
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
                                arg0 ? arg0 : TREE_OPERAND (exp, 0),
                                arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0,
                                arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3_loc (loc,
                                COND_EXPR, TREE_TYPE (exp),
                                TREE_OPERAND (exp, 0),
                                arg0 ? arg0 : TREE_OPERAND (exp, 1),
                                arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
        {
        CASE_FLT_FN (BUILT_IN_COPYSIGN):
          /* Strip copysign function call, return the 1st argument.  */
          arg0 = CALL_EXPR_ARG (exp, 0);
          arg1 = CALL_EXPR_ARG (exp, 1);
          return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

        default:
          /* Strip sign ops from the argument of "odd" math functions.  */
          if (negate_mathfn_p (fcode))
            {
              arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
              if (arg0)
                return build_call_expr_loc (loc, get_callee_fndecl (exp), 1,
                                            arg0);