/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "coretypes.h"
#include "langhooks.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
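
/* Example of the encoding: COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE, so
   OR-ing the encodings of "<" and "==" yields "<=", and AND-ing
   COMPCODE_LE & COMPCODE_GE yields COMPCODE_EQ.  combine_comparisons
   below relies on exactly these bit operations.  */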

static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
extern tree make_range (tree, int *, tree *, tree *, bool *);
extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
                          tree, tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
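
/* A worked example of the macro above: with 32-bit signed operands
   a = 0x7fffffff and b = 1, the wrapped sum is 0x80000000.  Then
   ~(a ^ b) = 0x80000001 has the sign bit set (a and b agree in sign)
   and a ^ sum = 0xffffffff has it set as well (a and sum disagree),
   so their AND is negative and the macro reports the overflow.  */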

/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is according to operand two, that
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
  if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
      && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
    uns = false;

  quo = double_int_divmod (tree_to_double_int (arg1),
                           tree_to_double_int (arg2),
                           uns, code, &rem);

  if (double_int_zero_p (rem))
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
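
/* For example, with arg1 == 12 and arg2 == 4 the remainder is zero and
   a TRUNC_DIV_EXPR folds to the constant 3; with arg1 == 13 the
   function returns NULL_TREE and the caller must keep the division.  */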

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
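
/* A sketch of how callers typically use the deferral machinery above
   (illustrative only; "keep_p" stands for whatever condition the caller
   uses to decide that the folded result is kept):

     fold_defer_overflow_warnings ();
     tem = fold (expr);
     fold_undefer_overflow_warnings (tem != NULL_TREE && keep_p, stmt, 0);

   Passing 0 as CODE lets the deferred warning's own level decide
   whether it is issued.  */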

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
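
/* Illustration: for a 32-bit signed type, INT_MIN has only the sign bit
   set, so val == (unsigned HOST_WIDE_INT) 1 << 31 and the function
   returns false, because -INT_MIN is not representable; every other
   value of the type negates safely.  */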

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
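
/* Example of the RSHIFT_EXPR case above: with 32-bit int x,
   -((int) x >> 31) is 1 when x is negative and 0 otherwise, which is
   exactly (unsigned) x >> 31, so the negation is folded away by
   flipping the signedness of the shift.  */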

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    {
      tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
      SET_EXPR_LOCATION (tem, loc);
    }
  return fold_convert_loc (loc, type, tem);
}

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
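
/* For instance, splitting IN = x + 3 with CODE == PLUS_EXPR stores 3 in
   *LITP, leaves *CONP null and returns x as the variable part, while
   splitting x - 3 stores the literal 3 in *MINUS_LITP instead.  */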

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  tree tem;

  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t2),
                          fold_convert_loc (loc, type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
          else
            tem = build2 (code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
          goto associate_trees_exit;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      tem = build2 (code, type, fold_convert_loc (loc, type, t1),
                    fold_convert_loc (loc, type, t2));
      goto associate_trees_exit;
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
 associate_trees_exit:
  protected_set_expr_location (tem, loc);
  return tem;
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = double_int_ior (op1, op2);
      break;

    case BIT_XOR_EXPR:
      res = double_int_xor (op1, op2);
      break;

    case BIT_AND_EXPR:
      res = double_int_and (op1, op2);
      break;

    case RSHIFT_EXPR:
      res = double_int_rshift (op1, double_int_to_shwi (op2),
                               TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = double_int_lshift (op1, double_int_to_shwi (op2),
                               TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = double_int_rrotate (op1, double_int_to_shwi (op2),
                                TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = double_int_lrotate (op1, double_int_to_shwi (op2),
                                TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      overflow = add_double (op1.low, op1.high, op2.low, op2.high,
                             &res.low, &res.high);
      break;

    case MINUS_EXPR:
      neg_double (op2.low, op2.high, &res.low, &res.high);
      add_double (op1.low, op1.high, res.low, res.high,
                  &res.low, &res.high);
      overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
      break;

    case MULT_EXPR:
      overflow = mul_double (op1.low, op1.high, op2.low, op2.high,
                             &res.low, &res.high);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (double_int_zero_p (op2))
        return NULL_TREE;
      if (double_int_one_p (op2))
        {
          res = op1;
          break;
        }
      if (double_int_equal_p (op1, op2)
          && ! double_int_zero_p (op1))
        {
          res = double_int_one;
          break;
        }
      overflow = div_and_round_double (code, uns,
                                       op1.low, op1.high, op2.low, op2.high,
                                       &res.low, &res.high,
                                       &tmp.low, &tmp.high);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (double_int_zero_p (op2))
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       op1.low, op1.high, op2.low, op2.high,
                                       &tmp.low, &tmp.high,
                                       &res.low, &res.high);
      break;

    case MIN_EXPR:
      res = double_int_min (op1, op2, uns);
      break;

    case MAX_EXPR:
      res = double_int_max (op1, op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), res.low, res.high);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), res.low, res.high, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
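
/* For example, int_const_binop (PLUS_EXPR, two, three, 0), where TWO and
   THREE are INTEGER_CSTs of the same type, folds to the constant 5,
   while any division by a zero constant yields NULL_TREE rather than a
   folded result.  */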

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, 0);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fall through.  */

        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree elements1, elements2, list = NULL_TREE;

      if (TREE_CODE (arg2) != VECTOR_CST)
        return NULL_TREE;

      elements1 = TREE_VECTOR_CST_ELTS (arg1);
      elements2 = TREE_VECTOR_CST_ELTS (arg2);

      for (i = 0; i < count; i++)
        {
          tree elem1, elem2, elem;

          /* The trailing elements can be empty and should be treated as 0.  */
          if (!elements1)
            elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                        integer_zero_node);
          else
            {
              elem1 = TREE_VALUE (elements1);
              elements1 = TREE_CHAIN (elements1);
            }

          if (!elements2)
            elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                        integer_zero_node);
          else
            {
              elem2 = TREE_VALUE (elements2);
              elements2 = TREE_CHAIN (elements2);
            }

          elem = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and return NULL_TREE.  */
          if (elem == NULL_TREE)
            return NULL_TREE;

          list = tree_cons (NULL_TREE, elem, list);
        }

      return build_vector (type, nreverse (list));
    }

  return NULL_TREE;
}
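
/* The vector case folds element-wise: adding the V4SI constants
   {1,2,3,4} and {10,20,30,40} yields {11,22,33,44}; if const_binop
   cannot fold any single element pair, the whole fold fails and
   NULL_TREE is returned.  */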

/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
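
/* For example, folding PLUS_EXPR over size_int (4) and size_int (8)
   reaches int_const_binop and yields size_int (12), while adding a zero
   constant just returns the other operand unchanged via the fast paths
   above.  */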

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
                             TREE_INT_CST_HIGH (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val.low, val.high, -1,
                             overflow | TREE_OVERFLOW (arg1));
  return t;
}
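
/* Under these saturating semantics, converting the double 1e30 to a
   32-bit integer type folds to INT_MAX with TREE_OVERFLOW set, and
   converting a NaN folds to 0, likewise flagged as overflow.  */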

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  enum machine_mode mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
    {
      temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
                                      HOST_BITS_PER_DOUBLE_INT,
                                      SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do this by adding 1 to temp when the fractional bits are not
     all zero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && double_int_negative_p (temp_trunc)
      && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
    temp = double_int_add (temp, double_int_one);

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp.low, temp.high, -1,
                             (double_int_negative_p (temp)
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}

/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}

/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return fold_build1_loc (loc, NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
          && (TYPE_ADDR_SPACE (TREE_TYPE (type))
              != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
        return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* Fall through.  */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, REALPART_EXPR,
                                                  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == FIXED_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1_loc (loc, NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
          || TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            goto fold_convert_exit;
        }

      switch (TREE_CODE (orig))
        {
        case FIXED_POINT_TYPE:
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case BOOLEAN_TYPE:
        case REAL_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
        case FIXED_POINT_TYPE:
          return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                  fold_convert_loc (loc, TREE_TYPE (type), arg),
                                  fold_convert_loc (loc, TREE_TYPE (type),
                                                    integer_zero_node));
        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 0));
                ipart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 1));
                return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
            ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
            return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      if (TREE_CODE (tem) == MODIFY_EXPR)
        goto fold_convert_exit;
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      gcc_unreachable ();
    }
 fold_convert_exit:
  protected_set_expr_location (tem, loc);
  return tem;
}

/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
    case SSA_NAME:

    case COMPONENT_REF:
    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
    case OBJ_TYPE_REF:

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case SAVE_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case TARGET_EXPR:
    case COND_EXPR:
    case BIND_EXPR:
      break;

    default:
      /* Assume the worst for front-end tree codes.  */
      if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
        break;
      return false;
    }

  return true;
}

/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue_loc (location_t loc, tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  x = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
  SET_EXPR_LOCATION (x, loc);
  return x;
}

/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue_loc (location_t loc, tree x)
{
  if (pedantic_lvalues)
    return non_lvalue_loc (loc, x);
  protected_set_expr_location (x, loc);
  return x;
}

/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}

/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR: case NE_EXPR:
    case ORDERED_EXPR: case UNORDERED_EXPR:
    case LTGT_EXPR: case UNEQ_EXPR:
      return code;
    case GT_EXPR: return LT_EXPR;
    case GE_EXPR: return LE_EXPR;
    case LT_EXPR: return GT_EXPR;
    case LE_EXPR: return GE_EXPR;
    case UNGT_EXPR: return UNLT_EXPR;
    case UNGE_EXPR: return UNLE_EXPR;
    case UNLT_EXPR: return UNGT_EXPR;
    case UNLE_EXPR: return UNGE_EXPR;
    default: gcc_unreachable ();
    }
}

/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}

/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (location_t loc,
                     enum tree_code code, enum tree_code lcode,
                     enum tree_code rcode, tree truth_type,
                     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  int compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
         which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
        compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
        compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
         under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
                   && (lcompcode != COMPCODE_EQ)
                   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
                   && (rcompcode != COMPCODE_EQ)
                   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
                  && (compcode != COMPCODE_EQ)
                  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
         such that the RHS, if evaluated, will never trap.  For
         example, in ORD (x, y) && (x < y), we evaluate the RHS only
         if neither x nor y is NaN.  (This is a mixed blessing: for
         example, the expression above will never trap, hence
         optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
          || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
        rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
         trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
          && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
        return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
        return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    {
      enum tree_code tcode;

      tcode = compcode_to_comparison ((enum comparison_code) compcode);
      return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
    }
}

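/* Usage sketch (illustrative, not part of the original sources): for
   "x <= y && x >= y" on an integer type,

     combine_comparisons (loc, TRUTH_ANDIF_EXPR, LE_EXPR, GE_EXPR,
                          boolean_type_node, x, y)

   returns the tree for "x == y", because COMPCODE_LE & COMPCODE_GE
   is COMPCODE_EQ.  */
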
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */

int
operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
      || TREE_TYPE (arg0) == error_mark_node
      || TREE_TYPE (arg1) == error_mark_node)
    return 0;

  /* Similar, if either does not have a type (like a released SSA name),
     they aren't equal.  */
  if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
    return 0;

  /* Check equality of integer constants before bailing out due to
     precision differences.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    return tree_int_cst_equal (arg0, arg1);

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  As pointers
     strictly don't have a signedness, require either two pointers or
     two non-pointers as well.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
      || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* We cannot consider pointers to different address spaces equal.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
      && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
          != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
    return 0;

  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
  if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
        return operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 1), flags)
               && operand_equal_p (TREE_OPERAND (arg0, 1),
                                   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
         Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
          || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
        return tree_int_cst_equal (arg0, arg1);

      case FIXED_CST:
        return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
                                       TREE_FIXED_CST (arg1));

      case REAL_CST:
        if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
                                   TREE_REAL_CST (arg1)))
          return 1;

        if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
          {
            /* If we do not distinguish between signed and unsigned zero,
               consider them equal.  */
            if (real_zerop (arg0) && real_zerop (arg1))
              return 1;
          }
        return 0;

      case VECTOR_CST:
        {
          tree v1, v2;

          v1 = TREE_VECTOR_CST_ELTS (arg0);
          v2 = TREE_VECTOR_CST_ELTS (arg1);
          while (v1 && v2)
            {
              if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
                                    flags))
                return 0;
              v1 = TREE_CHAIN (v1);
              v2 = TREE_CHAIN (v2);
            }

          return v1 == v2;
        }

      case COMPLEX_CST:
        return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
                                 flags)
                && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
                                    flags));

      case STRING_CST:
        return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
                && ! memcmp (TREE_STRING_POINTER (arg0),
                             TREE_STRING_POINTER (arg1),
                             TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
        return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
                                0);
      default:
        break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, they both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
                                    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)                            \
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
        {
        CASE_CONVERT:
        case FIX_TRUNC_EXPR:
          if (TYPE_UNSIGNED (TREE_TYPE (arg0))
              != TYPE_UNSIGNED (TREE_TYPE (arg1)))
            return 0;
          break;
        default:
          break;
        }

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
        return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 1), flags)
              && operand_equal_p (TREE_OPERAND (arg0, 1),
                                  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
         dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
          || TREE_SIDE_EFFECTS (arg1))
        return 0;

      switch (TREE_CODE (arg0))
        {
        case INDIRECT_REF:
        case ALIGN_INDIRECT_REF:
        case MISALIGNED_INDIRECT_REF:
          return OP_SAME (0);

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          /* Operands 2 and 3 may be null.
             Compare the array index by value if it is constant first as we
             may have different types but same value here.  */
          return (OP_SAME (0)
                  && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
                                          TREE_OPERAND (arg1, 1))
                      || OP_SAME (1))
                  && OP_SAME_WITH_NULL (2)
                  && OP_SAME_WITH_NULL (3));

        case COMPONENT_REF:
          /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
             may be NULL when we're called to compare MEM_EXPRs.  */
          return OP_SAME_WITH_NULL (0)
                 && OP_SAME (1)
                 && OP_SAME_WITH_NULL (2);

        case BIT_FIELD_REF:
          return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

        default:
          return 0;
        }

    case tcc_expression:
      switch (TREE_CODE (arg0))
        {
        case ADDR_EXPR:
        case TRUTH_NOT_EXPR:
          return OP_SAME (0);

        case TRUTH_ANDIF_EXPR:
        case TRUTH_ORIF_EXPR:
          return OP_SAME (0) && OP_SAME (1);

        case TRUTH_AND_EXPR:
        case TRUTH_OR_EXPR:
        case TRUTH_XOR_EXPR:
          if (OP_SAME (0) && OP_SAME (1))
            return 1;

          /* Otherwise take into account this is a commutative operation.  */
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 1), flags)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 0), flags));

        case COND_EXPR:
          return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

        default:
          return 0;
        }

    case tcc_vl_exp:
      switch (TREE_CODE (arg0))
        {
        case CALL_EXPR:
          /* If the CALL_EXPRs call different functions, then they
             clearly can not be equal.  */
          if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
                                 flags))
            return 0;

          {
            unsigned int cef = call_expr_flags (arg0);
            if (flags & OEP_PURE_SAME)
              cef &= ECF_CONST | ECF_PURE;
            else
              cef &= ECF_CONST;
            if (!cef)
              return 0;
          }

          /* Now see if all the arguments are the same.  */
          {
            const_call_expr_arg_iterator iter0, iter1;
            const_tree a0, a1;
            for (a0 = first_const_call_expr_arg (arg0, &iter0),
                   a1 = first_const_call_expr_arg (arg1, &iter1);
                 a0 && a1;
                 a0 = next_const_call_expr_arg (&iter0),
                   a1 = next_const_call_expr_arg (&iter1))
              if (! operand_equal_p (a0, a1, flags))
                return 0;

            /* If we get here and both argument lists are exhausted
               then the CALL_EXPRs are equal.  */
            return ! (a0 || a1);
          }

        default:
          return 0;
        }

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
              && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
              && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
              && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}

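/* Usage sketch (illustrative, not part of the original sources): the
   "indistinguishable" test matters for floating point.  With trees for
   -0.0 and 0.0,

     operand_equal_p (minus_zero, plus_zero, OEP_ONLY_CONST)

   returns 0 when the target honors signed zeros, even though
   -0.0 == 0.0 numerically, because REAL_VALUES_IDENTICAL distinguishes
   the sign bit.  */
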
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
         to match the longer operand.  */
      primarg1 = fold_convert (signed_or_unsigned_type_for
                               (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
        return 1;
    }

  return 0;
}

/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
               || code == COMPOUND_EXPR))
    tclass = tcc_binary;

  else if (tclass == tcc_expression && code == SAVE_EXPR
           && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
         too complex to handle.  */
      if (*cval1 || *cval2)
        return 0;

      tclass = tcc_unary;
      *save_p = 1;
    }

  switch (tclass)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
              && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
        return (twoval_comparison_p (TREE_OPERAND (arg, 0),
                                     cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                        cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 2),
                                        cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
         the second operand, we know *CVAL1 can't be zero.  It must be that
         one side of the comparison is each of the values; test for the
         case where this isn't true by failing if the two operands
         are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
                           TREE_OPERAND (arg, 1), 0))
        return 0;

      if (*cval1 == 0)
        *cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
        ;
      else
        return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
        ;
      else
        return 0;

      return 1;

    default:
      return 0;
    }
}

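/* Example (illustrative, not from the original sources): for
   ARG = "(a < b) | (a == b)" the walk records *CVAL1 = a and
   *CVAL2 = b, and every comparison in the tree uses exactly those two
   values, so the function returns 1.  For "(a < b) | (c < d)" it fails
   at the third distinct operand, c.  */
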
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (location_t loc, tree arg, tree old0, tree new0,
            tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    tclass = tcc_binary;

  switch (tclass)
    {
    case tcc_unary:
      return fold_build1_loc (loc, code, type,
                              eval_subst (loc, TREE_OPERAND (arg, 0),
                                          old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2_loc (loc, code, type,
                              eval_subst (loc, TREE_OPERAND (arg, 0),
                                          old0, new0, old1, new1),
                              eval_subst (loc, TREE_OPERAND (arg, 1),
                                          old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
        {
        case SAVE_EXPR:
          return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
                             old1, new1);

        case COMPOUND_EXPR:
          return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
                             old1, new1);

        case COND_EXPR:
          return fold_build3_loc (loc, code, type,
                                  eval_subst (loc, TREE_OPERAND (arg, 0),
                                              old0, new0, old1, new1),
                                  eval_subst (loc, TREE_OPERAND (arg, 1),
                                              old0, new0, old1, new1),
                                  eval_subst (loc, TREE_OPERAND (arg, 2),
                                              old0, new0, old1, new1));
        default:
          break;
        }

      /* Fall through - ???  */

    case tcc_comparison:
      {
        tree arg0 = TREE_OPERAND (arg, 0);
        tree arg1 = TREE_OPERAND (arg, 1);

        /* We need to check both for exact equality and tree equality.  The
           former will be true if the operand has a side-effect.  In that
           case, we know the operand occurred exactly once.  */

        if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
          arg0 = new0;
        else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
          arg0 = new1;

        if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
          arg1 = new0;
        else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
          arg1 = new1;

        return fold_build2_loc (loc, code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}

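/* Usage sketch (hypothetical operands, not from the original sources):
   with ARG = "(x < y) & (x == y)", OLD0 = x, NEW0 = a, OLD1 = y and
   NEW1 = b, eval_subst rebuilds "(a < b) & (a == b)" through
   fold_build2_loc, so the result is folded as it is reconstructed;
   comparison operands matching neither OLD value are left unchanged.  */
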
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement casted to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    {
      t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
      goto omit_one_operand_exit;
    }

  if (TREE_SIDE_EFFECTS (omitted))
    {
      t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
      goto omit_one_operand_exit;
    }

  return non_lvalue_loc (loc, t);

 omit_one_operand_exit:
  protected_set_expr_location (t, loc);
  return t;
}

/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
                               tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement casted to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    {
      t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
      goto pedantic_omit_one_operand_exit;
    }

  if (TREE_SIDE_EFFECTS (omitted))
    {
      t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
      goto pedantic_omit_one_operand_exit;
    }

  return pedantic_non_lvalue_loc (loc, t);

 pedantic_omit_one_operand_exit:
  protected_set_expr_location (t, loc);
  return t;
}

/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands_loc (location_t loc, tree type, tree result,
                       tree omitted1, tree omitted2)
{
  tree t = fold_convert_loc (loc, type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    {
      t = build2 (COMPOUND_EXPR, type, omitted2, t);
      SET_EXPR_LOCATION (t, loc);
    }
  if (TREE_SIDE_EFFECTS (omitted1))
    {
      t = build2 (COMPOUND_EXPR, type, omitted1, t);
      SET_EXPR_LOCATION (t, loc);
    }

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
}

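/* Usage sketch (illustrative, not from the original sources): when
   folding "f () * 0" the call must be kept for its side effects, so

     omit_one_operand_loc (loc, type, integer_zero_node, call)

   yields the COMPOUND_EXPR "(f (), 0)" rather than plain "0"; without
   side effects it would return just the converted constant.  */
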
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

static tree
fold_truth_not_expr (location_t loc, tree arg)
{
  tree t, type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  location_t loc1, loc2;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
          && flag_trapping_math
          && code != ORDERED_EXPR && code != UNORDERED_EXPR
          && code != NE_EXPR && code != EQ_EXPR)
        return NULL_TREE;

      code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
      if (code == ERROR_MARK)
        return NULL_TREE;

      t = build2 (code, type, TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
      SET_EXPR_LOCATION (t, loc);
      return t;
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
      if (loc1 == UNKNOWN_LOCATION)
        loc1 = loc;
      if (loc2 == UNKNOWN_LOCATION)
        loc2 = loc;
      t = build2 (TRUTH_OR_EXPR, type,
                  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                  invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
      break;

    case TRUTH_OR_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
      if (loc1 == UNKNOWN_LOCATION)
        loc1 = loc;
      if (loc2 == UNKNOWN_LOCATION)
        loc2 = loc;
      t = build2 (TRUTH_AND_EXPR, type,
                  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                  invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
      break;

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
         unless the second operand is a TRUTH_NOT_EXPR in which case our
         result is the XOR of the first operand with the inside of the
         negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
        t = build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
                    TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
        t = build2 (TRUTH_XOR_EXPR, type,
                    invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
                    TREE_OPERAND (arg, 1));
      break;

    case TRUTH_ANDIF_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
      if (loc1 == UNKNOWN_LOCATION)
        loc1 = loc;
      if (loc2 == UNKNOWN_LOCATION)
        loc2 = loc;
      t = build2 (TRUTH_ORIF_EXPR, type,
                  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                  invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
      break;

    case TRUTH_ORIF_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
      if (loc1 == UNKNOWN_LOCATION)
        loc1 = loc;
      if (loc2 == UNKNOWN_LOCATION)
        loc2 = loc;
      t = build2 (TRUTH_ANDIF_EXPR, type,
                  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                  invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
      break;

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
        tree arg1 = TREE_OPERAND (arg, 1);
        tree arg2 = TREE_OPERAND (arg, 2);

        loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
        loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 2));
        if (loc1 == UNKNOWN_LOCATION)
          loc1 = loc;
        if (loc2 == UNKNOWN_LOCATION)
          loc2 = loc;

        /* A COND_EXPR may have a throw as one operand, which
           then has void type.  Just leave void operands
           as they are.  */
        t = build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
                    VOID_TYPE_P (TREE_TYPE (arg1))
                    ? arg1 : invert_truthvalue_loc (loc1, arg1),
                    VOID_TYPE_P (TREE_TYPE (arg2))
                    ? arg2 : invert_truthvalue_loc (loc2, arg2));
        break;
      }

    case COMPOUND_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
      if (loc1 == UNKNOWN_LOCATION)
        loc1 = loc;
      t = build2 (COMPOUND_EXPR, type,
                  TREE_OPERAND (arg, 0),
                  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
      break;

    case NON_LVALUE_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      if (loc1 == UNKNOWN_LOCATION)
        loc1 = loc;
      return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));

    CASE_CONVERT:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
        {
          t = build1 (TRUTH_NOT_EXPR, type, arg);
          break;
        }

      /* ... fall through ...  */

    case FLOAT_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      if (loc1 == UNKNOWN_LOCATION)
        loc1 = loc;
      t = build1 (TREE_CODE (arg), type,
                  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
      break;

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
        return NULL_TREE;
      t = build2 (EQ_EXPR, type, arg, build_int_cst (type, 0));
      break;

    case SAVE_EXPR:
      t = build1 (TRUTH_NOT_EXPR, type, arg);
      break;

    case CLEANUP_POINT_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      if (loc1 == UNKNOWN_LOCATION)
        loc1 = loc;
      t = build1 (CLEANUP_POINT_EXPR, type,
                  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
      break;

    default:
      return NULL_TREE;
    }

  SET_EXPR_LOCATION (t, loc);
  return t;
}

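/* Example (illustrative, not from the original sources): the negation
   distributes by De Morgan's law; "!(a < b && c)" becomes
   "a >= b || !c" for integer operands.  For a trapping floating-point
   "<" the comparison is not inverted, NULL_TREE is returned, and the
   caller (invert_truthvalue_loc) wraps a TRUTH_NOT_EXPR instead.  */
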
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
invert_truthvalue_loc (location_t loc, tree arg)
{
  tree tem;

  if (TREE_CODE (arg) == ERROR_MARK)
    return arg;

  tem = fold_truth_not_expr (loc, arg);
  if (!tem)
    {
      tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
      SET_EXPR_LOCATION (tem, loc);
    }

  return tem;
}

/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
        (A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (location_t loc, enum tree_code code, tree type,
                     tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
          && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  common = fold_convert_loc (loc, type, common);
  left = fold_convert_loc (loc, type, left);
  right = fold_convert_loc (loc, type, right);
  return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
                          fold_build2_loc (loc, code, type, left, right));
}

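/* Example (illustrative, not from the original sources): with constants
   the distribution enables further folding:

     (X | 1) & (X | 2)  ->  X | (1 & 2)  ->  X | 0  ->  X

   distribute_bit_expr produces the "X | (1 & 2)" form and the
   subsequent folds finish the job.  */
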
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */

static tree
distribute_real_division (location_t loc, enum tree_code code, tree type,
                          tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
                          TREE_OPERAND (arg1, 1), 0))
    return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
                            fold_build2_loc (loc, code, type,
                                             TREE_OPERAND (arg0, 0),
                                             TREE_OPERAND (arg1, 0)),
                            TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
                       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
        real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
        real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2_loc (loc, MULT_EXPR, type,
                              TREE_OPERAND (arg0, 0),
                              build_real (type, r0));
    }

  return NULL_TREE;
}

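/* Example (illustrative, not from the original sources): under unsafe
   math optimizations,

     a/5.0 + b/5.0  ->  (a + b) / 5.0
     a/2.0 + a/4.0  ->  a * (1/2.0 + 1/4.0)  ->  a * 0.75

   both save a division; the rewrite is "unsafe" because the new form
   can round differently from the original.  */
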
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (location_t loc, tree inner, tree type,
                    HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
{
  tree result, bftype;

  if (bitpos == 0)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
           || POINTER_TYPE_P (TREE_TYPE (inner)))
          && host_integerp (size, 0)
          && tree_low_cst (size, 0) == bitsize)
        return fold_convert_loc (loc, type, inner);
    }

  bftype = type;
  if (TYPE_PRECISION (bftype) != bitsize
      || TYPE_UNSIGNED (bftype) == !unsignedp)
    bftype = build_nonstandard_integer_type (bitsize, 0);

  result = build3 (BIT_FIELD_REF, bftype, inner,
                   size_int (bitsize), bitsize_int (bitpos));
  SET_EXPR_LOCATION (result, loc);

  if (bftype != type)
    result = fold_convert_loc (loc, type, result);

  return result;
}

/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (location_t loc, enum tree_code code,
                            tree compare_type, tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
                                &lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
         sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
                                    &runsignedp, &rvolatilep, false);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
          || lunsignedp != runsignedp || offset != 0
          || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
        return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  if (lvolatilep
      && GET_MODE_BITSIZE (lmode) > 0
      && flag_strict_volatile_bitfields > 0)
    nmode = lmode;
  else
    nmode = get_best_mode (lbitsize, lbitpos,
                           const_p ? TYPE_ALIGN (TREE_TYPE (linner))
                           : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
                                  TYPE_ALIGN (TREE_TYPE (rinner))),
                           word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst_type (unsigned_type, -1);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
  mask = const_binop (RSHIFT_EXPR, mask,
                      size_int (nbitsize - lbitsize - lbitpos));

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return fold_build2_loc (loc, code, compare_type,
                            fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                                             make_bit_field_ref (loc, linner,
                                                                 unsigned_type,
                                                                 nbitsize,
                                                                 nbitpos, 1),
                                             mask),
                            fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                                             make_bit_field_ref (loc, rinner,
                                                                 unsigned_type,
                                                                 nbitsize,
                                                                 nbitpos, 1),
                                             mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
                                        fold_convert_loc (loc,
                                                          unsigned_type, rhs),
                                        size_int (lbitsize))))
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR,
                              fold_convert_loc (loc, signed_type, rhs),
                              size_int (lbitsize - 1));
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = const_binop (BIT_AND_EXPR,
                     const_binop (LSHIFT_EXPR,
                                  fold_convert_loc (loc, unsigned_type, rhs),
                                  size_int (lbitpos)),
                     mask);

  lhs = build2 (code, compare_type,
                build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
                rhs);
  SET_EXPR_LOCATION (lhs, loc);
  return lhs;
}

/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
                        HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
                        int *punsignedp, int *pvolatilep,
                        tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (CONVERT_EXPR_P (exp)
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
        return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
                               punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst_type (unsigned_type, -1);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                            fold_convert_loc (loc, unsigned_type, and_mask),
                            mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}

/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bits.  */

static int
all_ones_mask_p (const_tree mask, int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_cst_type (signed_type_for (type), -1);

  return
    tree_int_cst_equal (mask,
                        const_binop (RSHIFT_EXPR,
                                     const_binop (LSHIFT_EXPR, tmask,
                                                  size_int (precision - size)),
                                     size_int (precision - size)));
}

/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, const_tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
                 >> (2 * HOST_BITS_PER_WIDE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
                 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}

/* Subroutine for fold_truthop: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (const_tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
          || TREE_CODE (exp) == SSA_NAME
          || (DECL_P (exp)
              && ! TREE_ADDRESSABLE (exp)
              && ! TREE_THIS_VOLATILE (exp)
              && ! DECL_NONLOCAL (exp)
              /* Don't regard global variables as simple.  They may be
                 allocated in ways unknown to the compiler (shared memory,
                 #pragma weak, etc).  */
              && ! TREE_PUBLIC (exp)
              && ! DECL_EXTERNAL (exp)
              /* Loading a static variable is unduly expensive, but global
                 registers aren't expensive.  */
              && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}

/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
        X == 2 || X == 3 || X == 4 || X == 5
   and
        X >= 2 && X <= 5
   are converted to
        (unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */

/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
             tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
                         arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}

/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */

tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
            bool *strict_overflow_p)
{
  enum tree_code code;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;
  location_t loc = EXPR_LOCATION (exp);

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
        {
          if (TREE_OPERAND_LENGTH (exp) > 0)
            arg0 = TREE_OPERAND (exp, 0);
          if (TREE_CODE_CLASS (code) == tcc_comparison
              || TREE_CODE_CLASS (code) == tcc_unary
              || TREE_CODE_CLASS (code) == tcc_binary)
            arg0_type = TREE_TYPE (arg0);
          if (TREE_CODE_CLASS (code) == tcc_binary
              || TREE_CODE_CLASS (code) == tcc_comparison
              || (TREE_CODE_CLASS (code) == tcc_expression
                  && TREE_OPERAND_LENGTH (exp) > 1))
            arg1 = TREE_OPERAND (exp, 1);
        }

      switch (code)
        {
        case TRUTH_NOT_EXPR:
          in_p = ! in_p, exp = arg0;
          continue;

        case EQ_EXPR: case NE_EXPR:
        case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
          /* We can only do something if the range is testing for zero
             and if the second operand is an integer constant.  Note that
             saying something is "in" the range we make is done by
             complementing IN_P since it will set in the initial case of
             being not equal to zero; "out" is leaving it alone.  */
          if (low == 0 || high == 0
              || ! integer_zerop (low) || ! integer_zerop (high)
              || TREE_CODE (arg1) != INTEGER_CST)
            break;

          switch (code)
            {
            case NE_EXPR:  /* - [c, c]  */
              low = high = arg1;
              break;
            case EQ_EXPR:  /* + [c, c]  */
              in_p = ! in_p, low = high = arg1;
              break;
            case GT_EXPR:  /* - [-, c] */
              low = 0, high = arg1;
              break;
            case GE_EXPR:  /* + [c, -] */
              in_p = ! in_p, low = arg1, high = 0;
              break;
            case LT_EXPR:  /* - [c, -] */
              low = arg1, high = 0;
              break;
            case LE_EXPR:  /* + [-, c] */
              in_p = ! in_p, low = 0, high = arg1;
              break;
            default:
              gcc_unreachable ();
            }

          /* If this is an unsigned comparison, we also know that EXP is
             greater than or equal to zero.  We base the range tests we make
             on that fact, so we record it here so we can parse existing
             range tests.  We test arg0_type since often the return type
             of, e.g. EQ_EXPR, is boolean.  */
          if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
            {
              if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                  in_p, low, high, 1,
                                  build_int_cst (arg0_type, 0),
                                  NULL_TREE))
                break;

              in_p = n_in_p, low = n_low, high = n_high;

              /* If the high bound is missing, but we have a nonzero low
                 bound, reverse the range so it goes from zero to the low bound
                 minus 1.  */
              if (high == 0 && low && ! integer_zerop (low))
                {
                  in_p = ! in_p;
                  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
                                      integer_one_node, 0);
                  low = build_int_cst (arg0_type, 0);
                }
            }

          exp = arg0;
          continue;

        case NEGATE_EXPR:
          /* (-x) IN [a,b] -> x in [-b, -a]  */
          n_low = range_binop (MINUS_EXPR, exp_type,
                               build_int_cst (exp_type, 0),
                               0, high, 1);
          n_high = range_binop (MINUS_EXPR, exp_type,
                                build_int_cst (exp_type, 0),
                                0, low, 0);
          low = n_low, high = n_high;
          exp = arg0;
          continue;

        case BIT_NOT_EXPR:
          /* ~ X -> -X - 1  */
          exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
                        build_int_cst (exp_type, 1));
          SET_EXPR_LOCATION (exp, loc);
          continue;

        case PLUS_EXPR: case MINUS_EXPR:
          if (TREE_CODE (arg1) != INTEGER_CST)
            break;

          /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
             move a constant to the other side.  */
          if (!TYPE_UNSIGNED (arg0_type)
              && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
            break;

          /* If EXP is signed, any overflow in the computation is undefined,
             so we don't worry about it so long as our computations on
             the bounds don't overflow.  For unsigned, overflow is defined
             and this is exactly the right thing.  */
          n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                               arg0_type, low, 0, arg1, 0);
          n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                                arg0_type, high, 1, arg1, 0);
          if ((n_low != 0 && TREE_OVERFLOW (n_low))
              || (n_high != 0 && TREE_OVERFLOW (n_high)))
            break;

          if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
            *strict_overflow_p = true;

          /* Check for an unsigned range which has wrapped around the maximum
             value thus making n_high < n_low, and normalize it.  */
          if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
            {
              low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
                                 integer_one_node, 0);
              high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
                                  integer_one_node, 0);

              /* If the range is of the form +/- [ x+1, x ], we won't
                 be able to normalize it.  But then, it represents the
                 whole range or the empty set, so make it
                 +/- [ -, - ].  */
              if (tree_int_cst_equal (n_low, low)
                  && tree_int_cst_equal (n_high, high))
                low = high = 0;
              else
                in_p = ! in_p;
            }
          else
            low = n_low, high = n_high;

          exp = arg0;
          continue;

        CASE_CONVERT: case NON_LVALUE_EXPR:
          if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
            break;

          if (! INTEGRAL_TYPE_P (arg0_type)
              || (low != 0 && ! int_fits_type_p (low, arg0_type))
              || (high != 0 && ! int_fits_type_p (high, arg0_type)))
            break;

          n_low = low, n_high = high;

          if (n_low != 0)
            n_low = fold_convert_loc (loc, arg0_type, n_low);

          if (n_high != 0)
            n_high = fold_convert_loc (loc, arg0_type, n_high);

          /* If we're converting arg0 from an unsigned type, to exp,
             a signed type, we will be doing the comparison as unsigned.
             The tests above have already verified that LOW and HIGH
             are both positive.

             So we have to ensure that we will handle large unsigned
             values the same way that the current signed bounds treat
             negative values.  */

          if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
            {
              tree high_positive;
              tree equiv_type;
              /* For fixed-point modes, we need to pass the saturating flag
                 as the 2nd parameter.  */
              if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
                equiv_type = lang_hooks.types.type_for_mode
                             (TYPE_MODE (arg0_type),
                              TYPE_SATURATING (arg0_type));
              else
                equiv_type = lang_hooks.types.type_for_mode
                             (TYPE_MODE (arg0_type), 1);

              /* A range without an upper bound is, naturally, unbounded.
                 Since convert would have cropped a very large value, use
                 the max value for the destination type.  */
              high_positive
                = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
                  : TYPE_MAX_VALUE (arg0_type);

              if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
                high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
                                                 fold_convert_loc (loc,
                                                                   arg0_type,
                                                                   high_positive),
                                                 build_int_cst (arg0_type, 1));

              /* If the low bound is specified, "and" the range with the
                 range for which the original unsigned value will be
                 positive.  */
              if (low != 0)
                {
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                      1, n_low, n_high, 1,
                                      fold_convert_loc (loc, arg0_type,
                                                        integer_zero_node),
                                      high_positive))
                    break;

                  in_p = (n_in_p == in_p);
                }
              else
                {
                  /* Otherwise, "or" the range with the range of the input
                     that will be interpreted as negative.  */
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                      0, n_low, n_high, 1,
                                      fold_convert_loc (loc, arg0_type,
                                                        integer_zero_node),
                                      high_positive))
                    break;

                  in_p = (in_p != n_in_p);
                }
            }

          exp = arg0;
          low = n_low, high = n_high;
          continue;

        default:
          break;
        }

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
                                                 exp, 0, low, 0))
                      && integer_onep (range_binop (LE_EXPR, integer_type_node,
                                                    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}

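/* Example (illustrative, not from the original sources): for
   EXP = "x > 10" make_range returns x with *PIN_P = 0, *PLOW unset
   (minus infinity) and *PHIGH = 10: "x > 10" is "x outside [-, 10]".
   A later "x <= 20" yields "x inside [-, 20]", and merge_ranges can
   combine the two into "x inside [11, 20]".  */
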
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of,
   depending on IN_P) the range.  Return 0 if the test couldn't be created.  */

tree
build_range_check (location_t loc, tree type, tree exp, int in_p,
		   tree low, tree high)
{
  tree etype = TREE_TYPE (exp), value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (! in_p)
    {
      value = build_range_check (loc, type, exp, 1, low, high);
      if (value != 0)
	return invert_truthvalue_loc (loc, value);

      return 0;
    }

  if (low == 0 && high == 0)
    return build_int_cst (type, 1);

  if (low == 0)
    return fold_build2_loc (loc, LE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, high));

  if (high == 0)
    return fold_build2_loc (loc, GE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2_loc (loc, EQ_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
	{
	  etype = unsigned_type_for (etype);
	  high = fold_convert_loc (loc, etype, high);
	  exp = fold_convert_loc (loc, etype, exp);
	}
      return build_range_check (loc, type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
	}
      else
	{
	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
	  lo = (unsigned HOST_WIDE_INT) -1;
	}

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
	{
	  if (TYPE_UNSIGNED (etype))
	    {
	      tree signed_etype = signed_type_for (etype);
	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
		etype
		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
	      else
		etype = signed_etype;
	      exp = fold_convert_loc (loc, etype, exp);
	    }
	  return fold_build2_loc (loc, GT_EXPR, type, exp,
				  build_int_cst (etype, 0));
	}
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetic for the type of the expression.
     First make sure that arithmetic in this type is valid, then make sure
     that it wraps around.  */
  if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
    etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
					    TYPE_UNSIGNED (etype));

  if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
	 for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
			  integer_one_node, 1);
      minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
				      minv, 1, maxv, 1)))
	etype = utype;
      else
	return 0;
    }

  high = fold_convert_loc (loc, etype, high);
  low = fold_convert_loc (loc, etype, low);
  exp = fold_convert_loc (loc, etype, exp);

  value = const_binop (MINUS_EXPR, high, low);

  if (POINTER_TYPE_P (etype))
    {
      if (value != 0 && !TREE_OVERFLOW (value))
	{
	  low = fold_convert_loc (loc, sizetype, low);
	  low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
	  return build_range_check (loc, type,
				    fold_build2_loc (loc, POINTER_PLUS_EXPR,
						     etype, exp, low),
				    1, build_int_cst (etype, 0), value);
	}
      return 0;
    }

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (loc, type,
			      fold_build2_loc (loc, MINUS_EXPR, etype,
					       exp, low),
			      1, build_int_cst (etype, 0), value);

  return 0;
}
/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}

/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

bool
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the second range to the end of the first.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  low = range_successor (high1);
	  high = high0;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* We are in the weird situation where high0 > high1 but
		 high1 has no successor.  Punt.  */
	      return 0;
	    }
	}
      else if (! subset || highequal)
	{
	  low = low0;
	  high = range_predecessor (low1);
	  in_p = 1;
	  if (high == 0)
	    {
	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
	      return 0;
	    }
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  low = range_successor (high0);
	  high = high1;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* high1 > high0 but high0 has no successor.  Punt.  */
	      return 0;
	    }
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of
	 the second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_successor (high0),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    {
	      /* Canonicalize - [min, x] into - [-, x].  */
	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (low0)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (low0))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (low0,
					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
		      low0 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
			&& integer_zerop (low0))
		      low0 = 0;
		    break;
		  default:
		    break;
		  }

	      /* Canonicalize - [x, max] into - [x, -].  */
	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (high1)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (high1))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (high1,
					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
		      high1 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
						       high1, 1,
						       integer_one_node, 1)))
		      high1 = 0;
		    break;
		  default:
		    break;
		  }

	      /* The ranges might be also adjacent between the maximum and
		 minimum values of the given type.  For
		 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
		 return + [x + 1, y - 1].  */
	      if (low0 == 0 && high1 == 0)
		{
		  low = range_successor (high0);
		  high = range_predecessor (low1);
		  if (low == 0 || high == 0)
		    return 0;

		  in_p = 1;
		}
	      else
		return 0;
	    }
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
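
/* For example (illustrative, added commentary): including both
   + [0, 100] and + [50, 200] merges to + [50, 100], i.e.
   "x >= 0 && x <= 100 && x >= 50 && x <= 200" is "x >= 50 && x <= 100".
   Excluding the adjacent ranges - [0, 9] and - [10, 19] merges to the
   single exclusion - [0, 19].  */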
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is also used to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (location_t loc, tree type,
				tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && (FLOAT_TYPE_P (TREE_TYPE (arg01))
	  ? real_zerop (arg01)
	  : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	     /* In the case that A is of the form X-Y, '-A' (arg2) may
		have already been folded to Y-X, check for that.  */
	  || (TREE_CODE (arg1) == MINUS_EXPR
	      && TREE_CODE (arg2) == MINUS_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0),
				  TREE_OPERAND (arg2, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg1, 1),
				  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	tem = fold_convert_loc (loc, arg1_type, arg1);
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert_loc (loc, signed_type_for
				   (TREE_TYPE (arg1)), arg1);
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert_loc (loc, signed_type_for
				   (TREE_TYPE (arg1)), arg1);
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return negate_expr (fold_convert_loc (loc, type, tem));
      default:
	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      else if (comp_code == EQ_EXPR)
	return build_int_cst (type, 0);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
	  || (strcmp (lang_hooks.name, "GNU C++") != 0
	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
	  || ! maybe_lvalue_p (arg1)
	  || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
	{
	  comp_type = type;
	  comp_op0 = arg1;
	  comp_op1 = arg2;
	}

      switch (comp_code)
	{
	case EQ_EXPR:
	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
	case NE_EXPR:
	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
	case LE_EXPR:
	case LT_EXPR:
	case UNLE_EXPR:
	case UNLT_EXPR:
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
		    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					      fold_convert_loc (loc, type, tem));
	    }
	  break;
	case GE_EXPR:
	case GT_EXPR:
	case UNGE_EXPR:
	case UNGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
		    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					      fold_convert_loc (loc, type, tem));
	    }
	  break;
	case UNEQ_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, arg2));
	  break;
	case LTGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, arg1));
	  break;
	default:
	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	  break;
	}
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
	if (TREE_CODE (arg1) == INTEGER_CST)
	  break;
	/* We can replace A with C1 in this case.  */
	arg1 = fold_convert_loc (loc, type, arg01);
	return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
	/* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
	   MIN_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));
	  }
	break;

      case LE_EXPR:
	/* If C1 is C2 - 1, this is min(A, C2), with the same care
	   as above.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));
	  }
	break;

      case GT_EXPR:
	/* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
	   MAX_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	  }
	break;

      case GE_EXPR:
	/* If C1 is C2 + 1, this is max(A, C2), with the same care as above.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	  }
	break;
      case NE_EXPR:
	break;
      default:
	gcc_unreachable ();
      }

  return NULL_TREE;
}
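
/* For instance (illustrative, added commentary): "x > 0 ? x : -x"
   folds to ABS_EXPR <x>, and "x <= y ? x : y" folds to MIN_EXPR <x, y>,
   provided NaNs and signed zeros need not be honored as described
   above.  */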
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
		false) >= 2)
#endif
/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
	       || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
  tree tem;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when simplifying range test");

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type,
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue_loc (loc, tem) : tem;
    }

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
	   && lhs != 0 && rhs != 0
	   && (code == TRUTH_ANDIF_EXPR
	       || code == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	{
	  tem = build2 (code == TRUTH_ANDIF_EXPR
			? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			type, op0, op1);
	  SET_EXPR_LOCATION (tem, loc);
	  return tem;
	}

      else if (lang_hooks.decls.global_bindings_p () == 0
	       && ! CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (loc, type, common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (loc, type, common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (warnmsg,
				       WARN_STRICT_OVERFLOW_COMPARISON);
	      tem = build2 (code == TRUTH_ANDIF_EXPR
			    ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			    type, lhs, rhs);
	      SET_EXPR_LOCATION (tem, loc);
	      return tem;
	    }
	}
    }

  return 0;
}
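
/* For example (illustrative, added commentary): "ch >= '0' && ch <= '9'"
   yields two single-ended ranges that merge_ranges combines into
   + ['0', '9'], for which build_range_check emits one unsigned test,
   roughly (unsigned char) (ch - '0') <= 9.  */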
/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is sign-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1));

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask));
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
}
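
/* Worked example (illustrative, added commentary): with P == 8 and a
   32-bit C, the sign-extended constant 0xffffff80 is mapped to
   0x00000080 (extra bits all zero), while a zero-extended 0x00000080
   is mapped to 0xffffff80 (extra bits nonzero), which is what the
   callers test for.  */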
/* For an expression that has the form
     (A && B) || ~B
   or
     (A || B) && ~B,
   we can drop one of the inner expressions and simplify to
     A || ~B
   or
     A && ~B

   LOC is the location of the resulting expression.  OP is the inner
   logical operation; the left-hand side in the examples above, while CMPOP
   is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
   removing a condition that guards another, as in
     (A != NULL && A->...) || A == NULL
   which we must not transform.  If RHS_ONLY is true, only eliminate the
   right-most operand of the inner logical operation.  */

static tree
merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
				 bool rhs_only)
{
  tree type = TREE_TYPE (cmpop);
  enum tree_code code = TREE_CODE (cmpop);
  enum tree_code truthop_code = TREE_CODE (op);
  tree lhs = TREE_OPERAND (op, 0);
  tree rhs = TREE_OPERAND (op, 1);
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code rhs_code = TREE_CODE (rhs);
  enum tree_code lhs_code = TREE_CODE (lhs);
  enum tree_code inv_code;

  if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
    return NULL_TREE;

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return NULL_TREE;

  if (rhs_code == truthop_code)
    {
      tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
      if (newrhs != NULL_TREE)
	{
	  rhs = newrhs;
	  rhs_code = TREE_CODE (rhs);
	}
    }
  if (lhs_code == truthop_code && !rhs_only)
    {
      tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
      if (newlhs != NULL_TREE)
	{
	  lhs = newlhs;
	  lhs_code = TREE_CODE (lhs);
	}
    }

  inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
  if (inv_code == rhs_code
      && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return lhs;
  if (!rhs_only && inv_code == lhs_code
      && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return rhs;
  if (rhs != orig_rhs || lhs != orig_lhs)
    return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
			    lhs, rhs);
  return NULL_TREE;
}
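
/* For example (illustrative, added commentary):
   "(x > 0 && y <= 4) || y > 4" can drop the inner "y <= 4", giving
   "x > 0 || y > 4", whereas in "(a != NULL && a->len > 0) || a == NULL"
   the RHS_ONLY restriction keeps the "a != NULL" guard intact.  */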
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truthop (location_t loc, enum tree_code code, tree truth_type,
	      tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  HOST_WIDE_INT first_bit, end_bit;
  int volatilep;
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code orig_code = code;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
		    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
		    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
	  && operand_equal_p (lr_arg, rr_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
	       && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  Avoid doing this if the RHS is a floating-point
     comparison since those can trap.  */

  if (BRANCH_COST (optimize_function_for_speed_p (cfun),
		   false) >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	{
	  result = build2 (NE_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));
	  goto fold_truthop_exit;
	}

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	{
	  result = build2 (EQ_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));
	  goto fold_truthop_exit;
	}

      if (LOGICAL_OP_NON_SHORT_CIRCUIT)
	{
	  if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
	    {
	      result = build2 (code, truth_type, lhs, rhs);
	      goto fold_truthop_exit;
	    }
	  return NULL_TREE;
	}
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (loc, ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask,
				     &ll_and_mask);
  lr_inner = decode_field_reference (loc, lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask,
				     &lr_and_mask);
  rl_inner = decode_field_reference (loc, rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask,
				     &rl_and_mask);
  rr_inner = decode_field_reference (loc, rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask,
				     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
			 size_int (xll_bitpos));
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
			 size_int (xrl_bitpos));

  if (l_const)
    {
      l_const = fold_convert_loc (loc, lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, ll_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  if (r_const)
    {
      r_const = fold_convert_loc (loc, lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, rl_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, lr_mask),
			     size_int (xlr_bitpos));
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, rr_mask),
			     size_int (xrr_bitpos));

      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
	{
	  lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  result = build2 (wanted_code, truth_type, lhs, rhs);
	  goto fold_truthop_exit;
	}

      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	{
	  tree type;

	  lhs = make_bit_field_ref (loc, ll_inner, lntype,
				    ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
	  rhs = make_bit_field_ref (loc, lr_inner, rntype,
				    lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)));
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)));

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = fold_convert_loc (loc, rntype, lhs);
		  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = fold_convert_loc (loc, lntype, rhs);
		  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

	  result = build2 (wanted_code, truth_type, lhs, rhs);
	  goto fold_truthop_exit;
	}

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
			   const_binop (BIT_AND_EXPR, result, r_const)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);
	}
      else
	{
	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    {
      result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
      SET_EXPR_LOCATION (result, loc);
    }

  result = build2 (wanted_code, truth_type, result,
		   const_binop (BIT_IOR_EXPR, l_const, r_const));

 fold_truthop_exit:
  SET_EXPR_LOCATION (result, loc);
  return result;
}
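
/* Illustration (added commentary; the struct layout below is an example,
   not taken from the original sources): given
     struct s { unsigned a : 4; unsigned b : 4; } *p;
   the test "p->a == 2 && p->b == 3" can be folded into a single load of
   the containing byte compared against the merged constant, roughly
     (*(unsigned char *) p & 0xff) == (2 | (3 << 4))
   on a little-endian, 8-bit-byte target.  */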
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
			    tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
	tree tem
	  = optimize_minmax_comparison (loc,
					invert_tree_comparison (code, false),
					type, op0, op1);
	if (tem)
	  return invert_truthvalue_loc (loc, tem);
	return NULL_TREE;
      }

    case GE_EXPR:
      return
	fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
			 optimize_minmax_comparison
			 (loc, EQ_EXPR, type, arg0, comp_const),
			 optimize_minmax_comparison
			 (loc, GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5   */
	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand_loc (loc, type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

    default:
      return NULL_TREE;
    }
}
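
/* For example (illustrative, added commentary): "MAX (x, 3) >= 5" is
   expanded via the GE_EXPR case into "MAX (x, 3) == 5 || MAX (x, 3) > 5",
   and the EQ_EXPR and GT_EXPR cases then reduce it to "x == 5 || x > 5",
   since 3 < 5 makes the MAX operand irrelevant.  */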
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.

   We set *STRICT_OVERFLOW_P to true if the return value depends on
   signed overflow being undefined.  Otherwise we do not change
   *STRICT_OVERFLOW_P.  */
static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
		bool *strict_overflow_p)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
  depth--;

  return ret;
}
static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
		  bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;
  bool sub_strict_overflow_p;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
	return const_binop (code, fold_convert (ctype, t),
			    fold_convert (ctype, c));
      break;

    CASE_CONVERT: case NON_LVALUE_EXPR:
      /* If op0 is an expression ... */
      if ((COMPARISON_CLASS_P (op0)
	   || UNARY_CLASS_P (op0)
	   || BINARY_CLASS_P (op0)
	   || VL_EXP_CLASS_P (op0)
	   || EXPRESSION_CLASS_P (op0))
	  /* ... and has wrapping overflow, and its type is smaller
	     than ctype, then we cannot pass through as widening.  */
	  && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
	       && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
		     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
	       && (TYPE_PRECISION (ctype)
		   > TYPE_PRECISION (TREE_TYPE (op0))))
	      /* ... or this is a truncation (t is narrower than op0),
		 then we cannot pass through this narrowing.  */
	      || (TYPE_PRECISION (type)
		  < TYPE_PRECISION (TREE_TYPE (op0)))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TYPE_UNSIGNED (ctype)
		      != TYPE_UNSIGNED (TREE_TYPE (op0))))
	      /* ... or has undefined overflow while the converted to
		 type has not, we cannot do the operation in the inner type
		 as that would introduce undefined overflow.  */
	      || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
		  && !TYPE_OVERFLOW_UNDEFINED (type))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && !TREE_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 code == MULT_EXPR
					 ? ctype : NULL_TREE,
					 strict_overflow_p))))
	return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
	 must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
	{
	  tree cstype = (*signed_type_for) (ctype);
	  if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
	      != 0)
	    {
	      t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
	      return fold_convert (ctype, t1);
	    }
	  break;
	}
      /* If the constant is negative, we cannot simplify this.  */
      if (tree_int_cst_sgn (c) == -1)
	break;
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
	  != 0)
	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      sub_strict_overflow_p = false;
      if ((t1 = extract_muldiv (op0, c, code, wide_type,
				&sub_strict_overflow_p)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type,
				   &sub_strict_overflow_p)) != 0)
	{
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  Signed
	 left-shift overflow is implementation-defined rather than
	 undefined in C90, so do not convert signed left shift
	 into multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
	  && TREE_INT_CST_HIGH (op1) == 0
	  && 0 != (t1 = fold_convert (ctype,
				      const_binop (LSHIFT_EXPR,
						   size_one_node,
						   op1)))
	  && !TREE_OVERFLOW (t1))
	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
				       ? MULT_EXPR : FLOOR_DIV_EXPR,
				       ctype,
				       fold_convert (ctype, op0),
				       t1),
			       c, code, wide_type, strict_overflow_p);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      sub_strict_overflow_p = false;
      t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
      t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	{
	  tcode = PLUS_EXPR, op1 = negate_expr (op1);
	  /* If OP1 was not easily negatable, the constant may be OP0.  */
	  if (TREE_CODE (op0) == INTEGER_CST)
	    {
	      tree tem = op0;
	      op0 = op1;
	      op1 = tem;
	      tem = t1;
	      t1 = t2;
	      t2 = tem;
	    }
	}

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c));
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
	    break;
	}
      else
	break;

      /* If we have an unsigned type that is not a sizetype, we cannot widen
	 the operation since it will change the result if the original
	 computation overflowed.  */
      if (TYPE_UNSIGNED (ctype)
	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
	  && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
	return fold_build2 (tcode, ctype,
			    fold_build2 (code, ctype,
					 fold_convert (ctype, op0),
					 fold_convert (ctype, c)),
			    op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  /* If the multiplication can overflow we cannot optimize this.
	     ???  Until we can properly mark individual operations as
	     not overflowing we need to treat sizetype special here as
	     stor-layout relies on this optimization to make
	     DECL_FIELD_BIT_OFFSET always a constant.  */
	  && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
	      || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
		  && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
	{
	  *strict_overflow_p = true;
	  return omit_one_operand (type, integer_zero_node, op0);
	}

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type,
				   strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type,
					strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
			    fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code
	  && 0 != (t1 = int_const_binop (MULT_EXPR,
					 fold_convert (ctype, op1),
					 fold_convert (ctype, c), 1))
	  && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
					       TREE_INT_CST_HIGH (t1),
					       (TYPE_UNSIGNED (ctype)
						&& tcode != MULT_EXPR) ? -1 : 1,
					       TREE_OVERFLOW (t1)))
	  && !TREE_OVERFLOW (t1))
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with an
	 operation of either CODE or TCODE.

	 If we have an unsigned type that is not a sizetype, we cannot do
	 this since it will change the result if the original computation
	 overflowed.  */
      if ((TYPE_OVERFLOW_UNDEFINED (ctype)
	   || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
		  && code != MULT_EXPR)))
	{
	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     op1, c)));
	    }
	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     c, op1)));
	    }
	}
      break;

    default:
      break;
    }

  return 0;
}
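
/* Examples (illustrative, added commentary): dividing (X * 8) + (Y * 16)
   by 4 yields (X * 2) + (Y * 4) through the PLUS_EXPR and MULT_EXPR
   cases, and (X * 12) / 4 "cancels" to X * 3, in both instances provided
   the original computation cannot overflow or its overflow is
   undefined.  */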
/* Return a node which has the indicated constant VALUE (either 0 or
   1), and is of the indicated TYPE.  */

tree
constant_boolean_node (int value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else
    return build_int_cst (type, value);
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (location_t loc,
				     enum tree_code code,
				     tree type, tree op0, tree op1,
				     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;

  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an expression, then it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  /* This transformation is only worthwhile if we don't have to wrap ARG
     in a SAVE_EXPR and the operation can be simplified on at least one
     of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg)
      && (TREE_SIDE_EFFECTS (arg)
	  || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
    return NULL_TREE;

  arg = fold_convert_loc (loc, arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert_loc (loc, cond_type, true_value);
      if (cond_first_p)
	lhs = fold_build2_loc (loc, code, type, true_value, arg);
      else
	lhs = fold_build2_loc (loc, code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert_loc (loc, cond_type, false_value);
      if (cond_first_p)
	rhs = fold_build2_loc (loc, code, type, false_value, arg);
      else
	rhs = fold_build2_loc (loc, code, type, arg, false_value);
    }

  /* Check that we have simplified at least one of the branches.  */
  if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
    return NULL_TREE;

  return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
}
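
/* Illustrative example, not part of the GCC sources.  Pushing the
   binary operation into the arms of the conditional lets each arm
   fold independently:

       int f (int a, int b, int x, int y)
       {
	 return a + (b ? x : y);
       }

   becomes b ? (a + x) : (a + y); and with a comparison operand,
   1 + (x < y) folds all the way to (x < y) ? 2 : 1, since each arm is
   then a constant expression.  */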
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

bool
fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
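
/* Standalone demonstration, not GCC code, of why signed zeros block
   this fold.  Under round-to-nearest, -0.0 + 0.0 is +0.0, so X + 0.0
   is not X when X is -0.0, while X - 0.0 preserves the sign of zero:

       #include <math.h>
       #include <stdio.h>
       int main (void)
       {
	 double x = -0.0;
	 printf ("%d %d\n", !!signbit (x + 0.0), !!signbit (x - 0.0));
	 return 0;
       }

   This prints "0 1": the addition lost the sign bit, the subtraction
   kept it, which is exactly the asymmetry the NEGATE parameter
   encodes.  */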
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (location_t loc,
		     enum built_in_function fcode, enum tree_code code,
		     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = CALL_EXPR_ARG (arg0, 0);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
	{
	  /* sqrt(x) < y is always false, if y is negative.  */
	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg);

	  /* sqrt(x) > y is always true, if y is negative and we
	     don't care about NaNs, i.e. negative values of x.  */
	  if (code == NE_EXPR || !HONOR_NANS (mode))
	    return omit_one_operand_loc (loc, type, integer_one_node, arg);

	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
	  return fold_build2_loc (loc, GE_EXPR, type, arg,
				  build_real (TREE_TYPE (arg), dconst0));
	}
      else if (code == GT_EXPR || code == GE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
	      if (HONOR_INFINITIES (mode))
		return fold_build2_loc (loc, EQ_EXPR, type, arg,
					build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) > y is always false, when y is very large
		 and we don't care about infinities.  */
	      return omit_one_operand_loc (loc, type, integer_zero_node, arg);
	    }

	  /* sqrt(x) > c is the same as x > c*c.  */
	  return fold_build2_loc (loc, code, type, arg,
				  build_real (TREE_TYPE (arg), c2));
	}
      else if (code == LT_EXPR || code == LE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) < y is always true, when y is a very large
		 value and we don't care about NaNs or Infinities.  */
	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
		return omit_one_operand_loc (loc, type, integer_one_node, arg);

	      /* sqrt(x) < y is x != +Inf when y is very large and we
		 don't care about NaNs.  */
	      if (! HONOR_NANS (mode))
		return fold_build2_loc (loc, NE_EXPR, type, arg,
					build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) < y is x >= 0 when y is very large and we
		 don't care about Infinities.  */
	      if (! HONOR_INFINITIES (mode))
		return fold_build2_loc (loc, GE_EXPR, type, arg,
					build_real (TREE_TYPE (arg), dconst0));

	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
	      if (lang_hooks.decls.global_bindings_p () != 0
		  || CONTAINS_PLACEHOLDER_P (arg))
		return NULL_TREE;

	      arg = save_expr (arg);
	      return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				      fold_build2_loc (loc, GE_EXPR, type, arg,
						       build_real (TREE_TYPE (arg),
								   dconst0)),
				      fold_build2_loc (loc, NE_EXPR, type, arg,
						       build_real (TREE_TYPE (arg),
								   c2)));
	    }

	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
	  if (! HONOR_NANS (mode))
	    return fold_build2_loc (loc, code, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
	  if (lang_hooks.decls.global_bindings_p () == 0
	      && ! CONTAINS_PLACEHOLDER_P (arg))
	    {
	      arg = save_expr (arg);
	      return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				      fold_build2_loc (loc, GE_EXPR, type, arg,
						       build_real (TREE_TYPE (arg),
								   dconst0)),
				      fold_build2_loc (loc, code, type, arg,
						       build_real (TREE_TYPE (arg),
								   c2)));
	    }
	}
    }

  return NULL_TREE;
}
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, unless we honor sNaNs.  */
      if (HONOR_SNANS (mode))
	return NULL_TREE;
      return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      if (lang_hooks.decls.global_bindings_p () == 0
	  && ! CONTAINS_PLACEHOLDER_P (arg0))
	{
	  arg0 = save_expr (arg0);
	  return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
	}
      break;

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
	return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
				arg0, build_real (TREE_TYPE (arg0), max));

      temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}
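
/* Illustration, not GCC code, of the DBL_MAX rewrites above.  +Inf is
   the only double above DBL_MAX, so x < +Inf is the same test as
   x <= DBL_MAX, and x >= +Inf is the same as x > DBL_MAX:

       #include <assert.h>
       #include <float.h>
       #include <math.h>
       int main (void)
       {
	 double xs[] = { 0.0, 42.0, DBL_MAX, INFINITY };
	 for (int i = 0; i < 4; i++)
	   {
	     assert ((xs[i] < INFINITY) == (xs[i] <= DBL_MAX));
	     assert ((xs[i] >= INFINITY) == (xs[i] > DBL_MAX));
	   }
	 return 0;
       }
*/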
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (location_t loc,
		  enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  unsigned HOST_WIDE_INT lpart;
  HOST_WIDE_INT hpart;
  bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
  bool neg_overflow;
  int overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1, 0);  */
  overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
				   TREE_INT_CST_HIGH (arg01),
				   TREE_INT_CST_LOW (arg1),
				   TREE_INT_CST_HIGH (arg1),
				   &lpart, &hpart, unsigned_p);
  prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
				-1, overflow);
  neg_overflow = false;

  if (unsigned_p)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1), 0);
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0).  */
      overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
				       TREE_INT_CST_HIGH (prod),
				       TREE_INT_CST_LOW (tmp),
				       TREE_INT_CST_HIGH (tmp),
				       &lpart, &hpart, unsigned_p);
      hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
				  -1, overflow | TREE_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1), 0);
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  neg_overflow = true;
	  lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
	  hi = prod;
	  break;

	case 0:
	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
	  hi = tmp;
	  break;

	case 1:
	  hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
	  lo = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1), 0);
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
	  lo = prod;
	  break;

	case 0:
	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
	  lo = tmp;
	  break;

	case 1:
	  neg_overflow = true;
	  lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
	  hi = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
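
/* Worked instance of the rewrite above, not part of the sources.
   With truncating division, x / 4 == 2 holds exactly for x in
   [8, 11], so the equality becomes a range check and the ordering
   comparisons become single bound checks:

       #include <assert.h>
       int main (void)
       {
	 for (int x = -20; x <= 20; x++)
	   {
	     assert ((x / 4 == 2) == (x >= 8 && x <= 11));
	     assert ((x / 4 < 2) == (x < 8));
	   }
	 return 0;
       }
*/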
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */

static tree
fold_single_bit_test_into_sign_test (location_t loc,
				     enum tree_code code, tree arg0, tree arg1,
				     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
	  /* This is only a win if casting to a signed type is cheap,
	     i.e. when arg00's type is not a partial mode.  */
	  && TYPE_PRECISION (TREE_TYPE (arg00))
	     == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
	{
	  tree stype = signed_type_for (TREE_TYPE (arg00));
	  return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				  result_type,
				  fold_convert_loc (loc, stype, arg00),
				  build_int_cst (stype, 0));
	}
    }

  return NULL_TREE;
}
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (location_t loc, enum tree_code code,
		      tree arg0, tree arg1, tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
	 test.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
						 result_type);
      if (tem)
	return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
	 convert that into ((A >> C2) & 1), where C2 = log2(C).
	 Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
		      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert_loc (loc, intermediate_type, inner);

      if (bitnum != 0)
	inner = build2 (RSHIFT_EXPR, intermediate_type,
			inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      if (code == EQ_EXPR)
	inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type,
				 inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert_loc (loc, result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
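
/* Illustrative check, outside of GCC, of the shift form built above.
   Testing bit 3 via (A & 8) != 0 is the same as extracting it with
   ((A >> 3) & 1), and the EQ case just XORs the extracted bit with 1:

       #include <assert.h>
       int main (void)
       {
	 for (unsigned a = 0; a < 64; a++)
	   {
	     assert (((a & 8) != 0) == ((a >> 3) & 1));
	     assert (((a & 8) == 0) == (((a >> 3) & 1) ^ 1));
	   }
	 return 0;
       }
*/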
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (const_tree arg0, const_tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
	 && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == FIXED_CST)
    return 0;
  if (TREE_CODE (arg0) == FIXED_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (optimize_function_for_size_p (cfun))
    return 0;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  /* It is preferable to swap two SSA_NAME to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  /* Put SSA_NAMEs last.  */
  if (TREE_CODE (arg1) == SSA_NAME)
    return 0;
  if (TREE_CODE (arg0) == SSA_NAME)
    return 1;

  /* Put variables last.  */
  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  return 0;
}
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */

static tree
fold_widened_comparison (location_t loc, enum tree_code code,
			 tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (shorter_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, NULL_TREE);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
	  || ((TYPE_PRECISION (shorter_type)
	       >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
	      && (TYPE_UNSIGNED (shorter_type)
		  == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
	  || (TREE_CODE (arg1_unw) == INTEGER_CST
	      && (TREE_CODE (shorter_type) == INTEGER_TYPE
		  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
	      && int_fits_type_p (arg1_unw, shorter_type))))
    return fold_build2_loc (loc, code, type, arg0_unw,
			    fold_convert_loc (loc, shorter_type, arg1_unw));

  if (TREE_CODE (arg1_unw) != INTEGER_CST
      || TREE_CODE (shorter_type) != INTEGER_TYPE
      || !int_fits_type_p (arg1_unw, shorter_type))
    return NULL_TREE;

  /* If we are comparing with the integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      else if (below)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case GT_EXPR:
    case GE_EXPR:
      if (above)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      else if (below)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
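
/* Sketch, not from the sources, of the known-result case above.
   Comparing a widened narrow value against a constant outside the
   narrow type's range has a fixed answer; for example, with
   signed char c, the test

       (int) c == 1000

   is false for every possible c, so it folds to 0 (while still
   evaluating c), mirroring the omit_one_operand_loc calls above.  */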
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
			      tree arg0, tree arg1)
{
  tree arg0_inner;
  tree inner_type, outer_type;

  if (!CONVERT_EXPR_P (arg0))
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) != INTEGER_CST
      && !(CONVERT_EXPR_P (arg1)
	   && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
       || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
				  TREE_INT_CST_HIGH (arg1), 0,
				  TREE_OVERFLOW (arg1));
  else
    arg1 = fold_convert_loc (loc, inner_type, arg1);

  return fold_build2_loc (loc, code, type, arg0_inner, arg1);
}
/* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
   step of the array.  Reconstructs s and delta in the case of s *
   delta being an integer constant (and thus already folded).  ADDR is
   the address.  OP1 is the multiplicative expression.  If the
   function succeeds, the new address expression is returned.
   Otherwise NULL_TREE is returned.  LOC is the location of the
   resulting expression.  */

static tree
try_move_mult_to_index (location_t loc, tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;
  bool mdim = false;

  /* Strip the nops that might be added when converting op1 to sizetype.  */
  STRIP_NOPS (op1);

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  s = arg0;
	  delta = arg1;
	}
      else if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  s = arg1;
	  delta = arg0;
	}
      else
	return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Simulate we are delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
	{
	  tree domain;

	  /* Remember if this was a multi-dimensional array.  */
	  if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
	    mdim = true;

	  domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
	  if (! domain)
	    continue;
	  itype = TREE_TYPE (domain);

	  step = array_ref_element_size (ref);
	  if (TREE_CODE (step) != INTEGER_CST)
	    continue;

	  if (s)
	    {
	      if (! tree_int_cst_equal (step, s))
		continue;
	    }
	  else
	    {
	      /* Try if delta is a multiple of step.  */
	      tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
	      if (! tmp)
		continue;
	      delta = tmp;
	    }

	  /* Only fold here if we can verify we do not overflow one
	     dimension of a multi-dimensional array.  */
	  if (mdim)
	    {
	      tree tmp;

	      if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
		  || !TYPE_MAX_VALUE (domain)
		  || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
		continue;

	      tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
				     fold_convert_loc (loc, itype,
						       TREE_OPERAND (ref, 1)),
				     fold_convert_loc (loc, itype, delta));
	      if (!tmp
		  || TREE_CODE (tmp) != INTEGER_CST
		  || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
		continue;
	    }

	  break;
	}
      else
	mdim = false;

      if (!handled_component_p (ref))
	return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  SET_EXPR_LOCATION (ret, loc);
  pos = ret;

  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
					   fold_convert_loc (loc, itype,
							     TREE_OPERAND (pos, 1)),
					   fold_convert_loc (loc, itype, delta));

  return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
}
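
/* Illustration of the rewrite above, not part of the sources.  For
   int a[10], the address &a[i] + d is a POINTER_PLUS_EXPR whose
   offset tree is d * 4, the element size:

       int *f (int a[10], int i, int d)
       {
	 return &a[i] + d;
       }

   Dividing the byte offset by the array step recovers d, and the
   whole expression folds back into a single ADDR_EXPR of an
   ARRAY_REF with index i + d.  */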
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  if (POINTER_TYPE_P (typea))
    {
      /* Convert the pointer types into integer before taking the difference.  */
      tree ta = fold_convert_loc (loc, ssizetype, a);
      tree ta1 = fold_convert_loc (loc, ssizetype, a1);
      diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
    }
  else
    diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);

  if (!diff || !integer_onep (diff))
    return NULL_TREE;

  return fold_build2_loc (loc, GE_EXPR, type, a, y);
}
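
/* The arithmetic behind the fold above, sketched outside of GCC.
   The bound A < X guarantees A is not the maximum value of its type,
   so A + 1 cannot wrap and A + 1 > Y is the same as A >= Y:

       #include <assert.h>
       int main (void)
       {
	 int A = 5, X = 7;
	 for (int Y = -3; Y < 10; Y++)
	   if (A < X)
	     assert ((A + 1 > Y) == (A >= Y));
	 return 0;
       }
*/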
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else if (TREE_CODE (arg0) == INTEGER_CST)
    {
      arg00 = build_one_cst (type);
      arg01 = arg0;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      arg10 = build_one_cst (type);
      /* As we canonicalize A - 2 to A + -2 get rid of that sign for
	 the purpose of this canonicalization.  */
      if (TREE_INT_CST_HIGH (arg1) == -1
	  && negate_expr_p (arg1)
	  && code == PLUS_EXPR)
	{
	  arg11 = negate_expr (arg1);
	  code = MINUS_EXPR;
	}
      else
	arg11 = arg1;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (host_integerp (arg01, 0)
	   && host_integerp (arg11, 0))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = TREE_INT_CST_LOW (arg01);
      int11 = TREE_INT_CST_LOW (arg11);

      /* Move min of absolute values to int11.  */
      if ((int01 >= 0 ? int01 : -int01)
	  < (int11 >= 0 ? int11 : -int11))
	{
	  tmp = int01, int01 = int11, int11 = tmp;
	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
	  maybe_same = arg01;
	  swap = true;
	}
      else
	maybe_same = arg11;

      if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
	  /* The remainder should not be a constant, otherwise we
	     end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
	     increased the number of multiplications necessary.  */
	  && TREE_CODE (arg10) != INTEGER_CST)
	{
	  alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
				  build_int_cst (TREE_TYPE (arg00),
						 int01 / int11));
	  alt1 = arg10;
	  same = maybe_same;
	  if (swap)
	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
	}
    }

  if (same)
    return fold_build2_loc (loc, MULT_EXPR, type,
			    fold_build2_loc (loc, code, type,
					     fold_convert_loc (loc, type, alt0),
					     fold_convert_loc (loc, type, alt1)),
			    fold_convert_loc (loc, type, same));

  return NULL_TREE;
}
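
/* Illustrative check of the factorings above, outside of GCC:

       #include <assert.h>
       int main (void)
       {
	 int a = 3, b = 4, c = 5, i = 6, j = 7;
	 assert (a * c + b * c == (a + b) * c);
	 assert (a * c - a == a * (c - 1));
	 assert (i * 4 + j * 2 == (i * 2 + j) * 2);
	 return 0;
       }

   The last line is the common power-of-two case: two multiplies
   become one multiply by the shared factor, which helps
   multi-dimensional array indexing.  */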
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (bitpos < HOST_BITS_PER_WIDE_INT)
	value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
      else
	value = (unsigned char) (TREE_INT_CST_HIGH (expr)
				 >> (bitpos - HOST_BITS_PER_WIDE_INT));

      if (total_bytes > UNITS_PER_WORD)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words, bitpos;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if (total_bytes > len)
    return 0;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
    }
  return total_bytes;
}
/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_complex (const_tree expr, unsigned char *ptr, int len)
{
  int rsize, isize;
  tree part;

  part = TREE_REALPART (expr);
  rsize = native_encode_expr (part, ptr, len);
  if (rsize == 0)
    return 0;
  part = TREE_IMAGPART (expr);
  isize = native_encode_expr (part, ptr+rsize, len-rsize);
  if (isize != rsize)
    return 0;
  return rsize + isize;
}
/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_vector (const_tree expr, unsigned char *ptr, int len)
{
  int i, size, offset, count;
  tree itype, elem, elements;

  offset = 0;
  elements = TREE_VECTOR_CST_ELTS (expr);
  count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
  itype = TREE_TYPE (TREE_TYPE (expr));
  size = GET_MODE_SIZE (TYPE_MODE (itype));
  for (i = 0; i < count; i++)
    {
      if (elements)
	{
	  elem = TREE_VALUE (elements);
	  elements = TREE_CHAIN (elements);
	}
      else
	elem = NULL_TREE;

      if (elem)
	{
	  if (native_encode_expr (elem, ptr+offset, len-offset) != size)
	    return 0;
	}
      else
	{
	  if (offset + size > len)
	    return 0;
	  memset (ptr+offset, 0, size);
	}
      offset += size;
    }
  return offset;
}
/* Subroutine of native_encode_expr.  Encode the STRING_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_string (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  HOST_WIDE_INT total_bytes;

  if (TREE_CODE (type) != ARRAY_TYPE
      || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
      || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
      || !host_integerp (TYPE_SIZE_UNIT (type), 0))
    return 0;
  total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
  if (total_bytes > len)
    return 0;
  if (TREE_STRING_LENGTH (expr) < total_bytes)
    {
      memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
      memset (ptr + TREE_STRING_LENGTH (expr), 0,
	      total_bytes - TREE_STRING_LENGTH (expr));
    }
  else
    memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
  return total_bytes;
}
/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
   REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
   buffer PTR of length LEN bytes.  Return the number of bytes
   placed in the buffer, or zero upon failure.  */

int
native_encode_expr (const_tree expr, unsigned char *ptr, int len)
{
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return native_encode_int (expr, ptr, len);

    case REAL_CST:
      return native_encode_real (expr, ptr, len);

    case COMPLEX_CST:
      return native_encode_complex (expr, ptr, len);

    case VECTOR_CST:
      return native_encode_vector (expr, ptr, len);

    case STRING_CST:
      return native_encode_string (expr, ptr, len);

    default:
      return 0;
    }
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;
  double_int result;

  if (total_bytes > len)
    return NULL_TREE;
  if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
    return NULL_TREE;

  result = double_int_zero;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (total_bytes > UNITS_PER_WORD)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      value = ptr[offset];

      if (bitpos < HOST_BITS_PER_WIDE_INT)
	result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
      else
	result.high |= (unsigned HOST_WIDE_INT) value
		       << (bitpos - HOST_BITS_PER_WIDE_INT);
    }

  return double_int_to_tree (type, result);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  int byte, offset, word, words, bitpos;
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_complex (tree type, const unsigned char *ptr, int len)
{
  tree etype, rpart, ipart;
  int size;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  if (size * 2 > len)
    return NULL_TREE;
  rpart = native_interpret_expr (etype, ptr, size);
  if (!rpart)
    return NULL_TREE;
  ipart = native_interpret_expr (etype, ptr+size, size);
  if (!ipart)
    return NULL_TREE;
  return build_complex (type, rpart, ipart);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_vector (tree type, const unsigned char *ptr, int len)
{
  tree etype, elem, elements;
  int i, size, count;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  count = TYPE_VECTOR_SUBPARTS (type);
  if (size * count > len)
    return NULL_TREE;

  elements = NULL_TREE;
  for (i = count - 1; i >= 0; i--)
    {
      elem = native_interpret_expr (etype, ptr+(i*size), size);
      if (!elem)
	return NULL_TREE;
      elements = tree_cons (NULL_TREE, elem, elements);
    }
  return build_vector (type, elements);
}
/* Subroutine of fold_view_convert_expr.  Interpret the contents of
   the buffer PTR of length LEN as a constant of type TYPE.  For
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
   return NULL_TREE.  */

tree
native_interpret_expr (tree type, const unsigned char *ptr, int len)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      return native_interpret_int (type, ptr, len);

    case REAL_TYPE:
      return native_interpret_real (type, ptr, len);

    case COMPLEX_TYPE:
      return native_interpret_complex (type, ptr, len);

    case VECTOR_TYPE:
      return native_interpret_vector (type, ptr, len);

    default:
      return NULL_TREE;
    }
}
/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */

static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}
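
/* What fold_view_convert_expr computes, sketched as ordinary C (not
   GCC code).  It is the compile-time analogue of a memcpy-based type
   pun, performed in the target's byte order:

       #include <stdio.h>
       #include <string.h>
       int main (void)
       {
	 float f = 1.0f;
	 unsigned u;
	 memcpy (&u, &f, sizeof u);
	 printf ("0x%08x\n", u);
	 return 0;
       }

   This prints 0x3f800000 on an IEEE host; in the folder, the encode
   step serializes the constant in target format and the interpret
   step rebuilds a constant of the destination type from those bytes,
   so host endianness never leaks into the result.  */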
/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  /* Note: doesn't apply to ALIGN_INDIRECT_REF */
  if (TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
	{
	  t = build1 (NOP_EXPR, ptrtype, t);
	  SET_EXPR_LOCATION (t, loc);
	}
    }
  else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
    {
      t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));

      if (TREE_TYPE (t) != ptrtype)
	t = fold_convert_loc (loc, ptrtype, t);
    }
  else
    {
      t = build1 (ADDR_EXPR, ptrtype, t);
      SET_EXPR_LOCATION (t, loc);
    }

  return t;
}
/* Build an expression for the address of T.  */

tree
build_fold_addr_expr_loc (location_t loc, tree t)
{
  tree ptrtype = build_pointer_type (TREE_TYPE (t));

  return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
}
/* Fold a unary expression of code CODE and type TYPE with operand
   OP0.  Return the folded expression if folding is successful.
   Otherwise, return NULL_TREE.  */

tree
fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
{
  tree tem;
  tree arg0;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 1);

  arg0 = op0;
  if (arg0)
    {
      if (CONVERT_EXPR_CODE_P (code)
	  || code == FLOAT_EXPR || code == ABS_EXPR)
	{
	  /* Don't use STRIP_NOPS, because signedness of argument type
	     matters.  */
	  STRIP_SIGN_NOPS (arg0);
	}
      else
	{
	  /* Strip any conversions that don't change the mode.  This
	     is safe for every expression, except for a comparison
	     expression because its signedness is derived from its
	     operands.  So, in the latter case, only strip conversions
	     that don't change the signedness.

	     Note that this is done as an internal manipulation within
	     the constant folder, in order to find the simplest
	     representation of the arguments so that their form can be
	     studied.  In any case, the appropriate type conversions
	     should be put back in the tree that will get out of the
	     constant folder.  */
	  STRIP_NOPS (arg0);
	}
    }

  if (TREE_CODE_CLASS (code) == tcc_unary)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		       fold_build1_loc (loc, code, type,
					fold_convert_loc (loc, TREE_TYPE (op0),
							  TREE_OPERAND (arg0, 1))));
      else if (TREE_CODE (arg0) == COND_EXPR)
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg02 = TREE_OPERAND (arg0, 2);
	  if (! VOID_TYPE_P (TREE_TYPE (arg01)))
	    arg01 = fold_build1_loc (loc, code, type,
				     fold_convert_loc (loc,
						       TREE_TYPE (op0), arg01));
	  if (! VOID_TYPE_P (TREE_TYPE (arg02)))
	    arg02 = fold_build1_loc (loc, code, type,
				     fold_convert_loc (loc,
						       TREE_TYPE (op0), arg02));
	  tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
				 arg01, arg02);

	  /* If this was a conversion, and all we did was to move into
	     inside the COND_EXPR, bring it back out.  But leave it if
	     it is a conversion from integer to integer and the
	     result precision is no wider than a word since such a
	     conversion is cheap and may be optimized away by combine,
	     while it couldn't if it were outside the COND_EXPR.  Then return
	     so we don't get into an infinite recursion loop taking the
	     conversion out and then back in.  */

	  if ((CONVERT_EXPR_CODE_P (code)
	       || code == NON_LVALUE_EXPR)
	      && TREE_CODE (tem) == COND_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == code
	      && TREE_CODE (TREE_OPERAND (tem, 2)) == code
	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
	      && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
		     && (INTEGRAL_TYPE_P
			 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
		     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
		  || flag_syntax_only))
	    {
	      tem = build1 (code, type,
			    build3 (COND_EXPR,
				    TREE_TYPE (TREE_OPERAND
					       (TREE_OPERAND (tem, 1), 0)),
				    TREE_OPERAND (tem, 0),
				    TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
				    TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
	      SET_EXPR_LOCATION (tem, loc);
	    }

	  return tem;
	}
      else if (COMPARISON_CLASS_P (arg0))
	{
	  if (TREE_CODE (type) == BOOLEAN_TYPE)
	    {
	      arg0 = copy_node (arg0);
	      TREE_TYPE (arg0) = type;
	      return arg0;
	    }
	  else if (TREE_CODE (type) != INTEGER_TYPE)
	    return fold_build3_loc (loc, COND_EXPR, type, arg0,
				    fold_build1_loc (loc, code, type,
						     integer_one_node),
				    fold_build1_loc (loc, code, type,
						     integer_zero_node));
	}
    }

  switch (code)
    {
    case PAREN_EXPR:
      /* Re-association barriers around constants and other re-association
	 barriers can be removed.  */
      if (CONSTANT_CLASS_P (op0)
	  || TREE_CODE (op0) == PAREN_EXPR)
	return fold_convert_loc (loc, type, op0);
      return NULL_TREE;

    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      if (TREE_TYPE (op0) == type)
	return op0;

      /* If we have (type) (a CMP b) and type is an integral type, return
	 new expression involving the new type.  */
      if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, TREE_CODE (op0), type,
				TREE_OPERAND (op0, 0),
				TREE_OPERAND (op0, 1));

      /* Handle cases of two conversions in a row.  */
      if (CONVERT_EXPR_P (op0))
	{
	  tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
	  tree inter_type = TREE_TYPE (op0);
	  int inside_int = INTEGRAL_TYPE_P (inside_type);
	  int inside_ptr = POINTER_TYPE_P (inside_type);
	  int inside_float = FLOAT_TYPE_P (inside_type);
	  int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
	  unsigned int inside_prec = TYPE_PRECISION (inside_type);
	  int inside_unsignedp = TYPE_UNSIGNED (inside_type);
	  int inter_int = INTEGRAL_TYPE_P (inter_type);
	  int inter_ptr = POINTER_TYPE_P (inter_type);
	  int inter_float = FLOAT_TYPE_P (inter_type);
	  int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
	  unsigned int inter_prec = TYPE_PRECISION (inter_type);
	  int inter_unsignedp = TYPE_UNSIGNED (inter_type);
	  int final_int = INTEGRAL_TYPE_P (type);
	  int final_ptr = POINTER_TYPE_P (type);
	  int final_float = FLOAT_TYPE_P (type);
	  int final_vec = TREE_CODE (type) == VECTOR_TYPE;
	  unsigned int final_prec = TYPE_PRECISION (type);
	  int final_unsignedp = TYPE_UNSIGNED (type);

	  /* In addition to the cases of two conversions in a row
	     handled below, if we are converting something to its own
	     type via an object of identical or wider precision, neither
	     conversion is needed.  */
	  if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
	      && (((inter_int || inter_ptr) && final_int)
		  || (inter_float && final_float))
	      && inter_prec >= final_prec)
	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));

	  /* Likewise, if the intermediate and initial types are either both
	     float or both integer, we don't need the middle conversion if the
	     former is wider than the latter and doesn't change the signedness
	     (for integers).  Avoid this if the final type is a pointer since
	     then we sometimes need the middle conversion.  Likewise if the
	     final type has a precision not equal to the size of its mode.  */
	  if (((inter_int && inside_int)
	       || (inter_float && inside_float)
	       || (inter_vec && inside_vec))
	      && inter_prec >= inside_prec
	      && (inter_float || inter_vec
		  || inter_unsignedp == inside_unsignedp)
	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
		    && TYPE_MODE (type) == TYPE_MODE (inter_type))
	      && ! final_ptr
	      && (! final_vec || inter_prec == inside_prec))
	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));

	  /* If we have a sign-extension of a zero-extended value, we can
	     replace that by a single zero-extension.  */
	  if (inside_int && inter_int && final_int
	      && inside_prec < inter_prec && inter_prec < final_prec
	      && inside_unsignedp && !inter_unsignedp)
	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));

	  /* Two conversions in a row are not needed unless:
	     - some conversion is floating-point (overstrict for now), or
	     - some conversion is a vector (overstrict for now), or
	     - the intermediate type is narrower than both initial and
	       final, or
	     - the intermediate type and innermost type differ in signedness,
	       and the outermost type is wider than the intermediate, or
	     - the initial type is a pointer type and the precisions of the
	       intermediate and final types differ, or
	     - the final type is a pointer type and the precisions of the
	       initial and intermediate types differ.  */
	  if (! inside_float && ! inter_float && ! final_float
	      && ! inside_vec && ! inter_vec && ! final_vec
	      && (inter_prec >= inside_prec || inter_prec >= final_prec)
	      && ! (inside_int && inter_int
		    && inter_unsignedp != inside_unsignedp
		    && inter_prec < final_prec)
	      && ((inter_unsignedp && inter_prec > inside_prec)
		  == (final_unsignedp && final_prec > inter_prec))
	      && ! (inside_ptr && inter_prec != final_prec)
	      && ! (final_ptr && inside_prec != inter_prec)
	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
		    && TYPE_MODE (type) == TYPE_MODE (inter_type)))
	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
	}
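
      /* Illustrative note, not from the original sources: the rules
	 above make both casts in (short) (int) s collapse to a single
	 conversion from s, while in (int) (char) i the narrower
	 intermediate type truncates and must be kept.  */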
      /* Handle (T *)&A.B.C for A being of type T and B and C
	 living at offset zero.  This occurs frequently in
	 C++ upcasting and then accessing the base.  */
      if (TREE_CODE (op0) == ADDR_EXPR
	  && POINTER_TYPE_P (type)
	  && handled_component_p (TREE_OPERAND (op0, 0)))
	{
	  HOST_WIDE_INT bitsize, bitpos;
	  tree offset;
	  enum machine_mode mode;
	  int unsignedp, volatilep;
	  tree base = TREE_OPERAND (op0, 0);
	  base = get_inner_reference (base, &bitsize, &bitpos, &offset,
				      &mode, &unsignedp, &volatilep, false);
	  /* If the reference was to a (constant) zero offset, we can use
	     the address of the base if it has the same base type
	     as the result type and the pointer type is unqualified.  */
	  if (! offset && bitpos == 0
	      && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
		  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
	      && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
	    return fold_convert_loc (loc, type,
				     build_fold_addr_expr_loc (loc, base));
	}

      if (TREE_CODE (op0) == MODIFY_EXPR
	  && TREE_CONSTANT (TREE_OPERAND (op0, 1))
	  /* Detect assigning a bitfield.  */
	  && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
	       && DECL_BIT_FIELD
	       (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
	{
	  /* Don't leave an assignment inside a conversion
	     unless assigning a bitfield.  */
	  tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
	  /* First do the assignment, then return converted constant.  */
	  tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
	  TREE_NO_WARNING (tem) = 1;
	  TREE_USED (tem) = 1;
	  SET_EXPR_LOCATION (tem, loc);
	  return tem;
	}
      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
	 constant (if x has signed type, the sign bit cannot be set
	 in c).  This folds extension into the BIT_AND_EXPR.
	 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
	 very likely don't have maximal range for their precision and this
	 transformation effectively doesn't preserve non-maximal ranges.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
	{
	  tree and_expr = op0;
	  tree and0 = TREE_OPERAND (and_expr, 0);
	  tree and1 = TREE_OPERAND (and_expr, 1);
	  int change = 0;

	  if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
	      || (TYPE_PRECISION (type)
		  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
	    change = 1;
	  else if (TYPE_PRECISION (TREE_TYPE (and1))
		   <= HOST_BITS_PER_WIDE_INT
		   && host_integerp (and1, 1))
	    {
	      unsigned HOST_WIDE_INT cst;

	      cst = tree_low_cst (and1, 1);
	      cst &= (HOST_WIDE_INT) -1
		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
	      change = (cst == 0);
#ifdef LOAD_EXTEND_OP
	      if (change
		  && !flag_syntax_only
		  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
		      == ZERO_EXTEND))
		{
		  tree uns = unsigned_type_for (TREE_TYPE (and0));
		  and0 = fold_convert_loc (loc, uns, and0);
		  and1 = fold_convert_loc (loc, uns, and1);
		}
#endif
	    }
	  if (change)
	    {
	      tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
					   TREE_INT_CST_HIGH (and1), 0,
					   TREE_OVERFLOW (and1));
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_convert_loc (loc, type, and0), tem);
	    }
	}
      /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
	 when one of the new casts will fold away.  Conservatively we assume
	 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST.  */
      if (POINTER_TYPE_P (type)
	  && TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
	      || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);

	  return fold_build2_loc (loc,
				  TREE_CODE (arg0), type,
				  fold_convert_loc (loc, type, arg00),
				  fold_convert_loc (loc, sizetype, arg01));
	}

      /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
	 of the same precision, and X is an integer type not narrower than
	 types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == BIT_NOT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
	      && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
	    return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				    fold_convert_loc (loc, type, tem));
	}

      /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
	 type of X and Y (integer types only).  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == MULT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  /* Be careful not to introduce new overflows.  */
	  tree mult_type;
	  if (TYPE_OVERFLOW_WRAPS (type))
	    mult_type = type;
	  else
	    mult_type = unsigned_type_for (type);

	  if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
	    {
	      tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
				     fold_convert_loc (loc, mult_type,
						       TREE_OPERAND (op0, 0)),
				     fold_convert_loc (loc, mult_type,
						       TREE_OPERAND (op0, 1)));
	      return fold_convert_loc (loc, type, tem);
	    }
	}

      tem = fold_convert_const (code, type, op0);
      return tem ? tem : NULL_TREE;
    case ADDR_SPACE_CONVERT_EXPR:
      if (integer_zerop (arg0))
	return fold_convert_const (code, type, arg0);
      return NULL_TREE;

    case FIXED_CONVERT_EXPR:
      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;

    case VIEW_CONVERT_EXPR:
      if (TREE_TYPE (op0) == type)
	return op0;
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				type, TREE_OPERAND (op0, 0));

      /* For integral conversions with the same precision or pointer
	 conversions use a NOP_EXPR instead.  */
      if ((INTEGRAL_TYPE_P (type)
	   || POINTER_TYPE_P (type))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
	      || POINTER_TYPE_P (TREE_TYPE (op0)))
	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
	return fold_convert_loc (loc, type, op0);

      /* Strip inner integral conversions that do not change the precision.  */
      if (CONVERT_EXPR_P (op0)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
	      || POINTER_TYPE_P (TREE_TYPE (op0)))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
	      || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
	  && (TYPE_PRECISION (TREE_TYPE (op0))
	      == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				type, TREE_OPERAND (op0, 0));

      return fold_view_convert_expr (type, op0);
8053 tem
= fold_negate_expr (loc
, arg0
);
8055 return fold_convert_loc (loc
, type
, tem
);
    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
	       && TREE_CODE (type) == REAL_TYPE)
	{
	  tree targ0 = strip_float_extensions (arg0);
	  if (targ0 != arg0)
	    return fold_convert_loc (loc, type,
				     fold_build1_loc (loc, ABS_EXPR,
						      TREE_TYPE (targ0),
						      targ0));
	}
      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
      else if (TREE_CODE (arg0) == ABS_EXPR)
	return arg0;
      else if (tree_expr_nonnegative_p (arg0))
	return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = fold_strip_sign_ops (arg0);
	  if (tem)
	    return fold_build1_loc (loc, ABS_EXPR, type,
				    fold_convert_loc (loc, type, tem));
	}
      return NULL_TREE;
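      /* Illustrative example (added commentary, not original GCC text):
	 for a float f, the source expression fabs ((double) f) becomes
	 (double) fabsf (f) through the NOP_EXPR arm above, so the
	 absolute value is taken in the narrower format and widened
	 afterwards.  */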
    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
	  tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
				  negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
	  tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
	  return build_complex (type, rpart, negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      return NULL_TREE;
    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				build_int_cst (type, 1));
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
	       && ((TREE_CODE (arg0) == MINUS_EXPR
		    && integer_onep (TREE_OPERAND (arg0, 1)))
		   || (TREE_CODE (arg0) == PLUS_EXPR
		       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
	return fold_build1_loc (loc, NEGATE_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
					 fold_convert_loc (loc, type,
							   TREE_OPERAND (arg0, 0)))))
	return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
					 fold_convert_loc (loc, type,
							   TREE_OPERAND (arg0, 1)))))
	return fold_build2_loc (loc, BIT_XOR_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)), tem);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
	  int count = TYPE_VECTOR_SUBPARTS (type), i;

	  for (i = 0; i < count; i++)
	    {
	      if (elements)
		{
		  elem = TREE_VALUE (elements);
		  elem = fold_unary_loc (loc, BIT_NOT_EXPR,
					 TREE_TYPE (type), elem);
		  if (elem == NULL_TREE)
		    break;
		  elements = TREE_CHAIN (elements);
		}
	      else
		elem = build_int_cst (TREE_TYPE (type), -1);
	      list = tree_cons (NULL_TREE, elem, list);
	    }
	  if (i == count)
	    return build_vector (type, nreverse (list));
	}

      return NULL_TREE;
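      /* Illustrative example (added commentary, not original GCC text):
	 for signed int a, ~(a - 1) folds to -a through the MINUS_EXPR
	 pattern above; e.g. with a == 5, ~4 is indeed -5.  The
	 VECTOR_CST arm applies the same bitwise complement element by
	 element.  */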
    case TRUTH_NOT_EXPR:
      /* The argument to invert_truthvalue must have Boolean type.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
	arg0 = fold_convert_loc (loc, boolean_type_node, arg0);

      /* Note that the operand of this must be an int
	 and its values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (loc, arg0);
      if (!tem)
	return NULL_TREE;
      return fold_convert_loc (loc, type, tem);
    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg0, 1));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_REALPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
				 fold_build1_loc (loc, REALPART_EXPR, itype,
						  TREE_OPERAND (arg0, 0)),
				 fold_build1_loc (loc, REALPART_EXPR, itype,
						  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, REALPART_EXPR, itype,
				 TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_COS);
		if (fn)
		  return build_call_expr_loc (loc, fn, 1,
					      CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;
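      /* Illustrative example (added commentary, not original GCC text):
	 __real__ cexpi (x) folds to cos (x) through the
	 CASE_FLT_FN (BUILT_IN_CEXPI) arm above, because cexpi (x)
	 computes cos (x) + i * sin (x).  */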
    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, integer_zero_node);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
				     TREE_OPERAND (arg0, 0));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
				 fold_build1_loc (loc, IMAGPART_EXPR, itype,
						  TREE_OPERAND (arg0, 0)),
				 fold_build1_loc (loc, IMAGPART_EXPR, itype,
						  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, IMAGPART_EXPR, itype,
				 TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, negate_expr (tem));
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_SIN);
		if (fn)
		  return build_call_expr_loc (loc, fn, 1,
					      CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;
    case INDIRECT_REF:
      /* Fold *&X to X if X is an lvalue.  */
      if (TREE_CODE (op0) == ADDR_EXPR)
	{
	  tree op00 = TREE_OPERAND (op0, 0);
	  if ((TREE_CODE (op00) == VAR_DECL
	       || TREE_CODE (op00) == PARM_DECL
	       || TREE_CODE (op00) == RESULT_DECL)
	      && !TREE_READONLY (op00))
	    return op00;
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* If the operation was a conversion do _not_ mark a resulting constant
   with TREE_OVERFLOW if the original constant was not.  These conversions
   have implementation defined behavior and retaining the TREE_OVERFLOW
   flag here would confuse later passes such as VRP.  */

tree
fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
				tree type, tree op0)
{
  tree res = fold_unary_loc (loc, code, type, op0);
  if (res
      && TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (op0) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (code))
    TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);

  return res;
}
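/* Illustrative example (added commentary, not original GCC text):
   converting the constant 0x1234 to unsigned char yields 0x34.  The
   conversion changes the value, but it is implementation defined rather
   than undefined behavior, so the result must not carry TREE_OVERFLOW
   inherited from the folding machinery.  */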
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_minmax (location_t loc, enum tree_code code, tree type,
	     tree op0, tree op1)
{
  enum tree_code compl_code;

  if (code == MIN_EXPR)
    compl_code = MAX_EXPR;
  else if (code == MAX_EXPR)
    compl_code = MIN_EXPR;
  else
    gcc_unreachable ();

  /* MIN (MAX (a, b), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));

  /* MIN (MAX (b, a), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));

  /* MIN (a, MAX (a, b)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));

  /* MIN (a, MAX (b, a)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));

  return NULL_TREE;
}
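/* Illustrative example (added commentary, not original GCC text):
   fold_minmax rewrites MIN (MAX (a, b), b) to b, because MAX (a, b) is
   never smaller than b, so the outer MIN always selects b.  The other
   three patterns are the commuted and mirrored variants of the same
   identity.  */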
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code,
				 tree type,
				 tree arg0, tree arg1,
				 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;
  bool swap = false;

  /* Match A +- CST code arg1 and CST code arg1.  We can change the
     first form only if overflow is undefined.  */
  if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
	 /* In principle pointers also have undefined overflow behavior,
	    but that causes problems elsewhere.  */
	 && !POINTER_TYPE_P (TREE_TYPE (arg0))
	 && (code0 == MINUS_EXPR
	     || code0 == PLUS_EXPR)
	 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	|| code0 == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  if (code0 == INTEGER_CST)
    cst0 = arg0;
  else
    cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  if (code0 == INTEGER_CST)
    {
      /* CST <= arg1  ->  CST-1 < arg1.  */
      if (code == LE_EXPR && sgn0 == 1)
	code = LT_EXPR;
      /* -CST < arg1  ->  -CST-1 <= arg1.  */
      else if (code == LT_EXPR && sgn0 == -1)
	code = LE_EXPR;
      /* CST > arg1  ->  CST-1 >= arg1.  */
      else if (code == GT_EXPR && sgn0 == 1)
	code = GE_EXPR;
      /* -CST >= arg1  ->  -CST-1 > arg1.  */
      else if (code == GE_EXPR && sgn0 == -1)
	code = GT_EXPR;
      else
	return NULL_TREE;
      /* arg1 code' CST' might be more canonical.  */
      swap = true;
    }
  else
    {
      /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
      if (code == LT_EXPR
	  && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = LE_EXPR;
      /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
      else if (code == GT_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = GE_EXPR;
      /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
      else if (code == LE_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = LT_EXPR;
      /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
      else if (code == GE_EXPR
	       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = GT_EXPR;
      else
	return NULL_TREE;
      *strict_overflow_p = true;
    }

  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its types range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
      && ((sgn0 == 1
	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
	  || (sgn0 == -1
	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
	      && tree_int_cst_equal (cst0,
				     TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
    /* We cannot swap the comparison here as that would cause us to
       endlessly recurse.  */
    return NULL_TREE;

  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
		       cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
  if (code0 != INTEGER_CST)
    t = fold_build2_loc (loc, code0, TREE_TYPE (arg0),
			 TREE_OPERAND (arg0, 0), t);

  /* If swapping might yield to a more canonical form, do so.  */
  if (swap)
    return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
  else
    return fold_build2_loc (loc, code, type, t, arg1);
}
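/* Illustrative examples (added commentary, not original GCC text):
   "5 <= x" becomes "4 < x", which the final swap turns into "x > 4";
   and for signed x with undefined overflow, "x - 1 < y" becomes
   "x <= y" (the reduced constant 0 folds away), which is why
   *STRICT_OVERFLOW_P is set in that case.  */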
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (location_t loc, enum tree_code code,
			       tree type, tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when reducing constant in comparison");

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
				       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
				       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}
/* Return whether BASE + OFFSET + BITPOS may wrap around the address
   space.  This is used to avoid issuing overflow warnings for
   expressions like &p->x which can not wrap.  */

static bool
pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
{
  unsigned HOST_WIDE_INT offset_low, total_low;
  HOST_WIDE_INT size, offset_high, total_high;

  if (!POINTER_TYPE_P (TREE_TYPE (base)))
    return true;

  if (bitpos < 0)
    return true;

  if (offset == NULL_TREE)
    {
      offset_low = 0;
      offset_high = 0;
    }
  else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
    return true;
  else
    {
      offset_low = TREE_INT_CST_LOW (offset);
      offset_high = TREE_INT_CST_HIGH (offset);
    }

  if (add_double_with_sign (offset_low, offset_high,
			    bitpos / BITS_PER_UNIT, 0,
			    &total_low, &total_high,
			    true))
    return true;

  if (total_high != 0)
    return true;

  size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
  if (size <= 0)
    return true;

  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
     array.  */
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      HOST_WIDE_INT base_size;

      base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
      if (base_size > 0 && size < base_size)
	size = base_size;
    }

  return total_low > (unsigned HOST_WIDE_INT) size;
}
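/* Illustrative example (added commentary, not original GCC text):
   for "struct s *p", the address &p->x adds a field offset that is
   necessarily smaller than the size of *p, so pointer_may_wrap_p
   returns false and no spurious overflow warning is issued for
   comparisons involving &p->x.  */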
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should call fold_binary.  Fold a comparison with
   tree code CODE and type TYPE with operands OP0 and OP1.  Return
   the folded comparison or NULL_TREE.  */

static tree
fold_comparison (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  tem = fold_relational_const (code, type, arg0, arg1);
  if (tem != NULL_TREE)
    return tem;

  /* If one arg is a real or integer constant, put it last.  */
  if (tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, swap_tree_comparison (code), type,
			    op1, op0);

  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
      && (TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;
      tree variable = TREE_OPERAND (arg0, 0);
      tree lhs;
      int lhs_add;
      lhs_add = TREE_CODE (arg0) != PLUS_EXPR;

      lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
			     TREE_TYPE (arg1), const2, const1);

      /* If the constant operation overflowed this can be
	 simplified as a comparison against INT_MAX/INT_MIN.  */
      if (TREE_CODE (lhs) == INTEGER_CST
	  && TREE_OVERFLOW (lhs))
	{
	  int const1_sgn = tree_int_cst_sgn (const1);
	  enum tree_code code2 = code;

	  /* Get the sign of the constant on the lhs if the
	     operation were VARIABLE + CONST1.  */
	  if (TREE_CODE (arg0) == MINUS_EXPR)
	    const1_sgn = -const1_sgn;

	  /* The sign of the constant determines if we overflowed
	     INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
	     Canonicalize to the INT_MIN overflow by swapping the comparison
	     if necessary.  */
	  if (const1_sgn == -1)
	    code2 = swap_tree_comparison (code);

	  /* We now can look at the canonicalized case
	       VARIABLE + 1  CODE2  INT_MIN
	     and decide on the result.  */
	  if (code2 == LT_EXPR
	      || code2 == LE_EXPR
	      || code2 == EQ_EXPR)
	    return omit_one_operand_loc (loc, type, boolean_false_node,
					 variable);
	  else if (code2 == NE_EXPR
		   || code2 == GE_EXPR
		   || code2 == GT_EXPR)
	    return omit_one_operand_loc (loc, type, boolean_true_node,
					 variable);
	}

      if (TREE_CODE (lhs) == TREE_CODE (arg1)
	  && (TREE_CODE (lhs) != INTEGER_CST
	      || !TREE_OVERFLOW (lhs)))
	{
	  fold_overflow_warning ("assuming signed overflow does not occur "
				 "when changing X +- C1 cmp C2 to "
				 "X cmp C2 -+ C1",
				 WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type, variable, lhs);
	}
    }
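  /* Illustrative example (added commentary, not original GCC text):
     with signed int x and strict-overflow semantics, "x + 10 < 5" folds
     to "x < -5" above; when the adjusted constant overflows, e.g.
     "x - 1 > INT_MAX", the comparison collapses to a constant result.  */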
  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
	  || TREE_CODE (arg1) == ADDR_EXPR
	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      enum machine_mode mode;
      int volatilep, unsignedp;
      bool indirect_base0 = false, indirect_base1 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
	 get_inner_reference, but put it back by stripping INDIRECT_REF
	 off the base object if possible.  indirect_baseN will be true
	 if baseN is not an address but refers to the object itself.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
				       &bitsize, &bitpos0, &offset0, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base0) == INDIRECT_REF)
	    base0 = TREE_OPERAND (base0, 0);
	  else
	    indirect_base0 = true;
	}
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  base0 = TREE_OPERAND (arg0, 0);
	  offset0 = TREE_OPERAND (arg0, 1);
	}

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
	{
	  base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
				       &bitsize, &bitpos1, &offset1, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base1) == INDIRECT_REF)
	    base1 = TREE_OPERAND (base1, 0);
	  else
	    indirect_base1 = true;
	}
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	{
	  base1 = TREE_OPERAND (arg1, 0);
	  offset1 = TREE_OPERAND (arg1, 1);
	}

      /* A local variable can never be pointed to by
	 the default SSA name of an incoming parameter.  */
      if ((TREE_CODE (arg0) == ADDR_EXPR
	   && indirect_base0
	   && TREE_CODE (base0) == VAR_DECL
	   && auto_var_in_fn_p (base0, current_function_decl)
	   && !indirect_base1
	   && TREE_CODE (base1) == SSA_NAME
	   && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
	   && SSA_NAME_IS_DEFAULT_DEF (base1))
	  || (TREE_CODE (arg1) == ADDR_EXPR
	      && indirect_base1
	      && TREE_CODE (base1) == VAR_DECL
	      && auto_var_in_fn_p (base1, current_function_decl)
	      && !indirect_base0
	      && TREE_CODE (base0) == SSA_NAME
	      && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
	      && SSA_NAME_IS_DEFAULT_DEF (base0)))
	{
	  if (code == NE_EXPR)
	    return constant_boolean_node (1, type);
	  else if (code == EQ_EXPR)
	    return constant_boolean_node (0, type);
	}
      /* If we have equivalent bases we might be able to simplify.  */
      else if (indirect_base0 == indirect_base1
	       && operand_equal_p (base0, base1, 0))
	{
	  /* We can fold this expression to a constant if the non-constant
	     offset parts are equal.  */
	  if ((offset0 == offset1
	       || (offset0 && offset1
		   && operand_equal_p (offset0, offset1, 0)))
	      && (code == EQ_EXPR
		  || code == NE_EXPR
		  || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      if (code != EQ_EXPR
		  && code != NE_EXPR
		  && bitpos0 != bitpos1
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_CONDITIONAL);

	      switch (code)
		{
		case EQ_EXPR:
		  return constant_boolean_node (bitpos0 == bitpos1, type);
		case NE_EXPR:
		  return constant_boolean_node (bitpos0 != bitpos1, type);
		case LT_EXPR:
		  return constant_boolean_node (bitpos0 < bitpos1, type);
		case LE_EXPR:
		  return constant_boolean_node (bitpos0 <= bitpos1, type);
		case GE_EXPR:
		  return constant_boolean_node (bitpos0 >= bitpos1, type);
		case GT_EXPR:
		  return constant_boolean_node (bitpos0 > bitpos1, type);
		default:;
		}
	    }
	  /* We can simplify the comparison to a comparison of the variable
	     offset parts if the constant offset parts are equal.
	     Be careful to use signed size type here because otherwise we
	     mess with array offsets in the wrong way.  This is possible
	     because pointer arithmetic is restricted to retain within an
	     object and overflow on pointer differences is undefined as of
	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
	  else if (bitpos0 == bitpos1
		   && ((code == EQ_EXPR || code == NE_EXPR)
		       || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      /* By converting to signed size type we cover middle-end pointer
		 arithmetic which operates on unsigned pointer types of size
		 type size and ARRAY_REF offsets which are properly sign or
		 zero extended from their type in case it is narrower than
		 size type.  */
	      if (offset0 == NULL_TREE)
		offset0 = build_int_cst (ssizetype, 0);
	      else
		offset0 = fold_convert_loc (loc, ssizetype, offset0);
	      if (offset1 == NULL_TREE)
		offset1 = build_int_cst (ssizetype, 0);
	      else
		offset1 = fold_convert_loc (loc, ssizetype, offset1);

	      if (code != EQ_EXPR
		  && code != NE_EXPR
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_COMPARISON);

	      return fold_build2_loc (loc, code, type, offset0, offset1);
	    }
	}
      /* For non-equal bases we can simplify if they are addresses
	 of local binding decls or constants.  */
      else if (indirect_base0 && indirect_base1
	       /* We know that !operand_equal_p (base0, base1, 0)
		  because the if condition was false.  But make
		  sure two decls are not the same.  */
	       && base0 != base1
	       && TREE_CODE (arg0) == ADDR_EXPR
	       && TREE_CODE (arg1) == ADDR_EXPR
	       && (((TREE_CODE (base0) == VAR_DECL
		     || TREE_CODE (base0) == PARM_DECL)
		    && (targetm.binds_local_p (base0)
			|| CONSTANT_CLASS_P (base1)))
		   || CONSTANT_CLASS_P (base0))
	       && (((TREE_CODE (base1) == VAR_DECL
		     || TREE_CODE (base1) == PARM_DECL)
		    && (targetm.binds_local_p (base1)
			|| CONSTANT_CLASS_P (base0)))
		   || CONSTANT_CLASS_P (base1)))
	{
	  if (code == EQ_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_false_node,
					  arg0, arg1);
	  else if (code == NE_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_true_node,
					  arg0, arg1);
	}
      /* For equal offsets we can simplify to a comparison of the
	 base addresses.  */
      else if (bitpos0 == bitpos1
	       && (indirect_base0
		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
	       && (indirect_base1
		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
	       && ((offset0 == offset1)
		   || (offset0 && offset1
		       && operand_equal_p (offset0, offset1, 0))))
	{
	  if (indirect_base0)
	    base0 = build_fold_addr_expr_loc (loc, base0);
	  if (indirect_base1)
	    base1 = build_fold_addr_expr_loc (loc, base1);
	  return fold_build2_loc (loc, code, type, base0, base1);
	}
    }
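  /* Illustrative example (added commentary, not original GCC text):
     "&a[2] == &a[3]" decomposes into the common base "a" with two
     different constant byte positions, so the equality folds to false
     at compile time; with equal constant positions the fold instead
     compares the remaining variable offsets.  */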
  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
				      "occur when combining constants around "
				      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
	 of lower absolute value than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const2, const1, 0);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  variable1,
				  fold_build2_loc (loc,
						   TREE_CODE (arg1),
						   TREE_TYPE (arg1),
						   variable2, cst));
	}

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const1, const2, 0);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  fold_build2_loc (loc, TREE_CODE (arg0),
						   TREE_TYPE (arg0),
						   variable1, cst),
				  variable2);
	}
    }
  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
     signed arithmetic case.  That form is created by the compiler
     often enough for folding it to be of value.  One example is in
     computing loop trip counts after Operator Strength Reduction.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && TREE_CODE (arg0) == MULT_EXPR
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && integer_zerop (arg1))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;			/* zero */
      tree variable1 = TREE_OPERAND (arg0, 0);
      enum tree_code cmp_code = code;

      /* Handle unfolded multiplication by zero.  */
      if (integer_zerop (const1))
	return fold_build2_loc (loc, cmp_code, type, const1, const2);

      fold_overflow_warning (("assuming signed overflow does not occur when "
			      "eliminating multiplication in comparison "
			      "with zero"),
			     WARN_STRICT_OVERFLOW_COMPARISON);

      /* If const1 is negative we swap the sense of the comparison.  */
      if (tree_int_cst_sgn (const1) < 0)
	cmp_code = swap_tree_comparison (cmp_code);

      return fold_build2_loc (loc, cmp_code, type, variable1, const2);
    }

  tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
  if (tem)
    return tem;
  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
    {
      tree targ0 = strip_float_extensions (arg0);
      tree targ1 = strip_float_extensions (arg1);
      tree newtype = TREE_TYPE (targ0);

      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	newtype = TREE_TYPE (targ1);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, newtype, targ0),
				fold_convert_loc (loc, newtype, targ1));

      /* (-a) CMP (-b) -> b CMP a  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
				TREE_OPERAND (arg0, 0));

      if (TREE_CODE (arg1) == REAL_CST)
	{
	  REAL_VALUE_TYPE cst;
	  cst = TREE_REAL_CST (arg1);

	  /* (-a) CMP CST -> a swap(CMP) (-CST)  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR)
	    return fold_build2_loc (loc, swap_tree_comparison (code), type,
				    TREE_OPERAND (arg0, 0),
				    build_real (TREE_TYPE (arg1),
						real_value_negate (&cst)));

	  /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
	  /* a CMP (-0) -> a CMP 0  */
	  if (REAL_VALUE_MINUS_ZERO (cst))
	    return fold_build2_loc (loc, code, type, arg0,
				    build_real (TREE_TYPE (arg1), dconst0));

	  /* x != NaN is always true, other ops are always false.  */
	  if (REAL_VALUE_ISNAN (cst)
	      && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
	      return omit_one_operand_loc (loc, type, tem, arg0);
	    }

	  /* Fold comparisons against infinity.  */
	  if (REAL_VALUE_ISINF (cst)
	      && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = fold_inf_compare (loc, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}

      /* If this is a comparison of a real constant with a PLUS_EXPR
	 or a MINUS_EXPR of a real constant, we can convert it into a
	 comparison with a revised real constant as long as no overflow
	 occurs when unsafe_math_optimizations are enabled.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == REAL_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      arg1, TREE_OPERAND (arg0, 1)))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Likewise, we can simplify a comparison of a real constant with
	 a MINUS_EXPR whose first operand is also a real constant, i.e.
	 (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
	 floating-point types only if -fassociative-math is set.  */
      if (flag_associative_math
	  && TREE_CODE (arg1) == REAL_CST
	  && TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
	  && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
				      arg1))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, swap_tree_comparison (code), type,
				TREE_OPERAND (arg0, 1), tem);

      /* Fold comparisons against built-in math functions.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && flag_unsafe_math_optimizations
	  && ! flag_errno_math)
	{
	  enum built_in_function fcode = builtin_mathfn_code (arg0);

	  if (fcode != END_BUILTINS)
	    {
	      tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}
    }
  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
      && CONVERT_EXPR_P (arg0))
    {
      /* If we are widening one operand of an integer comparison,
	 see if the other operand is similarly being widened.  Perhaps we
	 can do the comparison in the narrower type.  */
      tem = fold_widened_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;

      /* Or if we are changing signedness.  */
      tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;
    }
  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
	  || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (loc, code, type, op0, op1);
      if (tem)
	return tem;
    }
  /* Simplify comparison of something with itself.  (For IEEE
     floating-point, we can only do some of these simplifications.)  */
  if (operand_equal_p (arg0, arg1, 0))
    {
      switch (code)
	{
	case EQ_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  break;

	case GE_EXPR:
	case LE_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);

	case NE_EXPR:
	  /* For NE, we can only do this simplification if integer
	     or we don't honor IEEE floating point NaNs.  */
	  if (FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    break;
	  /* ... fall through ...  */
	case GT_EXPR:
	case LT_EXPR:
	  return constant_boolean_node (0, type);
	default:
	  gcc_unreachable ();
	}
    }
  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like  ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	  /* Don't handle degenerate cases here; they should already
	     have been handled anyway.  */
	  && cval1 != 0 && cval2 != 0
	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	{
	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	  /* We can't just pass T to eval_subst in case cval1 or cval2
	     was the same as ARG1.  */

	  tree high_result
	    = fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, maxval,
					   cval2, minval),
			       arg1);
	  tree equal_result
	    = fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, maxval,
					   cval2, maxval),
			       arg1);
	  tree low_result
	    = fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, minval,
					   cval2, maxval),
			       arg1);

	  /* All three of these results should be 0 or 1.  Confirm they are.
	     Then use those values to select the proper code to use.  */

	  if (TREE_CODE (high_result) == INTEGER_CST
	      && TREE_CODE (equal_result) == INTEGER_CST
	      && TREE_CODE (low_result) == INTEGER_CST)
	    {
	      /* Make a 3-bit mask with the high-order bit being the
		 value for `>', the next for '=', and the low for '<'.  */
	      switch ((integer_onep (high_result) * 4)
		      + (integer_onep (equal_result) * 2)
		      + integer_onep (low_result))
		{
		case 0:
		  /* Always false.  */
		  return omit_one_operand_loc (loc, type,
					       integer_zero_node, arg0);
		case 1:
		  code = LT_EXPR;
		  break;
		case 2:
		  code = EQ_EXPR;
		  break;
		case 3:
		  code = LE_EXPR;
		  break;
		case 4:
		  code = GT_EXPR;
		  break;
		case 5:
		  code = NE_EXPR;
		  break;
		case 6:
		  code = GE_EXPR;
		  break;
		case 7:
		  /* Always true.  */
		  return omit_one_operand_loc (loc, type,
					       integer_one_node, arg0);
		}

	      if (save_p)
		{
		  tem = save_expr (build2 (code, type, cval1, cval2));
		  SET_EXPR_LOCATION (tem, loc);
		  return tem;
		}
	      return fold_build2_loc (loc, code, type, cval1, cval2);
	    }
	}
    }
  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (loc, code, type, arg0, arg1);
      if (tem != NULL_TREE)
	return tem;
    }

  /* Fold ~X op ~Y as Y op X.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == BIT_NOT_EXPR)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, code, type,
			      fold_convert_loc (loc, cmp_type,
						TREE_OPERAND (arg1, 0)),
			      TREE_OPERAND (arg0, 0));
    }

  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == INTEGER_CST)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, swap_tree_comparison (code), type,
			      TREE_OPERAND (arg0, 0),
			      fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
					       fold_convert_loc (loc, cmp_type,
								 arg1)));
    }

  return NULL_TREE;
}
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (location_t loc, tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
      ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2_loc (loc, PLUS_EXPR, itype,
			 fold_build2_loc (loc, MULT_EXPR, itype,
					  rpart, rpart),
			 fold_build2_loc (loc, MULT_EXPR, itype,
					  ipart, ipart));
  return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
			  fold_convert_loc (loc, itype, integer_zero_node));
}
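/* Illustrative example (added commentary, not original GCC text):
   for complex double z, z times its conjugate is rewritten as
   (__real__ z * __real__ z + __imag__ z * __imag__ z) + 0i, i.e. the
   squared magnitude of z with an explicit zero imaginary part.  */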
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
   power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
   guarantees that P and N have the same least significant log2(M) bits.
   N is not otherwise constrained.  In particular, N is not normalized to
   0 <= N < M as is common.  In general, the precise value of P is unknown.
   M is chosen as large as possible such that constant N can be determined.

   Returns M and sets *RESIDUE to N.

   If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
   account.  This is not always possible due to PR 35705.  */

static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
				 bool allow_func_align)
{
  enum tree_code code;

  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      if (handled_component_p (expr))
	{
	  HOST_WIDE_INT bitsize, bitpos;
	  tree offset;
	  enum machine_mode mode;
	  int unsignedp, volatilep;

	  expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
				      &mode, &unsignedp, &volatilep, false);
	  *residue = bitpos / BITS_PER_UNIT;
	  if (offset)
	    {
	      if (TREE_CODE (offset) == INTEGER_CST)
		*residue += TREE_INT_CST_LOW (offset);
	      else
		/* We don't handle more complicated offset expressions.  */
		return 1;
	    }
	}

      if (DECL_P (expr)
	  && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
	return DECL_ALIGN_UNIT (expr);
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue,
						 allow_func_align);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
	{
	  *residue += TREE_INT_CST_LOW (op1);
	  return modulus;
	}
      else if (inner_code == MULT_EXPR)
	{
	  op1 = TREE_OPERAND (op1, 1);
	  if (TREE_CODE (op1) == INTEGER_CST)
	    {
	      unsigned HOST_WIDE_INT align;

	      /* Compute the greatest power-of-2 divisor of op1.  */
	      align = TREE_INT_CST_LOW (op1);
	      align &= -align;

	      /* If align is non-zero and less than *modulus, replace
		 *modulus with align.  If align is 0, then either op1 is 0
		 or the greatest power-of-2 divisor of op1 doesn't fit in an
		 unsigned HOST_WIDE_INT.  In either case, no additional
		 constraint is imposed.  */
	      if (align)
		modulus = MIN (modulus, align);

	      return modulus;
	    }
	}
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
}
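/* Illustrative example (added commentary, not original GCC text):
   for a 16-byte aligned "char buf[32]", the expression &buf[5] has
   modulus 16 and residue 5: whatever the runtime address turns out to
   be, its value modulo 16 is 5.  */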
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  LOC is the location of the resulting expression.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

tree
fold_binary_loc (location_t loc,
		 enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;
  bool strict_overflow_p;

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 2
	      && op0 != NULL_TREE
	      && op1 != NULL_TREE);

  arg0 = op0;
  arg1 = op1;

  /* Strip any conversions that don't change the mode.  This is
     safe for every expression, except for a comparison expression
     because its signedness is derived from its operands.  So, in
     the latter case, only strip conversions that don't change the
     signedness.  MIN_EXPR/MAX_EXPR also need signedness of arguments
     preserved.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     cases, the appropriate type conversions should be put back in
     the tree that will get out of the constant folder.  */
  if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
    {
      STRIP_SIGN_NOPS (arg0);
      STRIP_SIGN_NOPS (arg1);
    }
  else
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }

  /* Note that TREE_CONSTANT isn't enough: static var addresses are
     constant but we can't do arithmetic on them.  */
  if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
    {
      if (kind == tcc_binary)
	{
	  /* Make sure type and arg0 have the same saturating flag.  */
	  gcc_assert (TYPE_SATURATING (type)
		      == TYPE_SATURATING (TREE_TYPE (arg0)));
	  tem = const_binop (code, arg0, arg1);
	}
      else if (kind == tcc_comparison)
	tem = fold_relational_const (code, type, arg0, arg1);
      else
	tem = NULL_TREE;

      if (tem != NULL_TREE)
	{
	  if (TREE_TYPE (tem) != type)
	    tem = fold_convert_loc (loc, type, tem);
	  return tem;
	}
    }
  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, code, type, op1, op0);

  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     The also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && ((truth_value_p (TREE_CODE (arg0))
	   && (truth_value_p (TREE_CODE (arg1))
	       || (TREE_CODE (arg1) == BIT_AND_EXPR
		   && integer_onep (TREE_OPERAND (arg1, 1)))))
	  || (truth_value_p (TREE_CODE (arg1))
	      && (truth_value_p (TREE_CODE (arg0))
		  || (TREE_CODE (arg0) == BIT_AND_EXPR
		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
			     : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
			     : TRUTH_XOR_EXPR,
			     boolean_type_node,
			     fold_convert_loc (loc, boolean_type_node, arg0),
			     fold_convert_loc (loc, boolean_type_node, arg1));

      if (code == EQ_EXPR)
	tem = invert_truthvalue_loc (loc, tem);

      return fold_convert_loc (loc, type, tem);
    }
  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	{
	  tem = fold_build2_loc (loc, code, type,
				 fold_convert_loc (loc, TREE_TYPE (op0),
						   TREE_OPERAND (arg0, 1)),
				 op1);
	  tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), tem);
	  goto fold_binary_exit;
	}
      if (TREE_CODE (arg1) == COMPOUND_EXPR
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_build2_loc (loc, code, type, op0,
				 fold_convert_loc (loc, TREE_TYPE (op1),
						   TREE_OPERAND (arg1, 1)));
	  tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0), tem);
	  goto fold_binary_exit;
	}

      if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
	{
	  tem = fold_binary_op_with_conditional_arg (loc, code, type,
						     op0, op1,
						     arg0, arg1,
						     /*cond_first_p=*/1);
	  if (tem != NULL_TREE)
	    return tem;
	}

      if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
	{
	  tem = fold_binary_op_with_conditional_arg (loc, code, type,
						     op0, op1,
						     arg1, arg0,
						     /*cond_first_p=*/0);
	  if (tem != NULL_TREE)
	    return tem;
	}
    }

  switch (code)
    {
    case POINTER_PLUS_EXPR:
      /* 0 +p index -> (type)index */
      if (integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

      /* PTR +p 0 -> PTR */
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
	return fold_convert_loc (loc, type,
				 fold_build2_loc (loc, PLUS_EXPR, sizetype,
						  fold_convert_loc (loc,
								    sizetype,
								    arg1),
						  fold_convert_loc (loc,
								    sizetype,
								    arg0)));

      /* index +p PTR -> PTR +p index */
      if (POINTER_TYPE_P (TREE_TYPE (arg1))
	  && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
	return fold_build2_loc (loc, POINTER_PLUS_EXPR, type,
				fold_convert_loc (loc, type, arg1),
				fold_convert_loc (loc, sizetype, arg0));

      /* (PTR +p B) +p A -> PTR +p (B + A) */
      if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  tree inner;
	  tree arg01 = fold_convert_loc (loc, sizetype,
					 TREE_OPERAND (arg0, 1));
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
				   arg01, fold_convert_loc (loc, sizetype,
							    arg1));
	  return fold_convert_loc (loc, type,
				   fold_build2_loc (loc, POINTER_PLUS_EXPR,
						    TREE_TYPE (arg00),
						    arg00, inner));
	}

      /* PTR_CST +p CST -> CST1 */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
				fold_convert_loc (loc, type, arg1));

      /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
	 of the array.  Loop optimizer sometimes produce this type of
	 expressions.  */
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tem = try_move_mult_to_index (loc, arg0,
					fold_convert_loc (loc, sizetype,
							  arg1));
	  if (tem)
	    return fold_convert_loc (loc, type, tem);
	}

      return NULL_TREE;
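      /* Illustrative example (added commentary, not original GCC text):
	 (p +p 4) +p 8 is re-associated by the rules above into p +p 12,
	 and 0 +p i degenerates to a plain conversion of the index.  */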
    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type, arg1),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));

      if (INTEGRAL_TYPE_P (type))
	{
	  /* Convert ~A + 1 to -A.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && integer_onep (arg1))
	    return fold_build1_loc (loc, NEGATE_EXPR, type,
				    fold_convert_loc (loc, type,
						      TREE_OPERAND (arg0, 0)));

	  /* ~X + X is -1.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && !TYPE_OVERFLOW_TRAPS (type))
	    {
	      tree tem = TREE_OPERAND (arg0, 0);

	      STRIP_NOPS (tem);
	      if (operand_equal_p (tem, arg1, 0))
		{
		  t1 = build_int_cst_type (type, -1);
		  return omit_one_operand_loc (loc, type, t1, arg1);
		}
	    }

	  /* X + ~X is -1.  */
	  if (TREE_CODE (arg1) == BIT_NOT_EXPR
	      && !TYPE_OVERFLOW_TRAPS (type))
	    {
	      tree tem = TREE_OPERAND (arg1, 0);

	      STRIP_NOPS (tem);
	      if (operand_equal_p (arg0, tem, 0))
		{
		  t1 = build_int_cst_type (type, -1);
		  return omit_one_operand_loc (loc, type, t1, arg0);
		}
	    }

	  /* X + (X / CST) * -CST is X % CST.  */
	  if (TREE_CODE (arg1) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
	      && operand_equal_p (arg0,
				  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
	    {
	      tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
	      tree cst1 = TREE_OPERAND (arg1, 1);
	      tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
					  cst1, cst0);
	      if (sum && integer_zerop (sum))
		return fold_convert_loc (loc, type,
					 fold_build2_loc (loc, TRUNC_MOD_EXPR,
							  TREE_TYPE (arg0),
							  arg0, cst0));
	    }
	}
      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
	 same or one.  Make sure type is not saturating.
	 fold_plusminus_mult_expr will re-associate.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
	   || TREE_CODE (arg1) == MULT_EXPR)
	  && !TYPE_SATURATING (type)
	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
	{
	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}
      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
	     with a constant, and the two constants have no bits in common,
	     we should treat this as a BIT_IOR_EXPR since this may produce more
	     simplifications.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	      && integer_zerop (const_binop (BIT_AND_EXPR,
					     TREE_OPERAND (arg0, 1),
					     TREE_OPERAND (arg1, 1))))
	    {
	      code = BIT_IOR_EXPR;
	      goto bit_ior;
	    }
	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
	     (plus (plus (mult) (mult)) (foo)) so that we can
	     take advantage of the factoring cases below.  */
	  if (((TREE_CODE (arg0) == PLUS_EXPR
		|| TREE_CODE (arg0) == MINUS_EXPR)
	       && TREE_CODE (arg1) == MULT_EXPR)
	      || ((TREE_CODE (arg1) == PLUS_EXPR
		   || TREE_CODE (arg1) == MINUS_EXPR)
		  && TREE_CODE (arg0) == MULT_EXPR))
	    {
	      tree parg0, parg1, parg, marg;
	      enum tree_code pcode;

	      if (TREE_CODE (arg1) == MULT_EXPR)
		parg = arg0, marg = arg1;
	      else
		parg = arg1, marg = arg0;
	      pcode = TREE_CODE (parg);
	      parg0 = TREE_OPERAND (parg, 0);
	      parg1 = TREE_OPERAND (parg, 1);
	      STRIP_NOPS (parg0);
	      STRIP_NOPS (parg1);

	      if (TREE_CODE (parg0) == MULT_EXPR
		  && TREE_CODE (parg1) != MULT_EXPR)
		return fold_build2_loc (loc, pcode, type,
					fold_build2_loc (loc, PLUS_EXPR, type,
							 fold_convert_loc (loc,
									   type,
									   parg0),
							 fold_convert_loc (loc,
									   type,
									   marg)),
					fold_convert_loc (loc, type, parg1));
	      if (TREE_CODE (parg0) != MULT_EXPR
		  && TREE_CODE (parg1) == MULT_EXPR)
		return
		  fold_build2_loc (loc, PLUS_EXPR, type,
				   fold_convert_loc (loc, type, parg0),
				   fold_build2_loc (loc, pcode, type,
						    fold_convert_loc (loc,
								      type,
								      marg),
						    fold_convert_loc (loc,
								      type,
								      parg1)));
	    }
	}
9747 if (fold_real_zero_addition_p (TREE_TYPE (arg0
), arg1
, 0))
9748 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
9750 /* Likewise if the operands are reversed. */
9751 if (fold_real_zero_addition_p (TREE_TYPE (arg1
), arg0
, 0))
9752 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
9754 /* Convert X + -C into X - C. */
9755 if (TREE_CODE (arg1
) == REAL_CST
9756 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
)))
9758 tem
= fold_negate_const (arg1
, type
);
9759 if (!TREE_OVERFLOW (arg1
) || !flag_trapping_math
)
9760 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
9761 fold_convert_loc (loc
, type
, arg0
),
9762 fold_convert_loc (loc
, type
, tem
));
9765 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9766 to __complex__ ( x, y ). This is not the same for SNaNs or
9767 if signed zeros are involved. */
9768 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
9769 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
9770 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
9772 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
9773 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
9774 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
9775 bool arg0rz
= false, arg0iz
= false;
9776 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
9777 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
9779 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
9780 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
9781 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
9783 tree rp
= arg1r
? arg1r
9784 : build1 (REALPART_EXPR
, rtype
, arg1
);
9785 tree ip
= arg0i
? arg0i
9786 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
9787 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
9789 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
9791 tree rp
= arg0r
? arg0r
9792 : build1 (REALPART_EXPR
, rtype
, arg0
);
9793 tree ip
= arg1i
? arg1i
9794 : build1 (IMAGPART_EXPR
, rtype
, arg1
);
9795 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
	  if (flag_unsafe_math_optimizations
	      && (TREE_CODE (arg0) == RDIV_EXPR
		  || TREE_CODE (arg0) == MULT_EXPR)
	      && (TREE_CODE (arg1) == RDIV_EXPR
		  || TREE_CODE (arg1) == MULT_EXPR)
	      && (tem = distribute_real_division (loc, code, type,
						  arg0, arg1)))
	    return tem;

	  /* Convert x+x into x*2.0.  */
	  if (operand_equal_p (arg0, arg1, 0)
	      && SCALAR_FLOAT_TYPE_P (type))
	    return fold_build2_loc (loc, MULT_EXPR, type, arg0,
				    build_real (type, dconst2));

	  /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
	     We associate floats only if the user has specified
	     -fassociative-math.  */
	  if (flag_associative_math
	      && TREE_CODE (arg1) == PLUS_EXPR
	      && TREE_CODE (arg0) != MULT_EXPR)
	    {
	      tree tree10 = TREE_OPERAND (arg1, 0);
	      tree tree11 = TREE_OPERAND (arg1, 1);
	      if (TREE_CODE (tree11) == MULT_EXPR
		  && TREE_CODE (tree10) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2_loc (loc, PLUS_EXPR, type,
					   arg0, tree10);
		  return fold_build2_loc (loc, PLUS_EXPR, type,
					  tree0, tree11);
		}
	    }
	  /* Convert (b*c + d*e) + a into b*c + (d*e +a).
	     We associate floats only if the user has specified
	     -fassociative-math.  */
	  if (flag_associative_math
	      && TREE_CODE (arg0) == PLUS_EXPR
	      && TREE_CODE (arg1) != MULT_EXPR)
	    {
	      tree tree00 = TREE_OPERAND (arg0, 0);
	      tree tree01 = TREE_OPERAND (arg0, 1);
	      if (TREE_CODE (tree01) == MULT_EXPR
		  && TREE_CODE (tree00) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2_loc (loc, PLUS_EXPR, type,
					   tree01, arg1);
		  return fold_build2_loc (loc, PLUS_EXPR, type,
					  tree00, tree0);
		}
	    }
	}
    bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
	 is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
	 is a rotate of A by B bits.  */
      {
	enum tree_code code0, code1;
	tree rtype;
	code0 = TREE_CODE (arg0);
	code1 = TREE_CODE (arg1);
	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
	    && operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 0), 0)
	    && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
		TYPE_UNSIGNED (rtype))
	    /* Only create rotates in complete modes.  Other cases are not
	       expanded properly.  */
	    && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
	  {
	    tree tree01, tree11;
	    enum tree_code code01, code11;

	    tree01 = TREE_OPERAND (arg0, 1);
	    tree11 = TREE_OPERAND (arg1, 1);
	    STRIP_NOPS (tree01);
	    STRIP_NOPS (tree11);
	    code01 = TREE_CODE (tree01);
	    code11 = TREE_CODE (tree11);
	    if (code01 == INTEGER_CST
		&& code11 == INTEGER_CST
		&& TREE_INT_CST_HIGH (tree01) == 0
		&& TREE_INT_CST_HIGH (tree11) == 0
		&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
		    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
	      {
		tem = build2 (LROTATE_EXPR,
			      TREE_TYPE (TREE_OPERAND (arg0, 0)),
			      TREE_OPERAND (arg0, 0),
			      code0 == LSHIFT_EXPR ? tree01 : tree11);
		SET_EXPR_LOCATION (tem, loc);
		return fold_convert_loc (loc, type, tem);
	      }
	    else if (code11 == MINUS_EXPR)
	      {
		tree tree110, tree111;
		tree110 = TREE_OPERAND (tree11, 0);
		tree111 = TREE_OPERAND (tree11, 1);
		STRIP_NOPS (tree110);
		STRIP_NOPS (tree111);
		if (TREE_CODE (tree110) == INTEGER_CST
		    && 0 == compare_tree_int (tree110,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree01, tree111, 0))
		  return
		    fold_convert_loc (loc, type,
				      build2 ((code0 == LSHIFT_EXPR
					       ? LROTATE_EXPR : RROTATE_EXPR),
					      TREE_TYPE (TREE_OPERAND (arg0, 0)),
					      TREE_OPERAND (arg0, 0), tree01));
	      }
	    else if (code01 == MINUS_EXPR)
	      {
		tree tree010, tree011;
		tree010 = TREE_OPERAND (tree01, 0);
		tree011 = TREE_OPERAND (tree01, 1);
		STRIP_NOPS (tree010);
		STRIP_NOPS (tree011);
		if (TREE_CODE (tree010) == INTEGER_CST
		    && 0 == compare_tree_int (tree010,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree11, tree011, 0))
		  return fold_convert_loc
		    (loc, type,
		     build2 ((code0 != LSHIFT_EXPR
			      ? LROTATE_EXPR : RROTATE_EXPR),
			     TREE_TYPE (TREE_OPERAND (arg0, 0)),
			     TREE_OPERAND (arg0, 0), tree11));
	      }
	  }
      }
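      /* For illustration: with a 32-bit unsigned A, both

	     (A << 8) + (A >> 24)    and    (A << B) + (A >> (32 - B))

	 satisfy the conditions above and are rewritten as rotates of A
	 (by 8 and by B respectively), assuming the type's precision
	 matches its mode as required by the guard.  */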
    associate:
      /* In most languages, can't associate operations on floats through
	 parentheses.  Rather than remember where the parentheses were, we
	 don't associate floats at all, unless the user has specified
	 -fassociative-math.
	 And, we need to make sure type is not saturating.  */

      if ((! FLOAT_TYPE_P (type) || flag_associative_math)
	  && !TYPE_SATURATING (type))
	{
	  tree var0, con0, lit0, minus_lit0;
	  tree var1, con1, lit1, minus_lit1;
	  bool ok = true;

	  /* Split both trees into variables, constants, and literals.  Then
	     associate each group together, the constants with literals,
	     then the result with variables.  This increases the chances of
	     literals being recombined later and of generating relocatable
	     expressions for the sum of a constant and literal.  */
	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
			     code == MINUS_EXPR);

	  /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
	  if (code == MINUS_EXPR)
	    code = PLUS_EXPR;

	  /* With undefined overflow we can only associate constants with one
	     variable, and constants whose association doesn't overflow.  */
	  if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
	      || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
	    {
	      if (var0 && var1)
		{
		  tree tmp0 = var0;
		  tree tmp1 = var1;

		  if (TREE_CODE (tmp0) == NEGATE_EXPR)
		    tmp0 = TREE_OPERAND (tmp0, 0);
		  if (TREE_CODE (tmp1) == NEGATE_EXPR)
		    tmp1 = TREE_OPERAND (tmp1, 0);
		  /* The only case we can still associate with two variables
		     is if they are the same, modulo negation.  */
		  if (!operand_equal_p (tmp0, tmp1, 0))
		    ok = false;
		}

	      if (ok && lit0 && lit1)
		{
		  tree tmp0 = fold_convert (type, lit0);
		  tree tmp1 = fold_convert (type, lit1);

		  if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
		      && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
		    ok = false;
		}
	    }

	  /* Only do something if we found more than two objects.  Otherwise,
	     nothing has changed and we risk infinite recursion.  */
	  if (ok
	      && (2 < ((var0 != 0) + (var1 != 0)
		       + (con0 != 0) + (con1 != 0)
		       + (lit0 != 0) + (lit1 != 0)
		       + (minus_lit0 != 0) + (minus_lit1 != 0))))
	    {
	      var0 = associate_trees (loc, var0, var1, code, type);
	      con0 = associate_trees (loc, con0, con1, code, type);
	      lit0 = associate_trees (loc, lit0, lit1, code, type);
	      minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
					    code, type);

	      /* Preserve the MINUS_EXPR if the negative part of the literal
		 is greater than the positive part.  Otherwise, the
		 multiplicative folding code (i.e. extract_muldiv) may be
		 fooled in case unsigned constants are subtracted, like in
		 the following example: ((X*2 + 4) - 8U)/2.  */
	      if (minus_lit0 && lit0)
		{
		  if (TREE_CODE (lit0) == INTEGER_CST
		      && TREE_CODE (minus_lit0) == INTEGER_CST
		      && tree_int_cst_lt (lit0, minus_lit0))
		    {
		      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
						    MINUS_EXPR, type);
		      lit0 = 0;
		    }
		  else
		    {
		      lit0 = associate_trees (loc, lit0, minus_lit0,
					      MINUS_EXPR, type);
		      minus_lit0 = 0;
		    }
		}
	      if (minus_lit0)
		{
		  if (con0 == 0)
		    return
		      fold_convert_loc (loc, type,
					associate_trees (loc, var0, minus_lit0,
							 MINUS_EXPR, type));
		  else
		    {
		      con0 = associate_trees (loc, con0, minus_lit0,
					      MINUS_EXPR, type);
		      return
			fold_convert_loc (loc, type,
					  associate_trees (loc, var0, con0,
							   MINUS_EXPR, type));
		    }
		}

	      con0 = associate_trees (loc, con0, lit0, code, type);
	      return
		fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
							      code, type));
	    }
	}

      return NULL_TREE;
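      /* For illustration of the association code above: for integral X,
	 splitting turns

	     (X + 1) + 2

	 into the variable part X and the literals 1 and 2; the literals are
	 combined first, so the whole expression refolds to X + 3.  */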
    case MINUS_EXPR:
      /* Pointer simplifications for subtraction, simple reassociations. */
      if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
	{
	  /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
	  if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
	      && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	    {
	      tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	      tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	      tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	      tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	      return fold_build2_loc (loc, PLUS_EXPR, type,
				      fold_build2_loc (loc, MINUS_EXPR, type,
						       arg00, arg10),
				      fold_build2_loc (loc, MINUS_EXPR, type,
						       arg01, arg11));
	    }
	  /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1
	     simplifies.  */
	  else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	    {
	      tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	      tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	      tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
					  fold_convert_loc (loc, type, arg1));
	      if (tmp)
		return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
	    }
	}
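      /* For illustration: for pointers P and Q of the same type,

	     (P p+ 4) - (Q p+ 1)

	 becomes (P - Q) + 3 once the inner constants fold, and
	 (P p+ 4) - P becomes plain 4 because P - P simplifies to zero.  */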
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, PLUS_EXPR, type, op0,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && (FLOAT_TYPE_P (type)
	      || INTEGRAL_TYPE_P (type))
	  && negate_expr_p (arg1)
	  && reorder_operands_p (arg0, arg1))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type,
						  negate_expr (arg1)),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));
      /* Convert -A - 1 to ~A.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (arg0) == NEGATE_EXPR
	  && integer_onep (arg1)
	  && !TYPE_OVERFLOW_TRAPS (type))
	return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));

      /* Convert -1 - A to ~A.  */
      if (INTEGRAL_TYPE_P (type)
	  && integer_all_onesp (arg0))
	return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);


      /* X - (X / CST) * CST is X % CST.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
	  && operand_equal_p (arg0,
			      TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
			      TREE_OPERAND (arg1, 1), 0))
	return
	  fold_convert_loc (loc, type,
			    fold_build2_loc (loc, TRUNC_MOD_EXPR,
					     TREE_TYPE (arg0),
					     arg0, TREE_OPERAND (arg1, 1)));
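      /* For illustration: for integral X,

	     X - (X / 16) * 16

	 folds to X % 16 by the truncating-division identity
	 a == (a / b) * b + a % b.  */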
      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg0))
	    return negate_expr (fold_convert_loc (loc, type, arg1));
	  if (integer_zerop (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* Fold A - (A & B) into ~B & A.  */
	  if (!TREE_SIDE_EFFECTS (arg0)
	      && TREE_CODE (arg1) == BIT_AND_EXPR)
	    {
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
		{
		  tree arg10 = fold_convert_loc (loc, type,
						 TREE_OPERAND (arg1, 0));
		  return fold_build2_loc (loc, BIT_AND_EXPR, type,
					  fold_build1_loc (loc, BIT_NOT_EXPR,
							   type, arg10),
					  fold_convert_loc (loc, type, arg0));
		}
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
		{
		  tree arg11 = fold_convert_loc (loc,
						 type, TREE_OPERAND (arg1, 1));
		  return fold_build2_loc (loc, BIT_AND_EXPR, type,
					  fold_build1_loc (loc, BIT_NOT_EXPR,
							   type, arg11),
					  fold_convert_loc (loc, type, arg0));
		}
	    }

	  /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
	     any power of 2 minus 1.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0))
	    {
	      tree mask0 = TREE_OPERAND (arg0, 1);
	      tree mask1 = TREE_OPERAND (arg1, 1);
	      tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);

	      if (operand_equal_p (tem, mask1, 0))
		{
		  tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
					 TREE_OPERAND (arg0, 0), mask1);
		  return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
		}
	    }
	}
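      /* For illustration: since A == (A & B) | (A & ~B) with the two halves
	 disjoint, A - (A & B) equals A & ~B, e.g.

	     x - (x & 0x0f)   becomes   ~0x0f & x

	 which simply clears the low four bits of x.  */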
      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
	 ARG0 is zero and X + ARG0 reduces to X, since that would mean
	 (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	return negate_expr (fold_convert_loc (loc, type, arg1));

      /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
	 __complex__ ( x, -y ).  This is not the same for SNaNs or if
	 signed zeros are involved.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	{
	  tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	  tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
	  tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
	  bool arg0rz = false, arg0iz = false;
	  if ((arg0r && (arg0rz = real_zerop (arg0r)))
	      || (arg0i && (arg0iz = real_zerop (arg0i))))
	    {
	      tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
	      tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
	      if (arg0rz && arg1i && real_zerop (arg1i))
		{
		  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
					     arg1r ? arg1r
					     : build1 (REALPART_EXPR, rtype, arg1));
		  tree ip = arg0i ? arg0i
		    : build1 (IMAGPART_EXPR, rtype, arg0);
		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		}
	      else if (arg0iz && arg1r && real_zerop (arg1r))
		{
		  tree rp = arg0r ? arg0r
		    : build1 (REALPART_EXPR, rtype, arg0);
		  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
					     arg1i ? arg1i
					     : build1 (IMAGPART_EXPR, rtype, arg1));
		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		}
	    }
	}
      /* Fold &x - &x.  This can happen from &x.foo - &x.
	 This is unsafe for certain floats even in non-IEEE formats.
	 In IEEE, it is unsafe because it does wrong for NaNs.
	 Also note that operand_equal_p is always false if an operand
	 is volatile.  */

      if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
	  && operand_equal_p (arg0, arg1, 0))
	return fold_convert_loc (loc, type, integer_zero_node);

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (negate_expr_p (arg1)
	  && ((FLOAT_TYPE_P (type)
	       /* Avoid this transformation if B is a positive REAL_CST.  */
	       && (TREE_CODE (arg1) != REAL_CST
		   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
	      || INTEGRAL_TYPE_P (type)))
	return fold_build2_loc (loc, PLUS_EXPR, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  negate_expr (arg1)));
      /* Try folding difference of addresses.  */
      {
	HOST_WIDE_INT diff;

	if ((TREE_CODE (arg0) == ADDR_EXPR
	     || TREE_CODE (arg1) == ADDR_EXPR)
	    && ptr_difference_const (arg0, arg1, &diff))
	  return build_int_cst_type (type, diff);
      }

      /* Fold &a[i] - &a[j] to i-j.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
	{
	  tree aref0 = TREE_OPERAND (arg0, 0);
	  tree aref1 = TREE_OPERAND (arg1, 0);
	  if (operand_equal_p (TREE_OPERAND (aref0, 0),
			       TREE_OPERAND (aref1, 0), 0))
	    {
	      tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
	      tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
	      tree esz = array_ref_element_size (aref0);
	      tree diff = build2 (MINUS_EXPR, type, op0, op1);
	      return fold_build2_loc (loc, MULT_EXPR, type, diff,
				      fold_convert_loc (loc, type, esz));
	    }
	}
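      /* For illustration: with int a[10], the address difference

	     &a[i] - &a[j]

	 is folded here into (i - j) scaled by the element size; the later
	 division by the element size that implements C pointer subtraction
	 then leaves just i - j.  */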
      if (FLOAT_TYPE_P (type)
	  && flag_unsafe_math_optimizations
	  && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
	  && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
	  && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
	return tem;

      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
	 same or one.  Make sure type is not saturating.
	 fold_plusminus_mult_expr will re-associate.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
	   || TREE_CODE (arg1) == MULT_EXPR)
	  && !TYPE_SATURATING (type)
	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
	{
	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}

      goto associate;
    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2_loc (loc, MULT_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				fold_convert_loc (loc, type,
						  negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2_loc (loc, MULT_EXPR, type,
				fold_convert_loc (loc, type,
						  negate_expr (arg0)),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));
      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return omit_one_operand_loc (loc, type, arg1, arg0);
	  if (integer_onep (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
	  /* Transform x * -1 into -x.  Make sure to do the negation
	     on the original operand with conversions not stripped
	     because we can only strip non-sign-changing conversions.  */
	  if (integer_all_onesp (arg1))
	    return fold_convert_loc (loc, type, negate_expr (op0));
	  /* Transform x * -C into -x * C if x is easily negatable.  */
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && tree_int_cst_sgn (arg1) == -1
	      && negate_expr_p (arg0)
	      && (tem = negate_expr (arg1)) != arg1
	      && !TREE_OVERFLOW (tem))
	    return fold_build2_loc (loc, MULT_EXPR, type,
				    fold_convert_loc (loc, type,
						      negate_expr (arg0)),
				    tem);

	  /* (a * (1 << b)) is (a << b)  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg1, 0)))
	    return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
				    TREE_OPERAND (arg1, 1));
	  if (TREE_CODE (arg0) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg0, 0)))
	    return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
				    TREE_OPERAND (arg0, 1));

	  /* (A + A) * C -> A * 2 * C  */
	  if (TREE_CODE (arg0) == PLUS_EXPR
	      && TREE_CODE (arg1) == INTEGER_CST
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg0, 1), 0))
	    return fold_build2_loc (loc, MULT_EXPR, type,
				    omit_one_operand_loc (loc, type,
							  TREE_OPERAND (arg0, 0),
							  TREE_OPERAND (arg0, 1)),
				    fold_build2_loc (loc, MULT_EXPR, type,
						     build_int_cst (type, 2),
						     arg1));
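	  /* For illustration: a * (1 << b) becomes a << b, and
	     (x + x) * 5 becomes x * 2 * 5, which constant folding then
	     reduces to x * 10.  */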
	  strict_overflow_p = false;
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					     &strict_overflow_p)))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying "
					"multiplication"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_convert_loc (loc, type, tem);
	    }

	  /* Optimize z * conj(z) for integer complex numbers.  */
	  if (TREE_CODE (arg0) == CONJ_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return fold_mult_zconjz (loc, type, arg1);
	  if (TREE_CODE (arg1) == CONJ_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return fold_mult_zconjz (loc, type, arg0);
	}
      else
	{
	  /* Maybe fold x * 0 to 0.  The expressions aren't the same
	     when x is NaN, since x * 0 is also NaN.  Nor are they the
	     same in modes with signed zeros, since multiplying a
	     negative value by 0 gives -0, not +0.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_zerop (arg1))
	    return omit_one_operand_loc (loc, type, arg1, arg0);
	  /* In IEEE floating point, x*1 is not equivalent to x for snans.
	     Likewise for complex arithmetic with signed zeros.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
		  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	      && real_onep (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* Transform x * -1.0 into -x.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
		  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	      && real_minus_onep (arg1))
	    return fold_convert_loc (loc, type, negate_expr (arg0));

	  /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
	     the result for floating point types due to rounding so it is
	     applied only if -fassociative-math was specified.  */
	  if (flag_associative_math
	      && TREE_CODE (arg0) == RDIV_EXPR
	      && TREE_CODE (arg1) == REAL_CST
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
	    {
	      tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
				      arg1);
	      if (tem)
		return fold_build2_loc (loc, RDIV_EXPR, type, tem,
					TREE_OPERAND (arg0, 1));
	    }

	  /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
	  if (operand_equal_p (arg0, arg1, 0))
	    {
	      tree tem = fold_strip_sign_ops (arg0);
	      if (tem != NULL_TREE)
		{
		  tem = fold_convert_loc (loc, type, tem);
		  return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
		}
	    }
	  /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
	     This is not the same for NaNs or if signed zeros are
	     involved.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && TREE_CODE (arg1) == COMPLEX_CST
	      && real_zerop (TREE_REALPART (arg1)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	      if (real_onep (TREE_IMAGPART (arg1)))
		return
		  fold_build2_loc (loc, COMPLEX_EXPR, type,
				   negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
								 rtype, arg0)),
				   fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
	      else if (real_minus_onep (TREE_IMAGPART (arg1)))
		return
		  fold_build2_loc (loc, COMPLEX_EXPR, type,
				   fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
				   negate_expr (fold_build1_loc (loc, REALPART_EXPR,
								 rtype, arg0)));
	    }

	  /* Optimize z * conj(z) for floating point complex numbers.
	     Guarded by flag_unsafe_math_optimizations as non-finite
	     imaginary components don't produce scalar results.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == CONJ_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return fold_mult_zconjz (loc, type, arg1);
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == CONJ_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return fold_mult_zconjz (loc, type, arg0);
	  if (flag_unsafe_math_optimizations)
	    {
	      enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	      enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	      /* Optimizations of root(...)*root(...).  */
	      if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
		{
		  tree rootfn, arg;
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);

		  /* Optimize sqrt(x)*sqrt(x) as x.  */
		  if (BUILTIN_SQRT_P (fcode0)
		      && operand_equal_p (arg00, arg10, 0)
		      && ! HONOR_SNANS (TYPE_MODE (type)))
		    return arg00;

		  /* Optimize root(x)*root(y) as root(x*y).  */
		  rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
		  return build_call_expr_loc (loc, rootfn, 1, arg);
		}

	      /* Optimize expN(x)*expN(y) as expN(x+y).  */
	      if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
		{
		  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
					      CALL_EXPR_ARG (arg0, 0),
					      CALL_EXPR_ARG (arg1, 0));
		  return build_call_expr_loc (loc, expfn, 1, arg);
		}

	      /* Optimizations of pow(...)*pow(...).  */
	      if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
		  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
		  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
		{
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg01 = CALL_EXPR_ARG (arg0, 1);
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
		  tree arg11 = CALL_EXPR_ARG (arg1, 1);

		  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
		  if (operand_equal_p (arg01, arg11, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      tree arg = fold_build2_loc (loc, MULT_EXPR, type,
						  arg00, arg10);
		      return build_call_expr_loc (loc, powfn, 2, arg, arg01);
		    }

		  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
		  if (operand_equal_p (arg00, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
						  arg01, arg11);
		      return build_call_expr_loc (loc, powfn, 2, arg00, arg);
		    }
		}

	      /* Optimize tan(x)*cos(x) as sin(x).  */
	      if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
		   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
		   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
		   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
		   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
		   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
		  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				      CALL_EXPR_ARG (arg1, 0), 0))
		{
		  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

		  if (sinfn != NULL_TREE)
		    return build_call_expr_loc (loc, sinfn, 1,
						CALL_EXPR_ARG (arg0, 0));
		}

	      /* Optimize x*pow(x,c) as pow(x,c+1).  */
	      if (fcode1 == BUILT_IN_POW
		  || fcode1 == BUILT_IN_POWF
		  || fcode1 == BUILT_IN_POWL)
		{
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
		  tree arg11 = CALL_EXPR_ARG (arg1, 1);
		  if (TREE_CODE (arg11) == REAL_CST
		      && !TREE_OVERFLOW (arg11)
		      && operand_equal_p (arg0, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
		      REAL_VALUE_TYPE c;
		      tree arg;

		      c = TREE_REAL_CST (arg11);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
		    }
		}

	      /* Optimize pow(x,c)*x as pow(x,c+1).  */
	      if (fcode0 == BUILT_IN_POW
		  || fcode0 == BUILT_IN_POWF
		  || fcode0 == BUILT_IN_POWL)
		{
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg01 = CALL_EXPR_ARG (arg0, 1);
		  if (TREE_CODE (arg01) == REAL_CST
		      && !TREE_OVERFLOW (arg01)
		      && operand_equal_p (arg1, arg00, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      REAL_VALUE_TYPE c;
		      tree arg;

		      c = TREE_REAL_CST (arg01);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      return build_call_expr_loc (loc, powfn, 2, arg1, arg);
		    }
		}

	      /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
	      if (optimize_function_for_speed_p (cfun)
		  && operand_equal_p (arg0, arg1, 0))
		{
		  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

		  if (powfn)
		    {
		      tree arg = build_real (type, dconst2);
		      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
		    }
		}
	    }
	}
      goto associate;
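      /* For illustration, the unsafe-math multiplications above rewrite

	     sqrt (x) * sqrt (x)      as  x
	     sqrt (x) * sqrt (y)      as  sqrt (x * y)
	     exp (x) * exp (y)        as  exp (x + y)
	     pow (x, y) * pow (x, z)  as  pow (x, y + z)

	 None of these are exact in IEEE arithmetic, which is why they are
	 guarded by -funsafe-math-optimizations.  */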
    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = fold_convert_loc (loc, type, integer_zero_node);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg1);
	}

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = fold_convert_loc (loc, type, integer_zero_node);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg0);
	}
      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
	  int width = TYPE_PRECISION (type), w;
	  hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
	  lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
	  hi2 = TREE_INT_CST_HIGH (arg1);
	  lo2 = TREE_INT_CST_LOW (arg1);

	  /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
	  if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
	    return omit_one_operand_loc (loc, type, arg1,
					 TREE_OPERAND (arg0, 0));

	  if (width > HOST_BITS_PER_WIDE_INT)
	    {
	      mhi = (unsigned HOST_WIDE_INT) -1
		    >> (2 * HOST_BITS_PER_WIDE_INT - width);
	      mlo = -1;
	    }
	  else
	    {
	      mhi = 0;
	      mlo = (unsigned HOST_WIDE_INT) -1
		    >> (HOST_BITS_PER_WIDE_INT - width);
	    }

	  /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
	  if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
	    return fold_build2_loc (loc, BIT_IOR_EXPR, type,
				    TREE_OPERAND (arg0, 0), arg1);

	  /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
	     unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
	     mode which allows further optimizations.  */
	  hi1 &= mhi;
	  lo1 &= mlo;
	  hi2 &= mhi;
	  lo2 &= mlo;
	  hi3 = hi1 & ~hi2;
	  lo3 = lo1 & ~lo2;
	  for (w = BITS_PER_UNIT;
	       w <= width && w <= HOST_BITS_PER_WIDE_INT;
	       w <<= 1)
	    {
	      unsigned HOST_WIDE_INT mask
		= (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
	      if (((lo1 | lo2) & mask) == mask
		  && (lo1 & ~mask) == 0 && hi1 == 0)
		{
		  hi3 = 0;
		  lo3 = mask;
		  break;
		}
	    }
	  if (hi3 != hi1 || lo3 != lo1)
	    return fold_build2_loc (loc, BIT_IOR_EXPR, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, type,
						     TREE_OPERAND (arg0, 0),
						     build_int_cst_wide (type,
									 lo3, hi3)),
				    arg1);
	}
      /* (X & Y) | Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X & Y) | X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X | (X & Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X | (Y & X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));

      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;
      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

	 This results in more efficient code for machines without a NAND
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NAND instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return
	    fold_build1_loc (loc, BIT_NOT_EXPR, type,
			     build2 (BIT_AND_EXPR, type,
				     fold_convert_loc (loc, type,
						       TREE_OPERAND (arg0, 0)),
				     fold_convert_loc (loc, type,
						       TREE_OPERAND (arg1, 0))));
	}

      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;
    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_all_onesp (arg1))
	return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = fold_convert_loc (loc, type, integer_zero_node);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg1);
	}

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = fold_convert_loc (loc, type, integer_zero_node);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg0);
	}

      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
	 with a constant, and the two constants have no bits in common,
	 we should treat this as a BIT_IOR_EXPR since this may produce more
	 simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (BIT_AND_EXPR,
					 TREE_OPERAND (arg0, 1),
					 TREE_OPERAND (arg1, 1))))
	{
	  code = BIT_IOR_EXPR;
	  goto bit_ior;
	}
      /* (X | Y) ^ X -> Y & ~X.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  tree t2 = TREE_OPERAND (arg0, 1);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
				arg1);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				fold_convert_loc (loc, type, t2),
				fold_convert_loc (loc, type, t1));
	  return t1;
	}

      /* (Y | X) ^ X -> Y & ~X.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tree t2 = TREE_OPERAND (arg0, 0);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
				arg1);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				fold_convert_loc (loc, type, t2),
				fold_convert_loc (loc, type, t1));
	  return t1;
	}

      /* X ^ (X | Y) -> Y & ~X.  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
	{
	  tree t2 = TREE_OPERAND (arg1, 1);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
				arg0);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				fold_convert_loc (loc, type, t2),
				fold_convert_loc (loc, type, t1));
	  return t1;
	}

      /* X ^ (Y | X) -> Y & ~X.  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
	{
	  tree t2 = TREE_OPERAND (arg1, 0);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
				arg0);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				fold_convert_loc (loc, type, t2),
				fold_convert_loc (loc, type, t1));
	  return t1;
	}
      /* Convert ~X ^ ~Y to X ^ Y.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));

      /* Convert ~X ^ C to X ^ ~C.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));

      /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_onep (arg1))
	return fold_build2_loc (loc, EQ_EXPR, type, arg0,
				build_int_cst (TREE_TYPE (arg0), 0));

      /* Fold (X & Y) ^ Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}
      /* Fold (X & Y) ^ X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}
      /* Fold X ^ (X & Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_convert_loc (loc, type, arg0),
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
	}
      /* Fold X ^ (Y & X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg0));
	}
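      /* For illustration: (X & 1) ^ 1 tests the complement of X's low bit,
	 so it is rewritten above as (X & 1) == 0, i.e. an evenness test;
	 the comparison form is easier for later passes to combine.  */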
      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
      if (integer_all_onesp (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X & X is always zero.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X & ~X is always zero.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree tmp1 = fold_convert_loc (loc, type, arg1);
	  tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	  tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
	  tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
	  return
	    fold_convert_loc (loc, type,
			      fold_build2_loc (loc, BIT_IOR_EXPR,
					       type, tmp2, tmp3));
	}

      /* (X | Y) & Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X | Y) & X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X & (X | Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X & (Y | X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
      /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_onep (arg1))
	{
	  tem = TREE_OPERAND (arg0, 0);
	  return fold_build2_loc (loc, EQ_EXPR, type,
				  fold_build2_loc (loc, BIT_AND_EXPR,
						   TREE_TYPE (tem), tem,
						   build_int_cst (TREE_TYPE (tem), 1)),
				  build_int_cst (TREE_TYPE (tem), 0));
	}
      /* Fold ~X & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && integer_onep (arg1))
	{
	  tem = TREE_OPERAND (arg0, 0);
	  return fold_build2_loc (loc, EQ_EXPR, type,
				  fold_build2_loc (loc, BIT_AND_EXPR,
						   TREE_TYPE (tem), tem,
						   build_int_cst (TREE_TYPE (tem), 1)),
				  build_int_cst (TREE_TYPE (tem), 0));
	}

      /* Fold (X ^ Y) & Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}
      /* Fold (X ^ Y) & X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}
      /* Fold X & (X ^ Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_convert_loc (loc, type, arg0),
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
	}
      /* Fold X & (Y ^ X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg0));
	}
      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  unsigned int prec
	    = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

	  if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
	      && (~TREE_INT_CST_LOW (arg1)
		  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
	    return
	      fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	}

      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

	 This results in more efficient code for machines without a NOR
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NOR instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				  build2 (BIT_IOR_EXPR, type,
					  fold_convert_loc (loc, type,
							    TREE_OPERAND (arg0, 0)),
					  fold_convert_loc (loc, type,
							    TREE_OPERAND (arg1, 0))));
	}
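      /* For illustration: if c has type unsigned char, (int) c is produced
	 by zero extension, so its top bits are already clear and

	     (int) c & 0377

	 folds to plain (int) c.  */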
      /* If arg0 is derived from the address of an object or function, we may
	 be able to fold this expression using the object or function's
	 alignment.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
	{
	  unsigned HOST_WIDE_INT modulus, residue;
	  unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);

	  modulus = get_pointer_modulus_and_residue (arg0, &residue,
						     integer_onep (arg1));

	  /* This works because modulus is a power of 2.  If this weren't the
	     case, we'd have to replace it by its greatest power-of-2
	     divisor: modulus & -modulus.  */
	  if (low < modulus)
	    return build_int_cst (type, residue & low);
	}
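      /* For illustration: if arg0 is the address of an object whose
	 alignment is known to be 8 bytes, modulus is 8 and residue 0, so
	 masking that address with 7 folds to the constant 0.  */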
      /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
	      (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
	 if the new mask might be further optimized.  */
      if ((TREE_CODE (arg0) == LSHIFT_EXPR
	   || TREE_CODE (arg0) == RSHIFT_EXPR)
	  && host_integerp (TREE_OPERAND (arg0, 1), 1)
	  && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
	     < TYPE_PRECISION (TREE_TYPE (arg0))
	  && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
	  && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
	{
	  unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
	  unsigned HOST_WIDE_INT mask
	    = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
	  unsigned HOST_WIDE_INT newmask, zerobits = 0;
	  tree shift_type = TREE_TYPE (arg0);

	  if (TREE_CODE (arg0) == LSHIFT_EXPR)
	    zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
	  else if (TREE_CODE (arg0) == RSHIFT_EXPR
		   && TYPE_PRECISION (TREE_TYPE (arg0))
		      == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
	    {
	      unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
	      tree arg00 = TREE_OPERAND (arg0, 0);
	      /* See if more bits can be proven as zero because of
		 zero extension.  */
	      if (TREE_CODE (arg00) == NOP_EXPR
		  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
		{
		  tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
		  if (TYPE_PRECISION (inner_type)
		      == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
		      && TYPE_PRECISION (inner_type) < prec)
		    {
		      prec = TYPE_PRECISION (inner_type);
		      /* See if we can shorten the right shift.  */
		      if (shiftc < prec)
			shift_type = inner_type;
		    }
		}
	      zerobits = ~(unsigned HOST_WIDE_INT) 0;
	      zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
	      zerobits <<= prec - shiftc;
	      /* For arithmetic shift if sign bit could be set, zerobits
		 can contain actually sign bits, so no transformation is
		 possible, unless MASK masks them all away.  In that
		 case the shift needs to be converted into logical shift.  */
	      if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
		  && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
		{
		  if ((mask & zerobits) == 0)
		    shift_type = unsigned_type_for (TREE_TYPE (arg0));
		  else
		    zerobits = 0;
		}
	    }

	  /* ((X << 16) & 0xff00) is (X, 0).  */
	  if ((mask & zerobits) == mask)
	    return omit_one_operand_loc (loc, type,
					 build_int_cst (type, 0), arg0);

	  newmask = mask | zerobits;
	  if (newmask != mask && (newmask & (newmask + 1)) == 0)
	    {
	      unsigned int prec;

	      /* Only do the transformation if NEWMASK is some integer
		 mode's mask.  */
	      for (prec = BITS_PER_UNIT;
		   prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
		if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
		  break;
	      if (prec < HOST_BITS_PER_WIDE_INT
		  || newmask == ~(unsigned HOST_WIDE_INT) 0)
		{
		  tree newmaskt;

		  if (shift_type != TREE_TYPE (arg0))
		    {
		      tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
					     fold_convert_loc (loc, shift_type,
							       TREE_OPERAND (arg0, 0)),
					     TREE_OPERAND (arg0, 1));
		      tem = fold_convert_loc (loc, type, tem);
		    }
		  else
		    tem = op0;
		  newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
		  if (!tree_int_cst_equal (newmaskt, arg1))
		    return fold_build2_loc (loc, BIT_AND_EXPR, type,
					    tem, newmaskt);
		}
	    }
	}

      goto associate;
    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
	 of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
	  && real_zerop (arg1))
	return NULL_TREE;

      /* Optimize A / A to 1.0 if we don't care about
	 NaNs or Infinities.  Skip the transformation
	 for non-real operands.  */
      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
	  && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
	  && operand_equal_p (arg0, arg1, 0))
	{
	  tree r = build_real (TREE_TYPE (arg0), dconst1);

	  return omit_two_operands_loc (loc, type, r, arg0, arg1);
	}

      /* The complex version of the above A / A optimization.  */
      if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
	  && operand_equal_p (arg0, arg1, 0))
	{
	  tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
	  if (! HONOR_NANS (TYPE_MODE (elem_type))
	      && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
	    {
	      tree r = build_real (elem_type, dconst1);
	      /* omit_two_operands will call fold_convert for us.  */
	      return omit_two_operands_loc (loc, type, r, arg0, arg1);
	    }
	}

      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				TREE_OPERAND (arg0, 0),
				negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				negate_expr (arg0),
				TREE_OPERAND (arg1, 0));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_minus_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type,
						      negate_expr (arg0)));

      /* If ARG1 is a constant, we can convert this to a multiply by the
	 reciprocal.  This does not have the same rounding properties,
	 so only do this if -freciprocal-math.  We can actually
	 always safely do it if ARG1 is a power of two, but it's hard to
	 tell if it is or not in a portable manner.  */
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  if (flag_reciprocal_math
	      && 0 != (tem = const_binop (code, build_real (type, dconst1),
					  arg1)))
	    return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
	  /* Find the reciprocal if optimizing and the result is exact.  */
	  if (optimize)
	    {
	      REAL_VALUE_TYPE r;
	      r = TREE_REAL_CST (arg1);
	      if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
		{
		  tem = build_real (type, r);
		  return fold_build2_loc (loc, MULT_EXPR, type,
					  fold_convert_loc (loc, type, arg0),
					  tem);
		}
	    }
	}
      /* Convert A/B/C to A/(B*C).  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg0) == RDIV_EXPR)
	return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, MULT_EXPR, type,
						 TREE_OPERAND (arg0, 1), arg1));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg1) == RDIV_EXPR)
	return fold_build2_loc (loc, MULT_EXPR, type,
				fold_build2_loc (loc, RDIV_EXPR, type, arg0,
						 TREE_OPERAND (arg1, 0)),
				TREE_OPERAND (arg1, 1));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (arg0) == REAL_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
	{
	  tree tem = const_binop (RDIV_EXPR, arg0,
				  TREE_OPERAND (arg1, 1));
	  if (tem)
	    return fold_build2_loc (loc, RDIV_EXPR, type, tem,
				    TREE_OPERAND (arg1, 0));
	}
      if (flag_unsafe_math_optimizations)
	{
	  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	  enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	  /* Optimize sin(x)/cos(x) as tan(x).  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				  CALL_EXPR_ARG (arg1, 0), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		return build_call_expr_loc (loc, tanfn, 1,
					    CALL_EXPR_ARG (arg0, 0));
	    }

	  /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
	  if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				  CALL_EXPR_ARG (arg1, 0), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		{
		  tree tmp = build_call_expr_loc (loc, tanfn, 1,
						  CALL_EXPR_ARG (arg0, 0));
		  return fold_build2_loc (loc, RDIV_EXPR, type,
					  build_real (type, dconst1), tmp);
		}
	    }

	  /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
	     NaNs or Infinities.  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg1, 0);

	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		  && operand_equal_p (arg00, arg01, 0))
		{
		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		  if (cosfn != NULL_TREE)
		    return build_call_expr_loc (loc, cosfn, 1, arg00);
		}
	    }

	  /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
	     NaNs or Infinities.  */
	  if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg1, 0);

	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		  && operand_equal_p (arg00, arg01, 0))
		{
		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		  if (cosfn != NULL_TREE)
		    {
		      tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
		      return fold_build2_loc (loc, RDIV_EXPR, type,
					      build_real (type, dconst1),
					      tmp);
		    }
		}
	    }

	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
	  if (fcode0 == BUILT_IN_POW
	      || fcode0 == BUILT_IN_POWF
	      || fcode0 == BUILT_IN_POWL)
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      if (TREE_CODE (arg01) == REAL_CST
		  && !TREE_OVERFLOW (arg01)
		  && operand_equal_p (arg1, arg00, 0))
		{
		  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  REAL_VALUE_TYPE c;
		  tree arg;

		  c = TREE_REAL_CST (arg01);
		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
		  arg = build_real (type, c);
		  return build_call_expr_loc (loc, powfn, 2, arg1, arg);
		}
	    }

	  /* Optimize a/root(b/c) into a*root(c/b).  */
	  if (BUILTIN_ROOT_P (fcode1))
	    {
	      tree rootarg = CALL_EXPR_ARG (arg1, 0);

	      if (TREE_CODE (rootarg) == RDIV_EXPR)
		{
		  tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
		  tree b = TREE_OPERAND (rootarg, 0);
		  tree c = TREE_OPERAND (rootarg, 1);

		  tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);

		  tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
		  return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
		}
	    }

	  /* Optimize x/expN(y) into x*expN(-y).  */
	  if (BUILTIN_EXPONENT_P (fcode1))
	    {
	      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
	      tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
	      arg1 = build_call_expr_loc (loc,
					  expfn, 1,
					  fold_convert_loc (loc, type, arg));
	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
	    }

	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
	  if (fcode1 == BUILT_IN_POW
	      || fcode1 == BUILT_IN_POWF
	      || fcode1 == BUILT_IN_POWL)
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
	      tree arg10 = CALL_EXPR_ARG (arg1, 0);
	      tree arg11 = CALL_EXPR_ARG (arg1, 1);
	      tree neg11 = fold_convert_loc (loc, type,
					     negate_expr (arg11));
	      arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
	    }
	}
      return NULL_TREE;
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
	 a power of 2, to A >> (N + log2(B)).  */
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == LSHIFT_EXPR
	  && (TYPE_UNSIGNED (type)
	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	{
	  tree sval = TREE_OPERAND (arg1, 0);
	  if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
	    {
	      tree sh_cnt = TREE_OPERAND (arg1, 1);
	      unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));

	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying A / (B << N)"),
				       WARN_STRICT_OVERFLOW_MISC);

	      sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
					sh_cnt, build_int_cst (NULL_TREE, pow2));
	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
				      fold_convert_loc (loc, type, arg0),
				      sh_cnt);
	    }
	}

      /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
	 TRUNC_DIV_EXPR.  Rewrite into the latter in this case.  */
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_UNSIGNED (type)
	  && code == FLOOR_DIV_EXPR)
	return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);

      /* Fall thru */

    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
	return NULL_TREE;
      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return fold_convert_loc (loc, type, negate_expr (arg0));

      /* Convert -A / -B to A / B when the type is signed and overflow is
	 undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg0) == NEGATE_EXPR
	  && negate_expr_p (arg1))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, type,
						    TREE_OPERAND (arg0, 0)),
				  fold_convert_loc (loc, type,
						    negate_expr (arg1)));
	}
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && negate_expr_p (arg0))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, type,
						    negate_expr (arg0)),
				  fold_convert_loc (loc, type,
						    TREE_OPERAND (arg1, 0)));
	}

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	 operation, EXACT_DIV_EXPR.

	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, it's not clear if they do
	 after the last round to changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	  && multiple_of_p (type, arg0, arg1))
	return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert_loc (loc, type, tem);
	}

      return NULL_TREE;
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % 1 is always zero, but be sure to preserve any side
	 effects in X.  */
      if (integer_onep (arg1))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % 0, return X % 0 unchanged so that we can get the
	 proper warnings and errors.  */
      if (integer_zerop (arg1))
	return NULL_TREE;

      /* 0 % X is always zero, but be sure to preserve any side
	 effects in X.  Place this after checking for X == 0.  */
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)
	  && TREE_INT_CST_HIGH (arg1) < 0
	  && !TYPE_OVERFLOW_TRAPS (type)
	  /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
	  && !sign_bit_p (arg1, arg1))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  negate_expr (arg1)));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && !TYPE_OVERFLOW_TRAPS (type))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying modulus"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert_loc (loc, type, tem);
	}

      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
	 i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
	  && (TYPE_UNSIGNED (type)
	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	{
	  tree c = arg1;
	  /* Also optimize A % (C << N) where C is a power of 2,
	     to A & ((C << N) - 1).  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR)
	    c = TREE_OPERAND (arg1, 0);

	  if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
	    {
	      tree mask
		= fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
				   build_int_cst (TREE_TYPE (arg1), 1));
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying "
					"X % (power of two)"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_convert_loc (loc, type, arg0),
				      fold_convert_loc (loc, type, mask));
	    }
	}
      return NULL_TREE;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
	  && tree_expr_nonnegative_p (arg1))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      /* ... fall through ...  */
    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Since negative shift count is not well-defined,
	 don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	return NULL_TREE;

      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
      if (TREE_CODE (op0) == code && host_integerp (arg1, false)
	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
	  && host_integerp (TREE_OPERAND (arg0, 1), false)
	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
	{
	  HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
			       + TREE_INT_CST_LOW (arg1));

	  /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
	     being well defined.  */
	  if (low >= TYPE_PRECISION (type))
	    {
	      if (code == LROTATE_EXPR || code == RROTATE_EXPR)
		low = low % TYPE_PRECISION (type);
	      else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
		return omit_one_operand_loc (loc, type,
					     build_int_cst (type, 0),
					     TREE_OPERAND (arg0, 0));
	      else
		low = TYPE_PRECISION (type) - 1;
	    }

	  return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				  build_int_cst (type, low));
	}
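      /* Illustrative example: on a 32-bit type, (x >> 3) >> 4 folds to
	 x >> 7; a combined rotate count is reduced modulo the precision.  */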
      /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
	 into x & ((unsigned)-1 >> c) for unsigned types.  */
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
	   || (TYPE_UNSIGNED (type)
	       && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
	  && host_integerp (arg1, false)
	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
	  && host_integerp (TREE_OPERAND (arg0, 1), false)
	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
	{
	  HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
	  HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
	  tree lshift;
	  tree arg00;

	  if (low0 == low1)
	    {
	      arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

	      lshift = build_int_cst (type, -1);
	      lshift = int_const_binop (code, lshift, arg1, 0);

	      return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
	    }
	}
      /* Rewrite an LROTATE_EXPR by a constant into an
	 RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
	{
	  tree tem = build_int_cst (TREE_TYPE (arg1),
				    TYPE_PRECISION (type));
	  tem = const_binop (MINUS_EXPR, tem, arg1);
	  return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
	}
      /* If we have a rotate of a bit operation with the rotate count and
	 the second operand of the bit operation both constant,
	 permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == BIT_AND_EXPR
	      || TREE_CODE (arg0) == BIT_IOR_EXPR
	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, TREE_CODE (arg0), type,
				fold_build2_loc (loc, code, type,
						 TREE_OPERAND (arg0, 0), arg1),
				fold_build2_loc (loc, code, type,
						 TREE_OPERAND (arg0, 1), arg1));

      /* Two consecutive rotates adding up to the precision of the
	 type can be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == RROTATE_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (arg1) == 0
	  && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
	  && ((TREE_INT_CST_LOW (arg1)
	       + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
	      == (unsigned int) TYPE_PRECISION (type)))
	return TREE_OPERAND (arg0, 0);
      /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
	      (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
	 if the latter can be further optimized.  */
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree mask = fold_build2_loc (loc, code, type,
				       fold_convert_loc (loc, type,
							 TREE_OPERAND (arg0, 1)),
				       arg1);
	  tree shift = fold_build2_loc (loc, code, type,
					fold_convert_loc (loc, type,
							  TREE_OPERAND (arg0, 0)),
					arg1);
	  tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
	  if (tem)
	    return tem;
	}

      return NULL_TREE;
    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
      if (tem)
	return tem;
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_MAX_VALUE (type)
	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
      if (tem)
	return tem;
      goto associate;
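      /* Illustrative examples: MIN (x, INT_MIN) folds to INT_MIN and
	 MAX (x, INT_MAX) folds to INT_MAX, with any side effects of x
	 preserved by omit_one_operand.  */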
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
	return fold_convert_loc (loc, type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
	 must be evaluated.  */
      if (integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
	 case will be handled here.  */
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
	 means A >= Y && A != MAX, but in this case we know that
	 A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
	  && !TREE_SIDE_EFFECTS (arg1))
	{
	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
	  if (tem && !operand_equal_p (tem, arg0, 0))
	    return fold_build2_loc (loc, code, type, tem, arg1);

	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
	  if (tem && !operand_equal_p (tem, arg1, 0))
	    return fold_build2_loc (loc, code, type, arg0, tem);
	}

    truth_andor:
      /* We only do these simplifications if we are optimizing.  */
      if (!optimize)
	return NULL_TREE;

      /* Check for things like (A || B) && (A || C).  We can convert this
	 to A || (B && C).  Note that either operator can be any of the four
	 truth and/or operations and the transformation will still be
	 valid.  Also note that we only care about order for the
	 ANDIF and ORIF operators.  If B contains side effects, this
	 might change the truth-value of A.  */
      if (TREE_CODE (arg0) == TREE_CODE (arg1)
	  && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	      || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	      || TREE_CODE (arg0) == TRUTH_AND_EXPR
	      || TREE_CODE (arg0) == TRUTH_OR_EXPR)
	  && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
	{
	  tree a00 = TREE_OPERAND (arg0, 0);
	  tree a01 = TREE_OPERAND (arg0, 1);
	  tree a10 = TREE_OPERAND (arg1, 0);
	  tree a11 = TREE_OPERAND (arg1, 1);
	  int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			      || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			     && (code == TRUTH_AND_EXPR
				 || code == TRUTH_OR_EXPR));

	  if (operand_equal_p (a00, a10, 0))
	    return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				    fold_build2_loc (loc, code, type,
						     a01, a11));
	  else if (commutative && operand_equal_p (a00, a11, 0))
	    return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				    fold_build2_loc (loc, code, type,
						     a01, a10));
	  else if (commutative && operand_equal_p (a01, a10, 0))
	    return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
				    fold_build2_loc (loc, code, type,
						     a00, a11));

	  /* This case is tricky because we must either have commutative
	     operators or else A10 must not have side-effects.  */

	  else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
		   && operand_equal_p (a01, a11, 0))
	    return fold_build2_loc (loc, TREE_CODE (arg0), type,
				    fold_build2_loc (loc, code, type,
						     a00, a10),
				    a01);
	}
      /* See if we can build a range comparison.  */
      if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
	return tem;

      if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
	  || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
	{
	  tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
	  if (tem)
	    return fold_build2_loc (loc, code, type, tem, arg1);
	}

      if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
	  || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
	{
	  tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
	  if (tem)
	    return fold_build2_loc (loc, code, type, arg0, tem);
	}

      /* Check for the possibility of merging component references.  If our
	 lhs is another similar operation, try to merge its rhs with our
	 rhs.  Then try to merge our lhs and rhs.  */
      if (TREE_CODE (arg0) == code
	  && 0 != (tem = fold_truthop (loc, code, type,
				       TREE_OPERAND (arg0, 1), arg1)))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
	return tem;

      return NULL_TREE;
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or true.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return fold_convert_loc (loc, type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant true, result is true, but we must
	 evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
	 TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      goto truth_andor;
    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
	{
	  /* Only call invert_truthvalue if operand is a truth value.  */
	  if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
	    tem = fold_build1_loc (loc, TRUTH_NOT_EXPR,
				   TREE_TYPE (arg0), arg0);
	  else
	    tem = invert_truthvalue_loc (loc, arg0);
	  return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	}
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      return NULL_TREE;
    case EQ_EXPR:
    case NE_EXPR:
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;

      /* bool_var != 0 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == NE_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var == 1 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == EQ_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var != 1 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == NE_EXPR)
	return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
				fold_convert_loc (loc, type, arg0));

      /* bool_var == 0 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == EQ_EXPR)
	return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
				fold_convert_loc (loc, type, arg0));

      /* !exp != 0 becomes !exp.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
	  && code == NE_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If this is an equality comparison of the address of two non-weak,
	 unaliased symbols neither of which are extern (since we do not
	 have access to attributes for externs), then we know the result.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
	{
	  /* We know that we're looking at the address of two
	     non-weak, unaliased, static _DECL nodes.

	     It is both wasteful and incorrect to call operand_equal_p
	     to compare the two ADDR_EXPR nodes.  It is wasteful in that
	     all we need to do is test pointer equality for the arguments
	     to the two ADDR_EXPR nodes.  It is incorrect to use
	     operand_equal_p as that function is NOT equivalent to a
	     C equality test.  It can in fact return false for two
	     objects which would test as equal using the C equality
	     operator.  */
	  bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
	  return constant_boolean_node (equal
					? code == EQ_EXPR : code != EQ_EXPR,
					type);
	}
      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
	 a MINUS_EXPR of a constant, we can convert it into a comparison with
	 a revised constant as long as no overflow occurs.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      fold_convert_loc (loc, TREE_TYPE (arg0),
							arg1),
				      TREE_OPERAND (arg0, 1)))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a NEGATE_EXPR.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = negate_expr (arg1))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a BIT_XOR_EXPR;  X ^ C1 == C2 is X == (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, BIT_XOR_EXPR,
						 TREE_TYPE (arg0),
						 fold_convert_loc (loc,
								   TREE_TYPE (arg0),
								   arg1),
						 TREE_OPERAND (arg0, 1)));
      /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR
	   || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	   || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	      || POINTER_TYPE_P (TREE_TYPE (arg0))))
	{
	  tree val = TREE_OPERAND (arg0, 1);
	  return omit_two_operands_loc (loc, type,
					fold_build2_loc (loc, code, type,
							 val,
							 build_int_cst (TREE_TYPE (val),
									0)),
					TREE_OPERAND (arg0, 0), arg1);
	}

      /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
	  && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
	{
	  return omit_two_operands_loc (loc, type,
					code == NE_EXPR
					? boolean_true_node : boolean_false_node,
					TREE_OPERAND (arg0, 1), arg1);
	}
      /* If we have X - Y == 0, we can convert that to X == Y and similarly
	 for !=.  Don't do this for ordered comparisons due to overflow.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
	  && integer_zerop (arg1))
	return fold_build2_loc (loc, code, type,
				TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg0, 1));

      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      if (TREE_CODE (arg0) == ABS_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1)))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
      /* If this is an EQ or NE comparison with zero and ARG0 is
	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
	 two operations, but the latter can be done in one less insn
	 on machines that have only two-operand insns or on which a
	 constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_zerop (arg1))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  if (TREE_CODE (arg00) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg00, 0)))
	    {
	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
					  arg01, TREE_OPERAND (arg00, 1));
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				     build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, TREE_TYPE (arg1),
							tem),
				      arg1);
	    }
	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
		   && integer_onep (TREE_OPERAND (arg01, 0)))
	    {
	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
					  arg00, TREE_OPERAND (arg01, 1));
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				     build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, TREE_TYPE (arg1),
							tem),
				      arg1);
	    }
	}
      /* If this is an NE or EQ comparison of zero against the result of a
	 signed MOD operation whose second operand is a power of 2, make
	 the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tree newtype = unsigned_type_for (TREE_TYPE (arg0));
	  tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
					 fold_convert_loc (loc, newtype,
							   TREE_OPERAND (arg0, 0)),
					 fold_convert_loc (loc, newtype,
							   TREE_OPERAND (arg0, 1)));

	  return fold_build2_loc (loc, code, type, newmod,
				  fold_convert_loc (loc, newtype, arg1));
	}
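      /* Illustrative example: for signed x, x % 16 == 0 is rewritten as
	 (unsigned) x % 16 == 0, which the power-of-two fold can then
	 turn into ((unsigned) x & 15) == 0.  */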
      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
	 C1 is a valid shift constant, and C2 is a power of two, i.e.
	 a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
	     == INTEGER_CST
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && integer_zerop (arg1))
	{
	  tree itype = TREE_TYPE (arg0);
	  unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);

	  /* Check for a valid shift count.  */
	  if (TREE_INT_CST_HIGH (arg001) == 0
	      && TREE_INT_CST_LOW (arg001) < prec)
	    {
	      tree arg01 = TREE_OPERAND (arg0, 1);
	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
	      /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
		 can be rewritten as (X & (C2 << C1)) != 0.  */
	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
		{
		  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype,
					 arg01, arg001);
		  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype,
					 arg000, tem);
		  return fold_build2_loc (loc, code, type, tem, arg1);
		}
	      /* Otherwise, for signed (arithmetic) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
	      else if (!TYPE_UNSIGNED (itype))
		return fold_build2_loc (loc,
					code == EQ_EXPR ? GE_EXPR : LT_EXPR,
					type,
					arg000, build_int_cst (itype, 0));
	      /* Otherwise, for unsigned (logical) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
	      else
		return omit_one_operand_loc (loc, type,
					     code == EQ_EXPR
					     ? integer_one_node
					     : integer_zero_node,
					     arg000);
	    }
	}
      /* If this is an NE comparison of zero with an AND of one, remove the
	 comparison since the AND will give the correct value.  */
      if (code == NE_EXPR
	  && integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1)))
	return fold_convert_loc (loc, type, arg0);

      /* If we have (A & C) == C where C is a power of 2, convert this into
	 (A & C) != 0.  Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
				arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
							integer_zero_node));

      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
	 bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
      if (tem)
	return tem;
      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
				       TREE_TYPE (TREE_OPERAND (arg0, 1)),
				       TREE_OPERAND (arg0, 1));
	  tree dandnotc = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
					   arg1, notc);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (dandnotc))
	    return omit_one_operand_loc (loc, type, rslt, arg0);
	}

      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
				       arg1);
	  tree candnotd = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
					   TREE_OPERAND (arg0, 1), notd);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (candnotd))
	    return omit_one_operand_loc (loc, type, rslt, arg0);
	}
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if ((TREE_CODE (arg0) == COMPONENT_REF
	   || TREE_CODE (arg0) == BIT_FIELD_REF)
	  /* Handle the constant case even without -O
	     to make sure the warnings are given.  */
	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
	{
	  t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
	  if (t1)
	    return t1;
	}

      /* Optimize comparisons of strlen vs zero to a compare of the
	 first character of the string vs zero.  To wit,
		strlen(ptr) == 0   =>  *ptr == 0
		strlen(ptr) != 0   =>  *ptr != 0
	 Other cases should reduce to one of these two (or a constant)
	 due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
	  && integer_zerop (arg1))
	{
	  tree fndecl = get_callee_fndecl (arg0);

	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
	      && call_expr_nargs (arg0) == 1
	      && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
	    {
	      tree iref = build_fold_indirect_ref_loc (loc,
						       CALL_EXPR_ARG (arg0, 0));
	      return fold_build2_loc (loc, code, type, iref,
				      build_int_cst (TREE_TYPE (iref), 0));
	    }
	}
      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
      if (TREE_CODE (arg0) == RSHIFT_EXPR
	  && integer_zerop (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree itype = TREE_TYPE (arg00);
	  if (TREE_INT_CST_HIGH (arg01) == 0
	      && TREE_INT_CST_LOW (arg01)
		 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
	    {
	      if (TYPE_UNSIGNED (itype))
		{
		  itype = signed_type_for (itype);
		  arg00 = fold_convert_loc (loc, itype, arg00);
		}
	      return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				      type, arg00, build_int_cst (itype, 0));
	    }
	}
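      /* Illustrative example: for a 32-bit signed x, (x >> 31) != 0
	 folds to x < 0, and (x >> 31) == 0 folds to x >= 0.  */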
      /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
      if (integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_XOR_EXPR)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg0, 1));

      /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				build_int_cst (TREE_TYPE (arg1), 0));
      /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
				build_int_cst (TREE_TYPE (arg1), 0));

      /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, BIT_XOR_EXPR,
						 TREE_TYPE (arg1),
						 TREE_OPERAND (arg0, 1), arg1));
      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
	 (X & C) == 0 when C is a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
				 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
				 TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type, tem, arg1);
	}

      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
	 constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type, arg00,
				  build_int_cst (TREE_TYPE (arg00), 0));
	}

      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
	 when C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
				 arg000, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type,
				  tem, build_int_cst (TREE_TYPE (tem), 0));
	}
      if (integer_zerop (arg1)
	  && tree_expr_nonzero_p (arg0))
	{
	  tree res = constant_boolean_node (code == NE_EXPR, type);
	  return omit_one_operand_loc (loc, type, res, arg0);
	}

      /* Fold -X op -Y as X op Y, where op is eq/ne.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, code, type,
				TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 0));
      /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR, itype,
								      arg00, arg10),
						     arg01),
				    build_int_cst (itype, 0));

	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR, itype,
								      arg00, arg11),
						     arg01),
				    build_int_cst (itype, 0));

	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR, itype,
								      arg01, arg10),
						     arg00),
				    build_int_cst (itype, 0));

	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR, itype,
								      arg01, arg11),
						     arg00),
				    build_int_cst (itype, 0));
	}
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
	     operand_equal_p guarantees no side-effects so we don't need
	     to use omit_one_operand on Z.  */
	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2_loc (loc, code, type, arg00, arg10);
	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2_loc (loc, code, type, arg00, arg11);
	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2_loc (loc, code, type, arg01, arg10);
	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2_loc (loc, code, type, arg01, arg11);

	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
	  if (TREE_CODE (arg01) == INTEGER_CST
	      && TREE_CODE (arg11) == INTEGER_CST)
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_XOR_EXPR, itype,
						     arg00,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR, itype,
								      arg01, arg11)),
				    arg10);
	}
      /* Attempt to simplify equality/inequality comparisons of complex
	 values.  Only lower the comparison if the result is known or
	 can be simplified to a single scalar comparison.  */
      if ((TREE_CODE (arg0) == COMPLEX_EXPR
	   || TREE_CODE (arg0) == COMPLEX_CST)
	  && (TREE_CODE (arg1) == COMPLEX_EXPR
	      || TREE_CODE (arg1) == COMPLEX_CST))
	{
	  tree real0, imag0, real1, imag1;
	  tree rcond, icond;

	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
	    {
	      real0 = TREE_OPERAND (arg0, 0);
	      imag0 = TREE_OPERAND (arg0, 1);
	    }
	  else
	    {
	      real0 = TREE_REALPART (arg0);
	      imag0 = TREE_IMAGPART (arg0);
	    }

	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
	    {
	      real1 = TREE_OPERAND (arg1, 0);
	      imag1 = TREE_OPERAND (arg1, 1);
	    }
	  else
	    {
	      real1 = TREE_REALPART (arg1);
	      imag1 = TREE_IMAGPART (arg1);
	    }

	  rcond = fold_binary_loc (loc, code, type, real0, real1);
	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
	    {
	      if (integer_zerop (rcond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_false_node,
						  imag0, imag1);
		  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_true_node,
						  imag0, imag1);
		  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
		}
	    }

	  icond = fold_binary_loc (loc, code, type, imag0, imag1);
	  if (icond && TREE_CODE (icond) == INTEGER_CST)
	    {
	      if (integer_zerop (icond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_false_node,
						  real0, real1);
		  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_true_node,
						  real0, real1);
		  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
		}
	    }
	}

      return NULL_TREE;
    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;

      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	       && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
	      || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  enum tree_code code0 = TREE_CODE (arg0);
	  int is_positive;

	  if (TREE_CODE (arg01) == REAL_CST)
	    is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
	  else
	    is_positive = tree_int_cst_sgn (arg01);

	  /* (X - c) > X becomes false.  */
	  if (code == GT_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that (X - c) > X "
					"is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Likewise (X + c) < X becomes false.  */
	  if (code == LT_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) < X is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Convert (X - c) <= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == LE_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X - c) <= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  /* Convert (X + c) >= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == GE_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) >= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  if (TREE_CODE (arg01) == INTEGER_CST)
	    {
	      /* Convert X + c > X and X - c < X to true for integers.  */
	      if (code == GT_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) > X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      if (code == LT_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) < X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      /* Convert X + c <= X and X - c >= X to false for integers.  */
	      if (code == LE_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) <= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}

	      if (code == GE_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) >= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}
	    }
	}
      /* Comparisons with the highest or lowest possible integer of
	 the specified precision will have known values.  */
      {
	tree arg1_type = TREE_TYPE (arg1);
	unsigned int width = TYPE_PRECISION (arg1_type);

	if (TREE_CODE (arg1) == INTEGER_CST
	    && width <= 2 * HOST_BITS_PER_WIDE_INT
	    && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
	  {
	    HOST_WIDE_INT signed_max_hi;
	    unsigned HOST_WIDE_INT signed_max_lo;
	    unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;

	    if (width <= HOST_BITS_PER_WIDE_INT)
	      {
		signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		signed_max_hi = 0;
		max_hi = 0;

		if (TYPE_UNSIGNED (arg1_type))
		  {
		    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_lo = 0;
		    min_hi = 0;
		  }
		else
		  {
		    max_lo = signed_max_lo;
		    min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
		    min_hi = -1;
		  }
	      }
	    else
	      {
		width -= HOST_BITS_PER_WIDE_INT;
		signed_max_lo = -1;
		signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		max_lo = -1;
		min_lo = 0;

		if (TYPE_UNSIGNED (arg1_type))
		  {
		    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_hi = 0;
		  }
		else
		  {
		    max_hi = signed_max_hi;
		    min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
		  }
	      }

	    if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
		&& TREE_INT_CST_LOW (arg1) == max_lo)
	      switch (code)
		{
		case GT_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_zero_node, arg0);

		case GE_EXPR:
		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

		case LE_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_one_node, arg0);

		case LT_EXPR:
		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

		/* The GE_EXPR and LT_EXPR cases above are not normally
		   reached because of previous transformations.  */

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == max_hi
		     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
	      switch (code)
		{
		case GT_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, EQ_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		case LE_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, NE_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo)
	      switch (code)
		{
		case LT_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_zero_node, arg0);

		case LE_EXPR:
		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

		case GE_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_one_node, arg0);

		case GT_EXPR:
		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
	      switch (code)
		{
		case GE_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
		  return fold_build2_loc (loc, NE_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		case LT_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
		  return fold_build2_loc (loc, EQ_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		default:
		  break;
		}

	    else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
		     && TREE_INT_CST_LOW (arg1) == signed_max_lo
		     && TYPE_UNSIGNED (arg1_type)
		     /* We will flip the signedness of the comparison operator
			associated with the mode of arg1, so the sign bit is
			specified by this mode.  Check that arg1 is the signed
			max associated with this sign bit.  */
		     && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
		     /* signed_type does not work on pointer types.  */
		     && INTEGRAL_TYPE_P (arg1_type))
	      {
		/* The following case also applies to X < signed_max+1
		   and X >= signed_max+1 because of previous
		   transformations.  */
		if (code == LE_EXPR || code == GT_EXPR)
		  {
		    tree st;
		    st = signed_type_for (TREE_TYPE (arg1));
		    return fold_build2_loc (loc,
					    code == LE_EXPR
					    ? GE_EXPR : LT_EXPR,
					    type,
					    fold_convert_loc (loc, st, arg0),
					    build_int_cst (st, 0));
		  }
	      }
	  }
      }
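      /* Illustrative examples: for unsigned char x, x > 255 folds to
	 constant false, and x > 254 folds to x == 255.  */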
      /* If we are comparing an ABS_EXPR with a constant, we can
	 convert all the cases into explicit comparisons, but they may
	 well not be faster than doing the ABS and one comparison.
	 But ABS (X) <= C is a range comparison, which becomes a subtraction
	 and a comparison, and is probably faster.  */
      if (code == LE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == ABS_EXPR
	  && ! TREE_SIDE_EFFECTS (arg0)
	  && (0 != (tem = negate_expr (arg1)))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				build2 (GE_EXPR, type,
					TREE_OPERAND (arg0, 0), tem),
				build2 (LE_EXPR, type,
					TREE_OPERAND (arg0, 0), arg1));
      /* Convert ABS_EXPR<x> >= 0 to true.  */
      strict_overflow_p = false;
      if (code == GE_EXPR
	  && (integer_zerop (arg1)
	      || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
		  && real_zerop (arg1)))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
	}

      /* Convert ABS_EXPR<x> < 0 to false.  */
      strict_overflow_p = false;
      if (code == LT_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
	}
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
	 and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TREE_CODE (arg1) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (arg1, 0)))
	{
	  tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
				TREE_OPERAND (arg1, 1)),
			build_int_cst (TREE_TYPE (arg0), 0));
	  goto fold_binary_exit;
	}

      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && CONVERT_EXPR_P (arg1)
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
	{
	  tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			fold_convert_loc (loc, TREE_TYPE (arg0),
					  build2 (RSHIFT_EXPR,
						  TREE_TYPE (arg0), arg0,
						  TREE_OPERAND (TREE_OPERAND (arg1, 0),
								1))),
			build_int_cst (TREE_TYPE (arg0), 0));
	  goto fold_binary_exit;
	}
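      /* Illustrative example: for unsigned x, x < (1 << y) folds to
	 (x >> y) == 0, and x >= (1 << y) folds to (x >> y) != 0.  */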
      return NULL_TREE;

    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
	{
	  t1 = fold_relational_const (code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
	}

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand_loc (loc, type, t1, arg1);
	}

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand_loc (loc, type, t1, arg0);
	}

      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
	  && !flag_trapping_math
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (0, type);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
	tree targ0 = strip_float_extensions (arg0);
	tree targ1 = strip_float_extensions (arg1);
	tree newtype = TREE_TYPE (targ0);

	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	  newtype = TREE_TYPE (targ1);

	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, newtype, targ0),
				  fold_convert_loc (loc, newtype, targ1));
      }

      return NULL_TREE;
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
	 nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
				 : fold_convert_loc (loc, type, arg1);
      return pedantic_non_lvalue_loc (loc, tem);

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg0) == REAL_CST
	   && TREE_CODE (arg1) == REAL_CST)
	  || (TREE_CODE (arg0) == INTEGER_CST
	      && TREE_CODE (arg1) == INTEGER_CST))
	return build_complex (type, arg0, arg1);
      return NULL_TREE;

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();

    default:
      return NULL_TREE;
    } /* switch (code) */

 fold_binary_exit:
  protected_set_expr_location (tem, loc);
  return tem;
}
/* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
   a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
   of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;

    case GOTO_EXPR:
      *walk_subtrees = 0;

      /* ... fall through ...  */

    default:
      return NULL_TREE;
    }
}

/* Return whether the sub-tree ST contains a label which is accessible from
   outside the sub-tree.  */

static bool
contains_label_p (tree st)
{
  return
   (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
}
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary_loc (location_t loc, enum tree_code code, tree type,
		  tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 3);

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any case,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx,
				    field, value)
	    if (field == arg1)
	      return value;
	}
      return NULL_TREE;
    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
	 so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
	  tem = integer_zerop (arg0) ? op2 : op1;
	  /* Only optimize constant conditions when the selected branch
	     has the same type as the COND_EXPR.  This avoids optimizing
	     away "c ? x : throw", where the throw has a void type.
	     Avoid throwing away the operand which contains a label.  */
	  if ((!TREE_SIDE_EFFECTS (unused_op)
	       || !contains_label_p (unused_op))
	      && (! VOID_TYPE_P (TREE_TYPE (tem))
		  || VOID_TYPE_P (type)))
	    return pedantic_non_lvalue_loc (loc, tem);
	  return NULL_TREE;
	}

      if (operand_equal_p (arg1, op2, 0))
	return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);

      /* If we have A op B ? A : C, we may be able to convert this to a
	 simpler expression, depending on the operation and the values
	 of B and C.  Signed zeros prevent all of these transformations,
	 for reasons given above each one.

	 Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     arg1, TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
	{
	  tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
	  if (tem)
	    return tem;
	}

      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     op2,
					     TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
	{
	  tem = fold_truth_not_expr (loc, arg0);
	  if (tem && COMPARISON_CLASS_P (tem))
	    {
	      tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
	      if (tem)
		return tem;
	    }
	}

      /* If the second operand is simpler than the third, swap them
	 since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
	  && tree_swap_operands_p (op1, op2, false))
	{
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = fold_truth_not_expr (loc, arg0);
	  if (tem)
	    return fold_build3_loc (loc, code, type, tem, op2, op1);
	}

      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (op1)
	  && integer_zerop (op2)
	  /* If we try to convert OP0 to our type, the
	     call to fold will try to move the conversion inside
	     a COND, which will recurse.  In that case, the COND_EXPR
	     is probably the best choice, so leave it alone.  */
	  && type == TREE_TYPE (arg0))
	return pedantic_non_lvalue_loc (loc, arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
	 over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
	  && integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0)))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
					  invert_truthvalue_loc (loc,
								 arg0)));
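
      /* Source-level sketch of the two conversions above (added for
	 exposition), assuming matching types and a truth-valued condition:
	   (a < b) ? 1 : 0   becomes   a < b
	   (a < b) ? 0 : 1   becomes   !(a < b).  */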
      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
	{
	  /* sign_bit_p only checks ARG1 bits within A's precision.
	     If <sign bit of A> has wider type than A, bits outside
	     of A's precision in <sign bit of A> need to be checked.
	     If they are all 0, this optimization needs to be done
	     in unsigned A's type, if they are all 1 in signed A's type,
	     otherwise this can't be done.  */
	  if (TYPE_PRECISION (TREE_TYPE (tem))
	      < TYPE_PRECISION (TREE_TYPE (arg1))
	      && TYPE_PRECISION (TREE_TYPE (tem)) < TYPE_PRECISION (type))
	    {
	      unsigned HOST_WIDE_INT mask_lo;
	      HOST_WIDE_INT mask_hi;
	      int inner_width, outer_width;
	      tree tem_type;

	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
	      if (outer_width > TYPE_PRECISION (type))
		outer_width = TYPE_PRECISION (type);

	      if (outer_width > HOST_BITS_PER_WIDE_INT)
		{
		  mask_hi = ((unsigned HOST_WIDE_INT) -1
			     >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
		  mask_lo = -1;
		}
	      else
		{
		  mask_hi = 0;
		  mask_lo = ((unsigned HOST_WIDE_INT) -1
			     >> (HOST_BITS_PER_WIDE_INT - outer_width));
		}
	      if (inner_width > HOST_BITS_PER_WIDE_INT)
		{
		  mask_hi &= ~((unsigned HOST_WIDE_INT) -1
			       >> (2 * HOST_BITS_PER_WIDE_INT - inner_width));
		  mask_lo = 0;
		}
	      else
		mask_lo &= ~((unsigned HOST_WIDE_INT) -1
			     >> (HOST_BITS_PER_WIDE_INT - inner_width));

	      if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
		  && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
		{
		  tem_type = signed_type_for (TREE_TYPE (tem));
		  tem = fold_convert_loc (loc, tem_type, tem);
		}
	      else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
		       && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
		{
		  tem_type = unsigned_type_for (TREE_TYPE (tem));
		  tem = fold_convert_loc (loc, tem_type, tem);
		}
	      else
		tem = NULL_TREE;
	    }

	  if (tem)
	    return
	      fold_convert_loc (loc, type,
				fold_build2_loc (loc, BIT_AND_EXPR,
						 TREE_TYPE (tem), tem,
						 fold_convert_loc (loc,
							TREE_TYPE (tem),
							arg1)));
	}
      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
	 already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && integer_pow2p (arg1))
	{
	  tree tem = TREE_OPERAND (arg0, 0);
	  STRIP_NOPS (tem);
	  if (TREE_CODE (tem) == RSHIFT_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
	      && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
		 == TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
	    return fold_build2_loc (loc, BIT_AND_EXPR, type,
				    TREE_OPERAND (tem, 0), arg1);
	}

      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
	 is probably obsolete because the first operand should be a
	 truth value (that's why we have the two cases above), but let's
	 leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
	  && TREE_CODE (arg0) == NE_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_pow2p (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      arg1, OEP_ONLY_CONST))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
						TREE_OPERAND (arg0, 0)));
      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				fold_convert_loc (loc, type, arg0),
				arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	{
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_truth_not_expr (loc, arg0);
	  if (tem)
	    return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
				    fold_convert_loc (loc, type, tem),
				    arg1);
	}

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	{
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_truth_not_expr (loc, arg0);
	  if (tem)
	    return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				    fold_convert_loc (loc, type, tem),
				    op2);
	}

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (integer_onep (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
				fold_convert_loc (loc, type, arg0),
				op2);
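
      /* Taken together, a sketch of the four conversions above (added for
	 exposition), with A and B truth values:
	   A ? B : 0  =>  A && B	A ? B : 1  =>  !A || B
	   A ? 0 : B  =>  !A && B	A ? 1 : B  =>  A || B  */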
      return NULL_TREE;

    case CALL_EXPR:
      /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
	 of fold_ternary on them.  */
      gcc_unreachable ();

    case BIT_FIELD_REF:
      if ((TREE_CODE (arg0) == VECTOR_CST
	   || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
	  && type == TREE_TYPE (TREE_TYPE (arg0)))
	{
	  unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
	  unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);

	  if (width != 0
	      && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
	      && (idx % width) == 0
	      && (idx = idx / width)
		 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
	    {
	      tree elements = NULL_TREE;

	      if (TREE_CODE (arg0) == VECTOR_CST)
		elements = TREE_VECTOR_CST_ELTS (arg0);
	      else
		{
		  unsigned HOST_WIDE_INT idx;
		  tree value;

		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0),
					      idx, value)
		    elements = tree_cons (NULL_TREE, value, elements);
		}
	      while (idx-- > 0 && elements)
		elements = TREE_CHAIN (elements);
	      if (elements)
		return TREE_VALUE (elements);
	      else
		return fold_convert_loc (loc, type, integer_zero_node);
	    }
	}

      /* A bit-field-ref that referenced the full argument can be stripped.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	  && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
	  && integer_zerop (op2))
	return fold_convert_loc (loc, type, arg0);

      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
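
/* Usage sketch (added for exposition; not called from this file): callers
   normally reach fold_ternary_loc through fold_build3_loc, which falls
   back to building a plain COND_EXPR node if no simplification applies.  */
#if 0
static tree
example_fold_cond (location_t loc, tree cond, tree then_val, tree else_val)
{
  return fold_build3_loc (loc, COND_EXPR, TREE_TYPE (then_val),
			  cond, then_val, else_val);
}
#endif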
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;
  location_t loc = EXPR_LOCATION (expr);

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  /* CALL_EXPR-like objects with variable numbers of operands are
     treated specially.  */
  if (kind == tcc_vl_exp)
    {
      if (code == CALL_EXPR)
	{
	  tem = fold_call_expr (loc, expr, false);
	  return tem ? tem : expr;
	}
      return expr;
    }
  if (IS_EXPR_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  op0 = TREE_OPERAND (t, 0);
	  tem = fold_unary_loc (loc, code, type, op0);
	  return tem ? tem : expr;
	case 2:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  tem = fold_binary_loc (loc, code, type, op0, op1);
	  return tem ? tem : expr;
	case 3:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  op2 = TREE_OPERAND (t, 2);
	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
	  return tem ? tem : expr;
	default:
	  break;
	}
    }

  switch (code)
    {
    case ARRAY_REF:
      {
	tree op0 = TREE_OPERAND (t, 0);
	tree op1 = TREE_OPERAND (t, 1);

	if (TREE_CODE (op1) == INTEGER_CST
	    && TREE_CODE (op0) == CONSTRUCTOR
	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
	  {
	    VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
	    unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
	    unsigned HOST_WIDE_INT begin = 0;

	    /* Find a matching index by means of a binary search.  */
	    while (begin != end)
	      {
		unsigned HOST_WIDE_INT middle = (begin + end) / 2;
		tree index = VEC_index (constructor_elt, elts, middle)->index;

		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_lt (index, op1))
		  begin = middle + 1;
		else if (TREE_CODE (index) == INTEGER_CST
			 && tree_int_cst_lt (op1, index))
		  end = middle;
		else if (TREE_CODE (index) == RANGE_EXPR
			 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
		  begin = middle + 1;
		else if (TREE_CODE (index) == RANGE_EXPR
			 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
		  end = middle;
		else
		  return VEC_index (constructor_elt, elts, middle)->value;
	      }
	  }

	return t;
      }

    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
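
/* Usage sketch (added for exposition; X stands for some previously built
   tree): fold simplifies algebraic identities such as x * 1.  */
#if 0
tree product = build2 (MULT_EXPR, integer_type_node, x, integer_one_node);
tree simplified = fold (product);	/* x * 1 => x */
#endif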
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
static void fold_check_failed (const_tree, const_tree);
void print_fold_checksum (const_tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}

void
print_fold_checksum (const_tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_delete (ht);
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}

static void
fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
static void
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
{
  const void **slot;
  enum tree_code code;
  union tree_node buf;
  int i, len;

 recursive_label:

  gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
	       <= sizeof (struct tree_function_decl))
	      && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
  if (expr == NULL)
    return;
  slot = (const void **) htab_find_slot (ht, expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
      expr = (tree) &buf;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr)
	       || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)
	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
	       || TYPE_NEXT_VARIANT (expr)))
    {
      /* Allow these fields to be modified.  */
      tree tmp;
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = tmp = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
      TYPE_POINTER_TO (tmp) = NULL;
      TYPE_REFERENCE_TO (tmp) = NULL;
      TYPE_NEXT_VARIANT (tmp) = NULL;
      if (TYPE_CACHED_VALUES_P (tmp))
	{
	  TYPE_CACHED_VALUES_P (tmp) = 0;
	  TYPE_CACHED_VALUES (tmp) = NULL;
	}
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST
      && code != SSA_NAME)
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  expr = TREE_CHAIN (expr);
	  goto recursive_label;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
    case tcc_vl_exp:
      len = TREE_OPERAND_LENGTH (expr);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
	{
	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
	}
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
	fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
	{
	  fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
	  fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
	}
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
/* Helper function for outputting the checksum of a tree T.  When
   debugging with gdb, you can "define mynext" to be "next" followed
   by "call debug_fold_checksum (op0)", then just trace down till the
   outputs differ.  */

DEBUG_FUNCTION void
debug_fold_checksum (const_tree t)
{
  int i;
  unsigned char checksum[16];
  struct md5_ctx ctx;
  htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);

  md5_init_ctx (&ctx);
  fold_checksum_tree (t, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_empty (ht);

  for (i = 0; i < 16; i++)
    fprintf (stderr, "%d ", checksum[i]);

  fprintf (stderr, "\n");
}

#endif
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  LOC is the location of the resulting expression.
   Return a folded expression if successful.  Otherwise, return a tree
   expression with code CODE of type TYPE with an operand OP0.  */

tree
fold_build1_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);
#endif

  tem = fold_unary_loc (loc, code, type, op0);
  if (!tem)
    {
      tem = build1_stat (code, type, op0 PASS_MEM_STAT);
      SET_EXPR_LOCATION (tem, loc);
    }

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting
   expression.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with operands
   OP0 and OP1.  */

tree
fold_build2_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0, tree op1
		      MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_after_op0[16],
		checksum_after_op1[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);
#endif

  tem = fold_binary_loc (loc, code, type, op0, op1);
  if (!tem)
    {
      tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
      SET_EXPR_LOCATION (tem, loc);
    }

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_delete (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}
/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
		      tree op0, tree op1, tree op2 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_before_op2[16],
		checksum_after_op0[16],
		checksum_after_op1[16],
		checksum_after_op2[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  htab_empty (ht);
#endif

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
  if (!tem)
    {
      tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
      SET_EXPR_LOCATION (tem, loc);
    }

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_empty (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op2);
  htab_delete (ht);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
   arguments in ARGARRAY, and a null static chain.
   Return a folded expression if successful.  Otherwise, return a CALL_EXPR
   of type TYPE from the given operands as constructed by build_call_array.  */

tree
fold_build_call_array_loc (location_t loc, tree type, tree fn,
			   int nargs, tree *argarray)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_fn[16],
		checksum_before_arglist[16],
		checksum_after_fn[16],
		checksum_after_arglist[16];
  struct md5_ctx ctx;
  htab_t ht;
  int i;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_fn);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_arglist);
  htab_empty (ht);
#endif

  tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_fn);
  htab_empty (ht);

  if (memcmp (checksum_before_fn, checksum_after_fn, 16))
    fold_check_failed (fn, tem);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_arglist);
  htab_delete (ht);

  if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
    fold_check_failed (NULL_TREE, tem);
#endif
  return tem;
}
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;

tree
fold_build1_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1_loc (loc, code, type, op);

  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2_loc (loc, code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

tree
fold_build3_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op0, tree op1, tree op2)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build3_loc (loc, code, type, op0, op1, op2);

  END_FOLD_INIT;
  return result;
}

tree
fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
				       int nargs, tree *argarray)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
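
/* Usage sketch (added for exposition; ARG0/ARG1 stand for previously built
   REAL_CST trees): in a static initializer, 1.0/3.0 may be folded even under
   -frounding-math, because the wrapper clears flag_rounding_math around the
   fold.  */
#if 0
tree value = fold_build2_initializer_loc (UNKNOWN_LOCATION, RDIV_EXPR,
					  double_type_node, arg0, arg1);
#endif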
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */
static int
multiple_of_p (tree type, const_tree top, const_tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
	return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
	      > TREE_INT_CST_LOW (op1)
	      && TREE_INT_CST_HIGH (op1) == 0
	      && 0 != (t1 = fold_convert (type,
					  const_binop (LSHIFT_EXPR,
						       size_one_node, op1)))
	      && !TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case COND_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
	  || integer_zerop (bottom)
	  || (TYPE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
					     top, bottom, 0));

    default:
      return 0;
    }
}
/* Return true if CODE or TYPE is known to be non-negative.  */

static bool
tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
{
  if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
      && truth_value_p (code))
    /* Truth values evaluate to 0 or 1, which is nonnegative unless we
       have a signed:1 type (where the value is -1 and 0).  */
    return true;

  return false;
}
/* Return true if (CODE OP0) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
	 ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!INTEGRAL_TYPE_P (type))
	return true;
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  *strict_overflow_p = true;
	  return true;
	}
      break;

    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
					    strict_overflow_p);

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (op0,
						    strict_overflow_p);
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      {
		if (TYPE_UNSIGNED (inner_type))
		  return true;
		return tree_expr_nonnegative_warnv_p (op0,
						      strict_overflow_p);
	      }
	  }
	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (op0,
						    strict_overflow_p);
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TYPE_UNSIGNED (inner_type);
	  }
      }
      break;

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				 tree op1, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type))
	return (tree_expr_nonnegative_warnv_p (op0,
					       strict_overflow_p)
		&& tree_expr_nonnegative_warnv_p (op1,
						  strict_overflow_p));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == NOP_EXPR
	  && TREE_CODE (op1) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    {
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (type);
	    }
	}
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (type))
	{
	  /* x * x for floating point x is always non-negative.  */
	  if (operand_equal_p (op0, op1, 0))
	    return true;
	  return (tree_expr_nonnegative_warnv_p (op0,
						 strict_overflow_p)
		  && tree_expr_nonnegative_warnv_p (op1,
						    strict_overflow_p));
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
	  && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
	{
	  tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
			? TREE_TYPE (TREE_OPERAND (op0, 0))
			: TREE_TYPE (op0);
	  tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
			? TREE_TYPE (TREE_OPERAND (op1, 0))
			: TREE_TYPE (op1);

	  bool unsigned0 = TYPE_UNSIGNED (inner0);
	  bool unsigned1 = TYPE_UNSIGNED (inner1);

	  if (TREE_CODE (op0) == INTEGER_CST)
	    unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;

	  if (TREE_CODE (op1) == INTEGER_CST)
	    unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;

	  if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
	      && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
	    {
	      unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
		? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
		: TYPE_PRECISION (inner0);

	      unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
		? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
		: TYPE_PRECISION (inner1);

	      return precision0 + precision1 < TYPE_PRECISION (type);
	    }
	}
      return false;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
					     strict_overflow_p)
	      || tree_expr_nonnegative_warnv_p (op1,
						strict_overflow_p));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (op1,
						strict_overflow_p));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
					    strict_overflow_p);
    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
						strict_overflow_p));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_call_nonnegative_warnv_p (tree type, tree fndecl,
			       tree arg0, tree arg1, bool *strict_overflow_p)
{
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
	CASE_FLT_FN (BUILT_IN_ACOS):
	CASE_FLT_FN (BUILT_IN_ACOSH):
	CASE_FLT_FN (BUILT_IN_CABS):
	CASE_FLT_FN (BUILT_IN_COSH):
	CASE_FLT_FN (BUILT_IN_ERFC):
	CASE_FLT_FN (BUILT_IN_EXP):
	CASE_FLT_FN (BUILT_IN_EXP10):
	CASE_FLT_FN (BUILT_IN_EXP2):
	CASE_FLT_FN (BUILT_IN_FABS):
	CASE_FLT_FN (BUILT_IN_FDIM):
	CASE_FLT_FN (BUILT_IN_HYPOT):
	CASE_FLT_FN (BUILT_IN_POW10):
	CASE_INT_FN (BUILT_IN_FFS):
	CASE_INT_FN (BUILT_IN_PARITY):
	CASE_INT_FN (BUILT_IN_POPCOUNT):
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
	/* Always true.  */
	return true;

	CASE_FLT_FN (BUILT_IN_SQRT):
	/* sqrt(-0.0) is -0.0.  */
	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	  return true;
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_ASINH):
	CASE_FLT_FN (BUILT_IN_ATAN):
	CASE_FLT_FN (BUILT_IN_ATANH):
	CASE_FLT_FN (BUILT_IN_CBRT):
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_ERF):
	CASE_FLT_FN (BUILT_IN_EXPM1):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_FMOD):
	CASE_FLT_FN (BUILT_IN_FREXP):
	CASE_FLT_FN (BUILT_IN_LCEIL):
	CASE_FLT_FN (BUILT_IN_LDEXP):
	CASE_FLT_FN (BUILT_IN_LFLOOR):
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	CASE_FLT_FN (BUILT_IN_LLRINT):
	CASE_FLT_FN (BUILT_IN_LLROUND):
	CASE_FLT_FN (BUILT_IN_LRINT):
	CASE_FLT_FN (BUILT_IN_LROUND):
	CASE_FLT_FN (BUILT_IN_MODF):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_SCALB):
	CASE_FLT_FN (BUILT_IN_SCALBLN):
	CASE_FLT_FN (BUILT_IN_SCALBN):
	CASE_FLT_FN (BUILT_IN_SIGNBIT):
	CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
	CASE_FLT_FN (BUILT_IN_SINH):
	CASE_FLT_FN (BUILT_IN_TANH):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	/* True if the 1st argument is nonnegative.  */
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_FMAX):
	/* True if the 1st OR 2nd arguments are nonnegative.  */
	return (tree_expr_nonnegative_warnv_p (arg0,
					       strict_overflow_p)
		|| (tree_expr_nonnegative_warnv_p (arg1,
						   strict_overflow_p)));

	CASE_FLT_FN (BUILT_IN_FMIN):
	/* True if the 1st AND 2nd arguments are nonnegative.  */
	return (tree_expr_nonnegative_warnv_p (arg0,
					       strict_overflow_p)
		&& (tree_expr_nonnegative_warnv_p (arg1,
						   strict_overflow_p)));

	CASE_FLT_FN (BUILT_IN_COPYSIGN):
	/* True if the 2nd argument is nonnegative.  */
	return tree_expr_nonnegative_warnv_p (arg1,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_POWI):
	/* True if the 1st argument is nonnegative or the second
	   argument is an even integer.  */
	if (TREE_CODE (arg1) == INTEGER_CST
	    && (TREE_INT_CST_LOW (arg1) & 1) == 0)
	  return true;
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_POW):
	/* True if the 1st argument is nonnegative or the second
	   argument is an even integer valued real.  */
	if (TREE_CODE (arg1) == REAL_CST)
	  {
	    REAL_VALUE_TYPE c;
	    HOST_WIDE_INT n;

	    c = TREE_REAL_CST (arg1);
	    n = real_to_integer (&c);
	    if ((n & 1) == 0)
	      {
		REAL_VALUE_TYPE cint;
		real_from_integer (&cint, VOIDmode, n,
				   n < 0 ? -1 : 0, 0);
		if (real_identical (&c, &cint))
		  return true;
	      }
	  }
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

      default:
	break;
      }
  return tree_simple_nonnegative_warnv_p (CALL_EXPR,
					  type);
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
	tree temp = TARGET_EXPR_SLOT (t);
	t = TARGET_EXPR_INITIAL (t);

	/* If the initializer is non-void, then it's a normal expression
	   that will be assigned to the slot.  */
	if (!VOID_TYPE_P (t))
	  return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

	/* Otherwise, the initializer sets the slot in some way.  One common
	   way is an assignment statement at the end of the initializer.  */
	while (1)
	  {
	    if (TREE_CODE (t) == BIND_EXPR)
	      t = expr_last (BIND_EXPR_BODY (t));
	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
		     || TREE_CODE (t) == TRY_CATCH_EXPR)
	      t = expr_last (TREE_OPERAND (t, 0));
	    else if (TREE_CODE (t) == STATEMENT_LIST)
	      t = expr_last (t);
	    else
	      break;
	  }
	if (TREE_CODE (t) == MODIFY_EXPR
	    && TREE_OPERAND (t, 0) == temp)
	  return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						strict_overflow_p);

	return false;
      }

    case CALL_EXPR:
      {
	tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
	tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;

	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
					      get_callee_fndecl (t),
					      arg0,
					      arg1,
					      strict_overflow_p);
      }
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					    strict_overflow_p);
    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
					    strict_overflow_p);
    case SAVE_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
					    strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code;
  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);
    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
    }
}
/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-negative"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
/* Return true when (CODE OP0) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
			    bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
		&& tree_expr_nonzero_warnv_p (op0,
					      strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    default:
      break;
    }

  return false;
}
/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonzero_warnv_p (enum tree_code code,
			     tree type,
			     tree op0,
			     tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* With the presence of negative values it is hard
	     to say something.  */
	  sub_strict_overflow_p = false;
	  if (!tree_expr_nonnegative_warnv_p (op0,
					      &sub_strict_overflow_p)
	      || !tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	    return false;
	  /* One of operands must be positive and the other non-negative.  */
	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
	     overflows, on a twos-complement machine the sum of two
	     nonnegative numbers can never be zero.  */
	  return (tree_expr_nonzero_warnv_p (op0,
					     strict_overflow_p)
		  || tree_expr_nonzero_warnv_p (op1,
						strict_overflow_p));
	}
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  if (tree_expr_nonzero_warnv_p (op0,
					 strict_overflow_p)
	      && tree_expr_nonzero_warnv_p (op1,
					    strict_overflow_p))
	    {
	      *strict_overflow_p = true;
	      return true;
	    }
	}
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (op1,
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;

	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p))
	    return true;

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_warnv_p (op0,
						strict_overflow_p);
	}
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
					  &sub_strict_overflow_p)
	       && tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p)
	      || tree_expr_nonzero_warnv_p (op0,
					    strict_overflow_p));

    default:
      break;
    }

  return false;
}
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
	tree base = TREE_OPERAND (t, 0);
	if (!DECL_P (base))
	  base = get_base_address (base);

	if (!base)
	  return false;

	/* Weak declarations may link to NULL.  Other things may also be NULL
	   so protect with -fdelete-null-pointer-checks; but not variables
	   allocated on the stack.  */
	if (DECL_P (base)
	    && (flag_delete_null_pointer_checks
		|| (DECL_CONTEXT (base)
		    && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
		    && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
	  return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);

	/* Constants are never weak.  */
	if (CONSTANT_CLASS_P (base))
	  return true;

	return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    default:
      break;
    }

  return false;
}
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					strict_overflow_p);

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }
  return false;
}
/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-zero"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
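
/* Usage sketch (added for exposition): probe whether 2 + 3 folds to a
   constant; a NULL_TREE result means no constant was produced.  */
#if 0
tree sum = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
				    build_int_cst (integer_type_node, 2),
				    build_int_cst (integer_type_node, 3));
#endif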
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop_loc (loc, index,
				     fold_convert_loc (loc, sizetype,
						       low_bound));

	  string = exp1;
	}

      if (string
	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return build_int_cst_type (TREE_TYPE (exp),
				   (TREE_STRING_POINTER (string)
				    [TREE_INT_CST_LOW (index)]));
    }
  return NULL_TREE;
}
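
/* Usage sketch (added for exposition; EXP stands for a previously built
   reference tree): an ARRAY_REF such as "abc"[1] reads the character
   constant 'b' at compile time.  */
#if 0
tree elt = fold_read_from_constant_string (exp);	/* 'b' for "abc"[1] */
#endif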
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	unsigned HOST_WIDE_INT low;
	HOST_WIDE_INT high;
	int overflow = neg_double (TREE_INT_CST_LOW (arg0),
				   TREE_INT_CST_HIGH (arg0),
				   &low, &high);
	t = force_fit_type_double (type, low, high, 1,
				   (overflow | TREE_OVERFLOW (arg0))
				   && !TYPE_UNSIGNED (type));
	break;
      }

    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
	FIXED_VALUE_TYPE f;
	bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
					    &(TREE_FIXED_CST (arg0)), NULL,
					    TYPE_SATURATING (type));
	t = build_fixed (type, f);
	/* Propagate overflow flags.  */
	if (overflow_p | TREE_OVERFLOW (arg0))
	  TREE_OVERFLOW (t) = 1;
	break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      /* If the value is unsigned, then the absolute value is
	 the same as the ordinary value.  */
      if (TYPE_UNSIGNED (type))
	t = arg0;
      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))
	t = arg0;
      /* If the value is negative, then the absolute value is
	 its negation.  */
      else
	{
	  unsigned HOST_WIDE_INT low;
	  HOST_WIDE_INT high;
	  int overflow = neg_double (TREE_INT_CST_LOW (arg0),
				     TREE_INT_CST_HIGH (arg0),
				     &low, &high);
	  t = force_fit_type_double (type, low, high, -1,
				     overflow | TREE_OVERFLOW (arg0));
	}
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
	t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

tree
fold_not_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
			     ~TREE_INT_CST_HIGH (arg0), 0,
			     TREE_OVERFLOW (arg0));

  return t;
}
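
/* The fold above is just the pairwise one's complement of the two
   words of the constant, i.e. ~(h:l) == (~h : ~l), with any overflow
   flag simply propagated from the operand.  */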
/* Given CODE, a relational operator, the target type TYPE, and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile-time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
					  TREE_REALPART (op0),
					  TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
					  TREE_IMAGPART (op0),
					  TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
	return NULL_TREE;
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case where it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
	result = INT_CST_LT_UNSIGNED (op0, op1);
      else
	result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
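
/* A standalone sketch (illustrative only) of the canonicalization
   above: every one of the six orderings is answered using only
   "equal" and "less than" on host integers standing in for the
   INTEGER_CST operands.  */

static int
relational_sketch (enum tree_code code, HOST_WIDE_INT op0, HOST_WIDE_INT op1)
{
  switch (code)
    {
    case EQ_EXPR: return op0 == op1;
    case NE_EXPR: return !(op0 == op1);		/* EQ, inverted */
    case LT_EXPR: return op0 < op1;
    case GT_EXPR: return op1 < op0;		/* LT, swapped */
    case GE_EXPR: return !(op0 < op1);		/* LT, inverted */
    case LE_EXPR: return !(op1 < op0);		/* LT, swapped + inverted */
    default: gcc_unreachable ();
    }
}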
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects, then we don't have to
     wrap it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check the expression inside the return
     and the right-hand side of the MODIFY_EXPR inside that return.  If
     either has no side effects, we don't need to wrap the expression in a
     cleanup point expression.  Note we don't check the left-hand side of
     the modify because it should always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
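
/* Worked example (illustrative): "return <retval> = x" needs no
   CLEANUP_POINT_EXPR because the right-hand side x has no side
   effects, while "return <retval> = f ()" with a side-effecting call
   gets wrapped.  */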
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  op0 = build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	  SET_EXPR_LOCATION (op0, loc);
	  return op0;
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op,
				  part_width, index);
	}
    }

  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
	  && type == TREE_TYPE (TREE_TYPE (op00type)))
	{
	  HOST_WIDE_INT offset = tree_low_cst (op01, 0);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);

	  if (offset / part_widthi
	      <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
	    return fold_build3_loc (loc, BIT_FIELD_REF, type,
				    TREE_OPERAND (op00, 0),
				    part_width, index);
	}
    }

  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
	  && type == TREE_TYPE (TREE_TYPE (op00type)))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, op01))
	    return fold_build1_loc (loc, IMAGPART_EXPR, type,
				    TREE_OPERAND (op00, 0));
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype)))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      op0 = build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
      SET_EXPR_LOCATION (op0, loc);
      return op0;
    }

  return NULL_TREE;
}
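
/* Source-level picture of the simplifications above (a sketch of the
   identities the folder produces, not code that calls it):

     *(double *) &fooarray         becomes  fooarray[0]
     *(double *) &complexfoo       becomes  __real__ complexfoo
     ((double *) &complexfoo)[1]   becomes  __imag__ complexfoo
     *(double *) &vectorfoo        becomes  BIT_FIELD_REF <vectorfoo, ...>  */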
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  t = build1 (INDIRECT_REF, type, t);
  SET_EXPR_LOCATION (t, loc);
  return t;
}
/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}
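
/* Examples of the stripping above (illustrative): with the result
   unused, "x + y" has no side effects and collapses to zero;
   "f (), x + 1" keeps only the call "f ()"; a COND_EXPR whose arms
   are side-effect free keeps only its condition.  */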
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
	  unsigned HOST_WIDE_INT high;
	  bool overflow_p;

	  if ((low & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  high = TREE_INT_CST_HIGH (value);
	  low &= ~(divisor - 1);
	  low += divisor;
	  if (low == 0)
	    {
	      high++;
	      if (high == 0)
		overflow_p = true;
	    }

	  return force_fit_type_double (TREE_TYPE (value), low, high,
					-1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), -divisor);
	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
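
/* A minimal standalone sketch of the power-of-two fast path above:
   rounding up reduces to an add and a mask, assuming DIVISOR is a
   power of two and VALUE + DIVISOR - 1 does not wrap.  */

static unsigned HOST_WIDE_INT
round_up_hwi_sketch (unsigned HOST_WIDE_INT value,
		     unsigned HOST_WIDE_INT divisor)
{
  return (value + divisor - 1) & -divisor;
}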
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
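
/* Likewise for rounding down: a power-of-two DIVISOR reduces to a
   single mask, the host-integer analogue of the BIT_AND_EXPR path
   above.  */

static unsigned HOST_WIDE_INT
round_down_hwi_sketch (unsigned HOST_WIDE_INT value,
		       unsigned HOST_WIDE_INT divisor)
{
  return value & -divisor;
}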
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
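
/* Example of the split above (illustrative): for EXP = &s.f[i], the
   returned core is &s, *PBITPOS holds the constant bit offset of f
   within s, and *POFFSET holds the variable part i * sizeof (*s.f).  */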
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
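
/* A standalone sketch of the property the predicate above certifies:
   two addresses into the same object differ by a compile-time
   constant.  */

static int
ptr_difference_sketch (void)
{
  static int a[8];
  const int *e1 = &a[3];
  const int *e2 = &a[1];

  /* Both cores are &a and the byte offsets are the constants
     3 * sizeof (int) and 1 * sizeof (int), so the difference folds
     to the constant 2 elements.  */
  return (int) (e1 - e2);
}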
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
				arg0 ? arg0 : TREE_OPERAND (exp, 0),
				arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
				arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3_loc (loc, COND_EXPR, TREE_TYPE (exp),
				TREE_OPERAND (exp, 0),
				arg0 ? arg0 : TREE_OPERAND (exp, 1),
				arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip copysign function call, return the 1st argument.  */
	    arg0 = CALL_EXPR_ARG (exp, 0);
	    arg1 = CALL_EXPR_ARG (exp, 1);
	    return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
		if (arg0)
		  return build_call_expr_loc (loc, get_callee_fndecl (exp),
					      1, arg0);
	      }
	    break;
	  }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}