/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
#include "tree-flow.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

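/* Editorial note (an addition, not part of the original source): the
   encoding uses one bit per possible comparison outcome --
   COMPCODE_LT = 1, COMPCODE_EQ = 2, COMPCODE_GT = 4, COMPCODE_UNORD = 8
   -- so every name above is just the bitwise OR of the outcomes it
   accepts, for example:

     COMPCODE_LE = COMPCODE_LT | COMPCODE_EQ                    (1|2 == 3)
     COMPCODE_NE = COMPCODE_UNORD | COMPCODE_LT | COMPCODE_GT   (8|1|4 == 13)

   ANDing or ORing two comparisons of the same operands then reduces to
   a plain bitwise AND/OR of their codes; combine_comparisons below
   relies on exactly this.  */
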
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
extern tree make_range (tree, int *, tree *, tree *, bool *);
extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
                          tree, tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)

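/* Editorial example (an addition, not part of the original source):
   with 32-bit values a = 0x7fffffff and b = 1, the wrapped sum is
   0x80000000.  a and b agree in sign while a and sum differ, so
   ~(a ^ b) & (a ^ sum) has its sign bit set and the macro yields
   nonzero -- signalling that the two's complement addition
   overflowed.  */
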
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is according to operand two, that
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
  if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
      && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
    uns = false;

  quo = double_int_divmod (tree_to_double_int (arg1),
                           tree_to_double_int (arg2),
                           uns, code, &rem);

  if (double_int_zero_p (rem))
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}

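/* Editorial usage sketch (an addition, not part of the original
   source), e.g. when reducing a byte offset to an element count:

     tree n = div_if_zero_remainder (EXACT_DIV_EXPR, bytes, elt_size);
     if (n == NULL_TREE)
       ;  /+ bytes is not a multiple of elt_size; give up +/

   For bytes == 12 and elt_size == 4 this yields the constant 3; for
   bytes == 13 it yields NULL_TREE.  */
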
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

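/* Editorial usage sketch (an addition, not part of the original
   source): callers bracket folding with a defer/undefer pair so the
   warning is only emitted when the folded result is actually used,
   roughly:

     fold_defer_overflow_warnings ();
     tem = fold_binary (code, type, op0, op1);
     fold_undefer_overflow_warnings (tem != NULL_TREE && result_used,
                                     stmt, WARN_STRICT_OVERFLOW_MISC);

   Here result_used stands for whatever condition the caller applies.  */
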
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}

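/* Editorial example (an addition, not part of the original source):
   for a 32-bit signed type the only constant this rejects is the most
   negative value, -2147483648, whose negation is not representable in
   the type; -2147483647 and every other value may be negated
   safely.  */
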
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

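/* Editorial examples (an addition, not part of the original source):
   the PLUS_EXPR case turns -(x + 5) into -5 - x, because the constant
   operand is trivially negatable; the RSHIFT_EXPR case turns
   -((int) x >> 31) into (unsigned) x >> 31, both expressions
   evaluating to 0 or 1 for 32-bit int.  */
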
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    {
      tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
      SET_EXPR_LOCATION (tem, loc);
    }
  return fold_convert_loc (loc, type, tem);
}

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}

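/* Editorial example (an addition, not part of the original source):
   with CODE == PLUS_EXPR, splitting x + 3 yields VAR == x and
   *LITP == 3, while splitting x - 3 yields VAR == x with the literal
   moved to *MINUS_LITP instead.  An operand that is TREE_CONSTANT but
   not an actual literal, such as the ADDR_EXPR of a static object,
   would land in *CONP.  */
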
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  tree tem;

  if (t1 == 0)
    return t2;
  if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            {
              tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t2),
                            fold_convert_loc (loc, type, TREE_OPERAND (t1, 0)));
              goto associate_trees_exit;
            }
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            {
              tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t1),
                            fold_convert_loc (loc, type, TREE_OPERAND (t2, 0)));
              goto associate_trees_exit;
            }
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      tem = build2 (code, type, fold_convert_loc (loc, type, t1),
                    fold_convert_loc (loc, type, t2));
      goto associate_trees_exit;
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
 associate_trees_exit:
  protected_set_expr_location (tem, loc);
  return tem;
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      break;

    default:
      return false;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = double_int_ior (op1, op2);
      break;

    case BIT_XOR_EXPR:
      res = double_int_xor (op1, op2);
      break;

    case BIT_AND_EXPR:
      res = double_int_and (op1, op2);
      break;

    case RSHIFT_EXPR:
      res = double_int_rshift (op1, double_int_to_shwi (op2),
                               TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = double_int_lshift (op1, double_int_to_shwi (op2),
                               TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = double_int_rrotate (op1, double_int_to_shwi (op2),
                                TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = double_int_lrotate (op1, double_int_to_shwi (op2),
                                TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      overflow = add_double (op1.low, op1.high, op2.low, op2.high,
                             &res.low, &res.high);
      break;

    case MINUS_EXPR:
      neg_double (op2.low, op2.high, &res.low, &res.high);
      add_double (op1.low, op1.high, res.low, res.high,
                  &res.low, &res.high);
      overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
      break;

    case MULT_EXPR:
      overflow = mul_double (op1.low, op1.high, op2.low, op2.high,
                             &res.low, &res.high);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (double_int_zero_p (op2))
        return NULL_TREE;
      if (double_int_one_p (op2))
        {
          res = op1;
          break;
        }
      if (double_int_equal_p (op1, op2)
          && ! double_int_zero_p (op1))
        {
          res = double_int_one;
          break;
        }
      overflow = div_and_round_double (code, uns,
                                       op1.low, op1.high, op2.low, op2.high,
                                       &res.low, &res.high,
                                       &tmp.low, &tmp.high);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (double_int_zero_p (op2))
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       op1.low, op1.high, op2.low, op2.high,
                                       &tmp.low, &tmp.high,
                                       &res.low, &res.high);
      break;

    case MIN_EXPR:
      res = double_int_min (op1, op2, uns);
      break;

    case MAX_EXPR:
      res = double_int_max (op1, op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), res.low, res.high);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), res, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}

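/* Editorial example (an addition, not part of the original source):

     tree five = int_const_binop (PLUS_EXPR,
                                  build_int_cst (integer_type_node, 2),
                                  build_int_cst (integer_type_node, 3), 0);

   yields the INTEGER_CST 5; division by a zero constant, or a CODE the
   function does not handle, yields NULL_TREE instead.  */
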
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, 0);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may dependent upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to minimize
                 overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST)
    {
      tree type = TREE_TYPE(arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree elements1, elements2, list = NULL_TREE;

      if(TREE_CODE(arg2) != VECTOR_CST)
        return NULL_TREE;

      elements1 = TREE_VECTOR_CST_ELTS (arg1);
      elements2 = TREE_VECTOR_CST_ELTS (arg2);

      for (i = 0; i < count; i++)
        {
          tree elem1, elem2, elem;

          /* The trailing elements can be empty and should be treated as 0 */
          if (!elements1)
            elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
          else
            {
              elem1 = TREE_VALUE(elements1);
              elements1 = TREE_CHAIN (elements1);
            }

          if (!elements2)
            elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
          else
            {
              elem2 = TREE_VALUE(elements2);
              elements2 = TREE_CHAIN (elements2);
            }

          elem = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and return NULL_TREE */
          if (elem == NULL_TREE)
            return NULL_TREE;

          list = tree_cons (NULL_TREE, elem, list);
        }
      return build_vector(type, nreverse(list));
    }
  return NULL_TREE;
}

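/* Editorial note (an addition, not part of the original source): with
   the straightforward (flag_complex_method == 0) algorithm above,
   (1 + 2i) / (3 + 4i) folds via t = 3*3 + 4*4 = 25 to
   (1*3 + 2*4)/25 + i(2*3 - 1*4)/25 = 11/25 + (2/25)i; the wide-range
   variant computes the same value but scales by a ratio first so the
   intermediate products are less likely to overflow.  */
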
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}

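/* Editorial usage sketch (an addition, not part of the original
   source): size_binop is the usual way to do sizetype arithmetic in
   the middle end, e.g. computing a total size in bytes:

     tree total = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (elt_type),
                              size_int (nelts));

   Both operands must already be of matching integer types, as
   enforced by the int_binop_types_match_p assertion above.  */
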
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  enum machine_mode mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
    {
      temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
                                      HOST_BITS_PER_DOUBLE_INT,
                                      SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     By checking if the fractional bits are not zero to add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && double_int_negative_p (temp_trunc)
      && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
    temp = double_int_add (temp, double_int_one);

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
                             (double_int_negative_p (temp)
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}

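/* Editorial example (an addition, not part of the original source):
   converting the REAL_CST 2.5 to integer_type_node with code
   FIX_TRUNC_EXPR folds to the INTEGER_CST 2 (truncation toward zero),
   while a NaN input folds to 0 with TREE_OVERFLOW set, per the
   saturating Java-style rules in fold_convert_const_int_from_real
   above.  */
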
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}

/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}

/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return fold_build1_loc (loc, NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
          && (TYPE_ADDR_SPACE (TREE_TYPE (type))
              != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
        return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, REALPART_EXPR,
                                                  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == FIXED_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1_loc (loc, NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
          || TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            goto fold_convert_exit;
        }

      switch (TREE_CODE (orig))
        {
        case FIXED_POINT_TYPE:
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case BOOLEAN_TYPE:
        case REAL_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
        case FIXED_POINT_TYPE:
          return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                  fold_convert_loc (loc, TREE_TYPE (type), arg),
                                  fold_convert_loc (loc, TREE_TYPE (type),
                                                    integer_zero_node));
        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 0));
                ipart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 1));
                return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
            ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
            return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      if (TREE_CODE (tem) == MODIFY_EXPR)
        goto fold_convert_exit;
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      gcc_unreachable ();
    }
 fold_convert_exit:
  protected_set_expr_location (tem, loc);
  return tem;
}

/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
  {
  case VAR_DECL:
  case PARM_DECL:
  case RESULT_DECL:
  case LABEL_DECL:
  case FUNCTION_DECL:
  case SSA_NAME:

  case COMPONENT_REF:
  case INDIRECT_REF:
  case ALIGN_INDIRECT_REF:
  case MISALIGNED_INDIRECT_REF:
  case ARRAY_REF:
  case ARRAY_RANGE_REF:
  case BIT_FIELD_REF:
  case OBJ_TYPE_REF:

  case REALPART_EXPR:
  case IMAGPART_EXPR:
  case PREINCREMENT_EXPR:
  case PREDECREMENT_EXPR:
  case SAVE_EXPR:
  case TRY_CATCH_EXPR:
  case WITH_CLEANUP_EXPR:
  case COMPOUND_EXPR:
  case MODIFY_EXPR:
  case TARGET_EXPR:
  case COND_EXPR:
  case BIND_EXPR:
  case MIN_EXPR:
  case MAX_EXPR:
    break;

  default:
    /* Assume the worst for front-end tree codes.  */
    if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
      break;
    return false;
  }

  return true;
}

/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue_loc (location_t loc, tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  x = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
  SET_EXPR_LOCATION (x, loc);
  return x;
}

/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue_loc (location_t loc, tree x)
{
  if (pedantic_lvalues)
    return non_lvalue_loc (loc, x);
  protected_set_expr_location (x, loc);
  return x;
}

/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}

/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}

/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}

/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}

/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (location_t loc,
		     enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  int compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    {
      enum tree_code tcode;

      tcode = compcode_to_comparison ((enum comparison_code) compcode);
      return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
    }
}
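/* A minimal standalone sketch (not part of GCC, guarded out of the build)
   of the set algebra combine_comparisons relies on.  In the compcode
   encoding each operator is the set of outcomes for which it is true:
   bit 0 = "less", bit 1 = "equal", bit 2 = "greater", bit 3 = "unordered".
   ANDing or ORing two predicates on the same operands is then just
   intersection or union of their outcome sets.  The names below are
   hypothetical stand-ins for the enum comparison_code values.  */
#if 0
#include <assert.h>

enum { CC_LT = 1, CC_EQ = 2, CC_GT = 4, CC_UNORD = 8 };

static void
compcode_example (void)
{
  int le = CC_LT | CC_EQ;            /* x <= y */
  int ge = CC_GT | CC_EQ;            /* x >= y */
  int ne = CC_LT | CC_GT | CC_UNORD; /* x != y (true on NaN as well) */

  assert ((le & ge) == CC_EQ);  /* (x <= y) && (x >= y)  ==>  x == y  */
  assert ((ne & le) == CC_LT);  /* (x != y) && (x <= y)  ==>  x < y   */
  assert ((le | ge) == (CC_LT | CC_EQ | CC_GT)); /* ==> ORDERED (x, y) */
}
#endif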
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */
int
operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
      || TREE_TYPE (arg0) == error_mark_node
      || TREE_TYPE (arg1) == error_mark_node)
    return 0;

  /* Similar, if either does not have a type (like a released SSA name),
     they aren't equal.  */
  if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
    return 0;

  /* Check equality of integer constants before bailing out due to
     precision differences.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    return tree_int_cst_equal (arg0, arg1);

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  As pointers
     strictly don't have a signedness, require either two pointers or
     two non-pointers as well.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
      || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* We cannot consider pointers to different address space equal.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
      && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
	  != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
    return 0;

  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
  if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
	return operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 1), flags)
	       && operand_equal_p (TREE_OPERAND (arg0, 1),
				   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
	 Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	return tree_int_cst_equal (arg0, arg1);

      case FIXED_CST:
	return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
				       TREE_FIXED_CST (arg1));

      case REAL_CST:
	if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
				   TREE_REAL_CST (arg1)))
	  return 1;

	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
	  {
	    /* If we do not distinguish between signed and unsigned zero,
	       consider them equal.  */
	    if (real_zerop (arg0) && real_zerop (arg1))
	      return 1;
	  }
	return 0;

      case VECTOR_CST:
	{
	  tree v1, v2;

	  v1 = TREE_VECTOR_CST_ELTS (arg0);
	  v2 = TREE_VECTOR_CST_ELTS (arg1);
	  while (v1 && v2)
	    {
	      if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
				    flags))
		return 0;
	      v1 = TREE_CHAIN (v1);
	      v2 = TREE_CHAIN (v2);
	    }

	  return v1 == v2;
	}

      case COMPLEX_CST:
	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
				 flags)
		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
				    flags));

      case STRING_CST:
	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
		&& ! memcmp (TREE_STRING_POINTER (arg0),
			     TREE_STRING_POINTER (arg1),
			     TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
				0);
      default:
	break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
				    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)				\
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
	{
	CASE_CONVERT:
	case FIX_TRUNC_EXPR:
	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
	    return 0;
	  break;
	default:
	  break;
	}

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
	return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), flags)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
	 dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
	  || TREE_SIDE_EFFECTS (arg1))
	return 0;

      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	  return OP_SAME (0);

	case MEM_REF:
	  /* Require equal access sizes, and similar pointer types.
	     We can have incomplete types for array references of
	     variable-sized arrays from the Fortran frontend
	     though.  Also verify the types are compatible.  */
	  return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
		   || (TYPE_SIZE (TREE_TYPE (arg0))
		       && TYPE_SIZE (TREE_TYPE (arg1))
		       && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
					   TYPE_SIZE (TREE_TYPE (arg1)),
					   flags)))
		  && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
		      == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
		  && OP_SAME (0) && OP_SAME (1));

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  /* Operands 2 and 3 may be null.
	     Compare the array index by value if it is constant first as we
	     may have different types but same value here.  */
	  return (OP_SAME (0)
		  && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
					  TREE_OPERAND (arg1, 1))
		      || OP_SAME (1))
		  && OP_SAME_WITH_NULL (2)
		  && OP_SAME_WITH_NULL (3));

	case COMPONENT_REF:
	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
	     may be NULL when we're called to compare MEM_EXPRs.  */
	  return OP_SAME_WITH_NULL (0)
		 && OP_SAME (1)
		 && OP_SAME_WITH_NULL (2);

	case BIT_FIELD_REF:
	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}

    case tcc_expression:
      switch (TREE_CODE (arg0))
	{
	case ADDR_EXPR:
	case TRUTH_NOT_EXPR:
	  return OP_SAME (0);

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  return OP_SAME (0) && OP_SAME (1);

	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  if (OP_SAME (0) && OP_SAME (1))
	    return 1;

	  /* Otherwise take into account this is a commutative operation.  */
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 1), flags)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 0), flags));

	case COND_EXPR:
	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}

    case tcc_vl_exp:
      switch (TREE_CODE (arg0))
	{
	case CALL_EXPR:
	  /* If the CALL_EXPRs call different functions, then they
	     clearly can not be equal.  */
	  if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
				 flags))
	    return 0;

	  {
	    unsigned int cef = call_expr_flags (arg0);
	    if (flags & OEP_PURE_SAME)
	      cef &= ECF_CONST | ECF_PURE;
	    else
	      cef &= ECF_CONST;
	    if (!cef)
	      return 0;
	  }

	  /* Now see if all the arguments are the same.  */
	  {
	    const_call_expr_arg_iterator iter0, iter1;
	    const_tree a0, a1;
	    for (a0 = first_const_call_expr_arg (arg0, &iter0),
		   a1 = first_const_call_expr_arg (arg1, &iter1);
		 a0 && a1;
		 a0 = next_const_call_expr_arg (&iter0),
		   a1 = next_const_call_expr_arg (&iter1))
	      if (! operand_equal_p (a0, a1, flags))
		return 0;

	    /* If we get here and both argument lists are exhausted
	       then the CALL_EXPRs are equal.  */
	    return ! (a0 || a1);
	  }
	default:
	  return 0;
	}

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
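/* A standalone sketch (not part of GCC, guarded out of the build) of why
   operand_equal_p tests indistinguishability rather than C's == for
   REAL_CSTs: under IEEE rules -0.0 == 0.0, yet the two constants behave
   differently, so the folder must not substitute one for the other when
   signed zeros are honored.  Assumes an IEEE double; the helper name is
   hypothetical.  */
#if 0
#include <assert.h>
#include <math.h>

static void
signed_zero_example (void)
{
  double pz = 0.0, nz = -0.0;

  assert (pz == nz);                         /* Equal under ==, ...  */
  assert (signbit (nz) && !signbit (pz));    /* ...yet distinguishable, ... */
  assert (isinf (1.0 / nz) && 1.0 / nz < 0); /* ...and behave differently.  */
}
#endif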
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
	 to match the longer operand.  */
      primarg1 = fold_convert (signed_or_unsigned_type_for
			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
	return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    tclass = tcc_binary;

  else if (tclass == tcc_expression && code == SAVE_EXPR
	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      tclass = tcc_unary;
      *save_p = 1;
    }

  switch (tclass)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;
      else
	return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;
      else
	return 0;

      return 1;

    default:
      return 0;
    }
}
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (location_t loc, tree arg, tree old0, tree new0,
	    tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    tclass = tcc_binary;

  switch (tclass)
    {
    case tcc_unary:
      return fold_build1_loc (loc, code, type,
			      eval_subst (loc, TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2_loc (loc, code, type,
			      eval_subst (loc, TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (loc, TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
	{
	case SAVE_EXPR:
	  return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
			     old1, new1);

	case COMPOUND_EXPR:
	  return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
			     old1, new1);

	case COND_EXPR:
	  return fold_build3_loc (loc, code, type,
				  eval_subst (loc, TREE_OPERAND (arg, 0),
					      old0, new0, old1, new1),
				  eval_subst (loc, TREE_OPERAND (arg, 1),
					      old0, new0, old1, new1),
				  eval_subst (loc, TREE_OPERAND (arg, 2),
					      old0, new0, old1, new1));
	default:
	  break;
	}
      /* Fall through - ???  */

    case tcc_comparison:
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold_build2_loc (loc, code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement casted to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    {
      t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
      goto omit_one_operand_exit;
    }

  if (TREE_SIDE_EFFECTS (omitted))
    {
      t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
      goto omit_one_operand_exit;
    }

  return non_lvalue_loc (loc, t);

 omit_one_operand_exit:
  protected_set_expr_location (t, loc);
  return t;
}
/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
			       tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement casted to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    {
      t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
      goto pedantic_omit_one_operand_exit;
    }

  if (TREE_SIDE_EFFECTS (omitted))
    {
      t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
      goto pedantic_omit_one_operand_exit;
    }

  return pedantic_non_lvalue_loc (loc, t);

 pedantic_omit_one_operand_exit:
  protected_set_expr_location (t, loc);
  return t;
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands_loc (location_t loc, tree type, tree result,
		       tree omitted1, tree omitted2)
{
  tree t = fold_convert_loc (loc, type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    {
      t = build2 (COMPOUND_EXPR, type, omitted2, t);
      SET_EXPR_LOCATION (t, loc);
    }
  if (TREE_SIDE_EFFECTS (omitted1))
    {
      t = build2 (COMPOUND_EXPR, type, omitted1, t);
      SET_EXPR_LOCATION (t, loc);
    }

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
}
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

static tree
fold_truth_not_expr (location_t loc, tree arg)
{
  tree t, type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  location_t loc1, loc2;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
	  && flag_trapping_math
	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
	  && code != NE_EXPR && code != EQ_EXPR)
	return NULL_TREE;

      code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
      if (code == ERROR_MARK)
	return NULL_TREE;

      t = build2 (code, type, TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
      SET_EXPR_LOCATION (t, loc);
      return t;
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
      if (loc1 == UNKNOWN_LOCATION)
	loc1 = loc;
      if (loc2 == UNKNOWN_LOCATION)
	loc2 = loc;
      t = build2 (TRUTH_OR_EXPR, type,
		  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
		  invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
      break;

    case TRUTH_OR_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
      if (loc1 == UNKNOWN_LOCATION)
	loc1 = loc;
      if (loc2 == UNKNOWN_LOCATION)
	loc2 = loc;
      t = build2 (TRUTH_AND_EXPR, type,
		  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
		  invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
      break;

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	t = build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
		    TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	t = build2 (TRUTH_XOR_EXPR, type,
		    invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
		    TREE_OPERAND (arg, 1));
      break;

    case TRUTH_ANDIF_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
      if (loc1 == UNKNOWN_LOCATION)
	loc1 = loc;
      if (loc2 == UNKNOWN_LOCATION)
	loc2 = loc;
      t = build2 (TRUTH_ORIF_EXPR, type,
		  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
		  invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
      break;

    case TRUTH_ORIF_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
      if (loc1 == UNKNOWN_LOCATION)
	loc1 = loc;
      if (loc2 == UNKNOWN_LOCATION)
	loc2 = loc;
      t = build2 (TRUTH_ANDIF_EXPR, type,
		  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
		  invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
      break;

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
	tree arg1 = TREE_OPERAND (arg, 1);
	tree arg2 = TREE_OPERAND (arg, 2);

	loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
	loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 2));
	if (loc1 == UNKNOWN_LOCATION)
	  loc1 = loc;
	if (loc2 == UNKNOWN_LOCATION)
	  loc2 = loc;

	/* A COND_EXPR may have a throw as one operand, which
	   then has void type.  Just leave void operands
	   as they are.  */
	t = build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
		    VOID_TYPE_P (TREE_TYPE (arg1))
		    ? arg1 : invert_truthvalue_loc (loc1, arg1),
		    VOID_TYPE_P (TREE_TYPE (arg2))
		    ? arg2 : invert_truthvalue_loc (loc2, arg2));
	break;
      }

    case COMPOUND_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
      if (loc1 == UNKNOWN_LOCATION)
	loc1 = loc;
      t = build2 (COMPOUND_EXPR, type,
		  TREE_OPERAND (arg, 0),
		  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
      break;

    case NON_LVALUE_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      if (loc1 == UNKNOWN_LOCATION)
	loc1 = loc;
      return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));

    CASE_CONVERT:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
	{
	  t = build1 (TRUTH_NOT_EXPR, type, arg);
	  break;
	}

      /* ... fall through ...  */

    case FLOAT_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      if (loc1 == UNKNOWN_LOCATION)
	loc1 = loc;
      t = build1 (TREE_CODE (arg), type,
		  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
      break;

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	return NULL_TREE;
      t = build2 (EQ_EXPR, type, arg, build_int_cst (type, 0));
      break;

    case SAVE_EXPR:
      t = build1 (TRUTH_NOT_EXPR, type, arg);
      break;

    case CLEANUP_POINT_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      if (loc1 == UNKNOWN_LOCATION)
	loc1 = loc;
      t = build1 (CLEANUP_POINT_EXPR, type,
		  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
      break;

    default:
      return NULL_TREE;
    }

  SET_EXPR_LOCATION (t, loc);
  return t;
}
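/* A standalone sketch (not part of GCC, guarded out of the build) of the
   rewrites above: the negation is pushed into the operands so that no
   TRUTH_NOT_EXPR needs to survive.  The helper name is hypothetical.  */
#if 0
#include <assert.h>

static void
truth_not_example (void)
{
  int a, b;

  for (a = 0; a <= 1; a++)
    for (b = 0; b <= 1; b++)
      {
	assert (!(a && b) == (!a || !b)); /* TRUTH_AND_EXPR -> TRUTH_OR_EXPR */
	assert (!(a || b) == (!a && !b)); /* TRUTH_OR_EXPR -> TRUTH_AND_EXPR */
	assert (!(a ^ b) == ((!a) ^ b));  /* invert one TRUTH_XOR operand */
      }
}
#endif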
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
invert_truthvalue_loc (location_t loc, tree arg)
{
  tree tem;

  if (TREE_CODE (arg) == ERROR_MARK)
    return arg;

  tem = fold_truth_not_expr (loc, arg);
  if (!tem)
    {
      tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
      SET_EXPR_LOCATION (tem, loc);
    }

  return tem;
}
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
	(A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (location_t loc, enum tree_code code, tree type,
		     tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
	  && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  common = fold_convert_loc (loc, type, common);
  left = fold_convert_loc (loc, type, left);
  right = fold_convert_loc (loc, type, right);
  return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
			  fold_build2_loc (loc, code, type, left, right));
}
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */

static tree
distribute_real_division (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
			  TREE_OPERAND (arg1, 1), 0))
    return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
			    fold_build2_loc (loc, code, type,
					     TREE_OPERAND (arg0, 0),
					     TREE_OPERAND (arg1, 0)),
			    TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
		       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
	real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
	real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2_loc (loc, MULT_EXPR, type,
			      TREE_OPERAND (arg0, 0),
			      build_real (type, r0));
    }

  return NULL_TREE;
}
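/* A standalone sketch (not part of GCC, guarded out of the build) of why
   the transformation above is labeled unsafe: A/C1 + A/C2 and
   A * (1/C1 + 1/C2) round differently under IEEE arithmetic, so the two
   forms can disagree in the low bits; presumably that is why GCC only
   performs it under unsafe-math optimization flags.  The helper name is
   hypothetical.  */
#if 0
#include <stdio.h>

static void
rdiv_example (void)
{
  double a = 1.0, c1 = 3.0, c2 = 7.0;
  double before = a / c1 + a / c2;
  double after = a * (1.0 / c1 + 1.0 / c2);

  /* The two results are typically equal only up to rounding error.  */
  printf ("%.17g vs %.17g\n", before, after);
}
#endif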
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (location_t loc, tree inner, tree type,
		    HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
{
  tree result, bftype;

  if (bitpos == 0)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
	   || POINTER_TYPE_P (TREE_TYPE (inner)))
	  && host_integerp (size, 0)
	  && tree_low_cst (size, 0) == bitsize)
	return fold_convert_loc (loc, type, inner);
    }

  bftype = type;
  if (TYPE_PRECISION (bftype) != bitsize
      || TYPE_UNSIGNED (bftype) == !unsignedp)
    bftype = build_nonstandard_integer_type (bitsize, 0);

  result = build3 (BIT_FIELD_REF, bftype, inner,
		   size_int (bitsize), bitsize_int (bitpos));
  SET_EXPR_LOCATION (result, loc);

  if (bftype != type)
    result = fold_convert_loc (loc, type, result);

  return result;
}
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (location_t loc, enum tree_code code,
			    tree compare_type, tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
				&lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
	 sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
				    &runsignedp, &rvolatilep, false);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
	  || lunsignedp != runsignedp || offset != 0
	  || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
	return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  if (lvolatilep
      && GET_MODE_BITSIZE (lmode) > 0
      && flag_strict_volatile_bitfields > 0)
    nmode = lmode;
  else
    nmode = get_best_mode (lbitsize, lbitpos,
			   const_p ? TYPE_ALIGN (TREE_TYPE (linner))
			   : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
				  TYPE_ALIGN (TREE_TYPE (rinner))),
			   word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst_type (unsigned_type, -1);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (nbitsize - lbitsize - lbitpos));

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return fold_build2_loc (loc, code, compare_type,
			    fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
					     make_bit_field_ref (loc, linner,
								 unsigned_type,
								 nbitsize,
								 nbitpos, 1),
					     mask),
			    fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
					     make_bit_field_ref (loc, rinner,
								 unsigned_type,
								 nbitsize,
								 nbitpos, 1),
					     mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
					fold_convert_loc (loc,
							  unsigned_type, rhs),
					size_int (lbitsize))))
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR,
			      fold_convert_loc (loc, signed_type, rhs),
			      size_int (lbitsize - 1));
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = const_binop (BIT_AND_EXPR,
		     const_binop (LSHIFT_EXPR,
				  fold_convert_loc (loc, unsigned_type, rhs),
				  size_int (lbitpos)),
		     mask);

  lhs = build2 (code, compare_type,
		build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
		rhs);
  SET_EXPR_LOCATION (lhs, loc);
  return lhs;
}
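/* An illustrative standalone sketch (not part of GCC, guarded out of the
   build) of the constant case above, using a hypothetical layout: a 3-bit
   field at bit position 4 compared against 5.  The constant and the mask
   are shifted to the field's position, so the loaded word is tested with
   one AND and one compare, with no shift of the loaded value.  */
#if 0
#include <assert.h>

static void
bit_field_compare_example (void)
{
  unsigned word = 5u << 4; /* word whose 3-bit field at bit 4 holds 5 */
  unsigned mask = 7u << 4; /* field mask, shifted into position */
  unsigned rhs = 5u << 4;  /* constant, shifted likewise */

  assert ((word & mask) == rhs); /* field == 5, without extracting it */
}
#endif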
/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
			HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
			int *punsignedp, int *pvolatilep,
			tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (CONVERT_EXPR_P (exp)
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
			       punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst_type (unsigned_type, -1);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
			    fold_convert_loc (loc, unsigned_type, and_mask),
			    mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bits.  */

static int
all_ones_mask_p (const_tree mask, int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_cst_type (signed_type_for (type), -1);

  return
    tree_int_cst_equal (mask,
			const_binop (RSHIFT_EXPR,
				     const_binop (LSHIFT_EXPR, tmask,
						  size_int (precision - size)),
				     size_int (precision - size)));
}
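/* A plain-unsigned rendition (not part of GCC, guarded out of the build)
   of the shift trick used above: shifting all-ones left and then right by
   precision - size leaves exactly SIZE low-order ones.  The fixed 32-bit
   precision and the helper names are assumptions of this sketch.  */
#if 0
#include <assert.h>

static int
low_ones_mask_p (unsigned mask, int size)
{
  int precision = 32; /* assumed precision of 'unsigned' */
  unsigned ones = ~0u;

  return mask == ((ones << (precision - size)) >> (precision - size));
}

static void
mask_example (void)
{
  assert (low_ones_mask_p (0xffu, 8));
  assert (!low_ones_mask_p (0xfeu, 8));
}
#endif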
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, const_tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
		 >> (2 * HOST_BITS_PER_WIDE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
		 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
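/* A standalone sketch (not part of GCC, guarded out of the build): the
   sign bit of a WIDTH-bit type is the value 1 << (WIDTH - 1), which is
   what the hi/lo pair above encodes across two HOST_WIDE_INT halves.
   Assumes the usual two's-complement representation; the helper name is
   hypothetical.  */
#if 0
#include <assert.h>
#include <stdint.h>

static void
sign_bit_example (void)
{
  int width = 16;
  uint32_t sign = (uint32_t) 1 << (width - 1);

  assert (sign == 0x8000u);
  /* A 16-bit value with exactly this bit set is negative when
     reinterpreted as signed.  */
  assert ((int16_t) sign < 0);
}
#endif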
/* Subroutine for fold_truthop: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (const_tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
	  || TREE_CODE (exp) == SSA_NAME
	  || (DECL_P (exp)
	      && ! TREE_ADDRESSABLE (exp)
	      && ! TREE_THIS_VOLATILE (exp)
	      && ! DECL_NONLOCAL (exp)
	      /* Don't regard global variables as simple.  They may be
		 allocated in ways unknown to the compiler (shared memory,
		 #pragma weak, etc).  */
	      && ! TREE_PUBLIC (exp)
	      && ! DECL_EXTERNAL (exp)
	      /* Loading a static variable is unduly expensive, but global
		 registers aren't expensive.  */
	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
	X == 2 || X == 3 || X == 4 || X == 5
   and
	X >= 2 && X <= 5
   are converted to
	(unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
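/* A standalone sketch (not part of GCC, guarded out of the build) checking
   the example above exhaustively over a window of values: the chained
   equality tests, the pair of ordered comparisons, and the single unsigned
   range test always agree.  The helper name is hypothetical.  */
#if 0
#include <assert.h>

static void
range_test_example (void)
{
  int x;

  for (x = -1000; x <= 1000; x++)
    {
      int chained = (x == 2 || x == 3 || x == 4 || x == 5);
      int ordered = (x >= 2 && x <= 5);
      int range = ((unsigned) (x - 2) <= 3);

      assert (chained == ordered && ordered == range);
    }
}
#endif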
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
	     tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */

tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
	    bool *strict_overflow_p)
{
  enum tree_code code;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;
  location_t loc = EXPR_LOCATION (exp);

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  if (TREE_OPERAND_LENGTH (exp) > 0)
	    arg0 = TREE_OPERAND (exp, 0);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || TREE_CODE_CLASS (code) == tcc_unary
	      || TREE_CODE_CLASS (code) == tcc_binary)
	    arg0_type = TREE_TYPE (arg0);
	  if (TREE_CODE_CLASS (code) == tcc_binary
	      || TREE_CODE_CLASS (code) == tcc_comparison
	      || (TREE_CODE_CLASS (code) == tcc_expression
		  && TREE_OPERAND_LENGTH (exp) > 1))
	    arg1 = TREE_OPERAND (exp, 1);
	}

      switch (code)
	{
	case TRUTH_NOT_EXPR:
	  in_p = ! in_p, exp = arg0;
	  continue;

	case EQ_EXPR: case NE_EXPR:
	case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
	  /* We can only do something if the range is testing for zero
	     and if the second operand is an integer constant.  Note that
	     saying something is "in" the range we make is done by
	     complementing IN_P since it will set in the initial case of
	     being not equal to zero; "out" is leaving it alone.  */
	  if (low == 0 || high == 0
	      || ! integer_zerop (low) || ! integer_zerop (high)
	      || TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  switch (code)
	    {
	    case NE_EXPR:  /* - [c, c]  */
	      low = high = arg1;
	      break;
	    case EQ_EXPR:  /* + [c, c]  */
	      in_p = ! in_p, low = high = arg1;
	      break;
	    case GT_EXPR:  /* - [-, c] */
	      low = 0, high = arg1;
	      break;
	    case GE_EXPR:  /* + [c, -] */
	      in_p = ! in_p, low = arg1, high = 0;
	      break;
	    case LT_EXPR:  /* - [c, -] */
	      low = arg1, high = 0;
	      break;
	    case LE_EXPR:  /* + [-, c] */
	      in_p = ! in_p, low = 0, high = arg1;
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  /* If this is an unsigned comparison, we also know that EXP is
	     greater than or equal to zero.  We base the range tests we make
	     on that fact, so we record it here so we can parse existing
	     range tests.  We test arg0_type since often the return type
	     of, e.g. EQ_EXPR, is boolean.  */
	  if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
	    {
	      if (! merge_ranges (&n_in_p, &n_low, &n_high,
				  in_p, low, high, 1,
				  build_int_cst (arg0_type, 0),
				  NULL_TREE))
		break;

	      in_p = n_in_p, low = n_low, high = n_high;

	      /* If the high bound is missing, but we have a nonzero low
		 bound, reverse the range so it goes from zero to the low bound
		 minus 1.  */
	      if (high == 0 && low && ! integer_zerop (low))
		{
		  in_p = ! in_p;
		  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				      integer_one_node, 0);
		  low = build_int_cst (arg0_type, 0);
		}
	    }

	  exp = arg0;
	  continue;

	case NEGATE_EXPR:
	  /* (-x) IN [a,b] -> x in [-b, -a]  */
	  n_low = range_binop (MINUS_EXPR, exp_type,
			       build_int_cst (exp_type, 0),
			       0, high, 1);
	  n_high = range_binop (MINUS_EXPR, exp_type,
				build_int_cst (exp_type, 0),
				0, low, 0);
	  if (n_high != 0 && TREE_OVERFLOW (n_high))
	    break;
	  goto normalize;

	case BIT_NOT_EXPR:
	  /* ~ X -> -X - 1  */
	  exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
			build_int_cst (exp_type, 1));
	  SET_EXPR_LOCATION (exp, loc);
	  continue;

	case PLUS_EXPR: case MINUS_EXPR:
	  if (TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
	     move a constant to the other side.  */
	  if (!TYPE_UNSIGNED (arg0_type)
	      && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
	    break;

	  /* If EXP is signed, any overflow in the computation is undefined,
	     so we don't worry about it so long as our computations on
	     the bounds don't overflow.  For unsigned, overflow is defined
	     and this is exactly the right thing.  */
	  n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			       arg0_type, low, 0, arg1, 0);
	  n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
				arg0_type, high, 1, arg1, 0);
	  if ((n_low != 0 && TREE_OVERFLOW (n_low))
	      || (n_high != 0 && TREE_OVERFLOW (n_high)))
	    break;

	  if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
	    *strict_overflow_p = true;

	normalize:
	  /* Check for an unsigned range which has wrapped around the maximum
	     value thus making n_high < n_low, and normalize it.  */
	  if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
	    {
	      low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
				 integer_one_node, 0);
	      high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
				  integer_one_node, 0);

	      /* If the range is of the form +/- [ x+1, x ], we won't
		 be able to normalize it.  But then, it represents the
		 whole range or the empty set, so make it
		 +/- [ -, - ].  */
	      if (tree_int_cst_equal (n_low, low)
		  && tree_int_cst_equal (n_high, high))
		low = high = 0;
	      else
		in_p = ! in_p;
	    }
	  else
	    low = n_low, high = n_high;

	  exp = arg0;
	  continue;

	CASE_CONVERT: case NON_LVALUE_EXPR:
	  if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
	    break;

	  if (! INTEGRAL_TYPE_P (arg0_type)
	      || (low != 0 && ! int_fits_type_p (low, arg0_type))
	      || (high != 0 && ! int_fits_type_p (high, arg0_type)))
	    break;

	  n_low = low, n_high = high;

	  if (n_low != 0)
	    n_low = fold_convert_loc (loc, arg0_type, n_low);

	  if (n_high != 0)
	    n_high = fold_convert_loc (loc, arg0_type, n_high);

	  /* If we're converting arg0 from an unsigned type, to exp,
	     a signed type, we will be doing the comparison as unsigned.
	     The tests above have already verified that LOW and HIGH
	     are both positive.

	     So we have to ensure that we will handle large unsigned
	     values the same way that the current signed bounds treat
	     negative values.  */

	  if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
	    {
	      tree high_positive;
	      tree equiv_type;
	      /* For fixed-point modes, we need to pass the saturating flag
		 as the 2nd parameter.  */
	      if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
		equiv_type = lang_hooks.types.type_for_mode
			     (TYPE_MODE (arg0_type),
			      TYPE_SATURATING (arg0_type));
	      else
		equiv_type = lang_hooks.types.type_for_mode
			     (TYPE_MODE (arg0_type), 1);

	      /* A range without an upper bound is, naturally, unbounded.
		 Since convert would have cropped a very large value, use
		 the max value for the destination type.  */
	      high_positive
		= TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
		  : TYPE_MAX_VALUE (arg0_type);

	      if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
		high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
						 fold_convert_loc (loc,
								   arg0_type,
								   high_positive),
						 build_int_cst (arg0_type, 1));

	      /* If the low bound is specified, "and" the range with the
		 range for which the original unsigned value will be
		 positive.  */
	      if (low != 0)
		{
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      1, n_low, n_high, 1,
				      fold_convert_loc (loc, arg0_type,
							integer_zero_node),
				      high_positive))
		    break;

		  in_p = (n_in_p == in_p);
		}
	      else
		{
		  /* Otherwise, "or" the range with the range of the input
		     that will be interpreted as negative.  */
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      0, n_low, n_high, 1,
				      fold_convert_loc (loc, arg0_type,
							integer_zero_node),
				      high_positive))
		    break;

		  in_p = (in_p != n_in_p);
		}
	    }

	  exp = arg0;
	  low = n_low, high = n_high;
	  continue;

	default:
	  break;
	}

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
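/* A standalone sketch (not part of GCC, guarded out of the build) of the
   PLUS_EXPR/MINUS_EXPR bound shifting performed above: "x + 10 in [12, 15]"
   has the same truth value as "x in [2, 5]", so the constant migrates into
   the bounds and EXP shrinks to its operand.  The values avoid overflow;
   the helper name is hypothetical.  */
#if 0
#include <assert.h>

static void
make_range_example (void)
{
  int x;

  for (x = -100; x <= 100; x++)
    {
      int original = (x + 10 >= 12 && x + 10 <= 15);
      int shifted = (x >= 2 && x <= 5);

      assert (original == shifted);
    }
}
#endif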
4154 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4155 type, TYPE, return an expression to test if EXP is in (or out of, depending
4156 on IN_P) the range. Return 0 if the test couldn't be created. */
4159 build_range_check (location_t loc
, tree type
, tree exp
, int in_p
,
4160 tree low
, tree high
)
4162 tree etype
= TREE_TYPE (exp
), value
;
4164 #ifdef HAVE_canonicalize_funcptr_for_compare
4165 /* Disable this optimization for function pointer expressions
4166 on targets that require function pointer canonicalization. */
4167 if (HAVE_canonicalize_funcptr_for_compare
4168 && TREE_CODE (etype
) == POINTER_TYPE
4169 && TREE_CODE (TREE_TYPE (etype
)) == FUNCTION_TYPE
)
4175 value
= build_range_check (loc
, type
, exp
, 1, low
, high
);
4177 return invert_truthvalue_loc (loc
, value
);
4182 if (low
== 0 && high
== 0)
4183 return build_int_cst (type
, 1);
4186 return fold_build2_loc (loc
, LE_EXPR
, type
, exp
,
4187 fold_convert_loc (loc
, etype
, high
));
4190 return fold_build2_loc (loc
, GE_EXPR
, type
, exp
,
4191 fold_convert_loc (loc
, etype
, low
));
4193 if (operand_equal_p (low
, high
, 0))
4194 return fold_build2_loc (loc
, EQ_EXPR
, type
, exp
,
4195 fold_convert_loc (loc
, etype
, low
));
4197 if (integer_zerop (low
))
4199 if (! TYPE_UNSIGNED (etype
))
4201 etype
= unsigned_type_for (etype
);
4202 high
= fold_convert_loc (loc
, etype
, high
);
4203 exp
= fold_convert_loc (loc
, etype
, exp
);
4205 return build_range_check (loc
, type
, exp
, 1, 0, high
);
4208 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4209 if (integer_onep (low
) && TREE_CODE (high
) == INTEGER_CST
)
4211 unsigned HOST_WIDE_INT lo
;
4215 prec
= TYPE_PRECISION (etype
);
4216 if (prec
<= HOST_BITS_PER_WIDE_INT
)
4219 lo
= ((unsigned HOST_WIDE_INT
) 1 << (prec
- 1)) - 1;
4223 hi
= ((HOST_WIDE_INT
) 1 << (prec
- HOST_BITS_PER_WIDE_INT
- 1)) - 1;
4224 lo
= (unsigned HOST_WIDE_INT
) -1;
4227 if (TREE_INT_CST_HIGH (high
) == hi
&& TREE_INT_CST_LOW (high
) == lo
)
4229 if (TYPE_UNSIGNED (etype
))
4231 tree signed_etype
= signed_type_for (etype
);
4232 if (TYPE_PRECISION (signed_etype
) != TYPE_PRECISION (etype
))
4234 = build_nonstandard_integer_type (TYPE_PRECISION (etype
), 0);
4236 etype
= signed_etype
;
4237 exp
= fold_convert_loc (loc
, etype
, exp
);
4239 return fold_build2_loc (loc
, GT_EXPR
, type
, exp
,
4240 build_int_cst (etype
, 0));
  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetic for the type of the expression.
     First make sure that arithmetic in this type is valid, then make sure
     that it wraps around.  */
  if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
    etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
					    TYPE_UNSIGNED (etype));

  if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))

      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
	 for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
			  integer_one_node, 1);
      minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,

  high = fold_convert_loc (loc, etype, high);
  low = fold_convert_loc (loc, etype, low);
  exp = fold_convert_loc (loc, etype, exp);

  value = const_binop (MINUS_EXPR, high, low);

  if (POINTER_TYPE_P (etype))

      if (value != 0 && !TREE_OVERFLOW (value))

	  low = fold_convert_loc (loc, sizetype, low);
	  low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
	  return build_range_check (loc, type,
				    fold_build2_loc (loc, POINTER_PLUS_EXPR,
				    1, build_int_cst (etype, 0), value);

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (loc, type,
			      fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
			      1, build_int_cst (etype, 0), value);
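
/* Illustrative sketch (editor's addition, not part of fold-const.c):
   the two build_range_check rewrites above, written directly on C
   values.  For an 8-bit unsigned char C, C >= 1 && C <= 127 holds
   exactly when (signed char) C > 0; the general case turns a
   two-ended test into a single unsigned comparison using wrap-around
   subtraction.  Guarded out so it does not affect compilation.  */
#if 0
static int
range_check_examples (unsigned char c, unsigned int x)
{
  /* (c >= 1 && c <= 127)  becomes  (signed char) c > 0.  */
  int t1 = (signed char) c > 0;

  /* (x >= 10 && x <= 99)  becomes  x - 10 <= 89 in unsigned,
     wrap-around arithmetic: values below 10 wrap to huge numbers.  */
  int t2 = (x - 10u) <= 89u;

  return t1 & t2;
}
#endif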
/* Return the predecessor of VAL in its type, handling the infinite case.  */

range_predecessor (tree val)

  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))

    return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);

/* Return the successor of VAL in its type, handling the infinite case.  */

range_successor (tree val)

  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))

    return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
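
/* Illustrative sketch (editor's addition): the two helpers above step
   VAL by one, except that stepping past TYPE_MIN_VALUE/TYPE_MAX_VALUE
   yields "no value", which callers treat as an unbounded range end.
   On a plain int the same idea looks like this:  */
#if 0
#include <limits.h>
/* Returns 1 and stores val + 1 in *succ, or 0 at the maximum
   (no successor: the bound is treated as infinite).  */
static int
int_range_successor (int val, int *succ)
{
  if (val == INT_MAX)
    return 0;
  *succ = val + 1;
  return 1;
}
#endif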
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)

  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
      && integer_onep (range_binop (GT_EXPR, integer_type_node,
				    high1, 1, high0, 1))))

      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */

      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */

	in_p = 0, low = high = 0;

	in_p = 1, low = low1, high = high1;

	in_p = 1, low = low1, high = high0;

  else if (in0_p && ! in1_p)

      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the second range to the end of the first.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */

	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)

	  low = range_successor (high1);

	      /* We are in the weird situation where high0 > high1 but
		 high1 has no successor.  Punt.  */

      else if (! subset || highequal)

	  high = range_predecessor (low1);

	      /* low0 < low1 but low1 has no predecessor.  Punt.  */

  else if (! in0_p && in1_p)

      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */

	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;

	  low = range_successor (high0);

	      /* high1 > high0 but high0 has no successor.  Punt.  */

      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */

	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_successor (high0),

	      in_p = 0, low = low0, high = high1;

	      /* Canonicalize - [min, x] into - [-, x].  */
	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (low0)))

		    if (TYPE_PRECISION (TREE_TYPE (low0))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))

		    if (tree_int_cst_equal (low0,
					    TYPE_MIN_VALUE (TREE_TYPE (low0))))

		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
			&& integer_zerop (low0))

	      /* Canonicalize - [x, max] into - [x, -].  */
	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (high1)))

		    if (TYPE_PRECISION (TREE_TYPE (high1))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))

		    if (tree_int_cst_equal (high1,
					    TYPE_MAX_VALUE (TREE_TYPE (high1))))

		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
						       integer_one_node, 1)))

	  /* The ranges might be also adjacent between the maximum and
	     minimum values of the given type.  For
	     - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
	     return + [x + 1, y - 1].  */
	  if (low0 == 0 && high1 == 0)

	      low = range_successor (high0);
	      high = range_predecessor (low1);
	      if (low == 0 || high == 0)

	in_p = 0, low = low0, high = high0;

	in_p = 0, low = low0, high = high1;

  *pin_p = in_p, *plow = low, *phigh = high;
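
/* Illustrative sketch (editor's addition, not part of fold-const.c):
   the "both ranges included" case above on plain ints, with the
   canonical ordering lo0 <= lo1 already established.  Note the real
   code represents "always false" as an in_p = 0 result rather than
   as a failure.  */
#if 0
/* Merge "x in [lo0,hi0] && x in [lo1,hi1]" into one range, lo0 <= lo1.
   Returns 1 with the merged bounds, or 0 when the ranges are disjoint
   (the conjunction is then false for every x).  */
static int
merge_and_example (int lo0, int hi0, int lo1, int hi1, int *lo, int *hi)
{
  if (hi0 < lo1)
    return 0;                       /* no overlap */
  *lo = lo1;                        /* second range starts last */
  *hi = hi1 <= hi0 ? hi1 : hi0;     /* subset, or overlap tail */
  return 1;
}
#endif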
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is being used also to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

fold_cond_expr_with_comparison (location_t loc, tree type,
				tree arg0, tree arg1, tree arg2)

  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && (FLOAT_TYPE_P (TREE_TYPE (arg01))
	  ? real_zerop (arg01)
	  : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	  /* In the case that A is of the form X-Y, '-A' (arg2) may
	     have already been folded to Y-X, check for that.  */
	  || (TREE_CODE (arg1) == MINUS_EXPR
	      && TREE_CODE (arg2) == MINUS_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0),
				  TREE_OPERAND (arg2, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg1, 1),
				  TREE_OPERAND (arg2, 0), 0))))

	tem = fold_convert_loc (loc, arg1_type, arg1);
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  negate_expr (tem)));

	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

	if (flag_trapping_math)

	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert_loc (loc, signed_type_for
				   (TREE_TYPE (arg1)), arg1);
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));

	if (flag_trapping_math)

	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert_loc (loc, signed_type_for
				   (TREE_TYPE (arg1)), arg1);
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return negate_expr (fold_convert_loc (loc, type, tem));

	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && integer_zerop (arg01) && integer_zerop (arg2))

      if (comp_code == NE_EXPR)
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      else if (comp_code == EQ_EXPR)
	return build_int_cst (type, 0);
  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
	  || (strcmp (lang_hooks.name, "GNU C++") != 0
	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
	  || ! maybe_lvalue_p (arg1)
	  || ! maybe_lvalue_p (arg2)))

      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))

	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));

	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))

	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
		    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					      fold_convert_loc (loc, type, tem));

	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))

	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
		    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					      fold_convert_loc (loc, type, tem));

	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, arg2));

	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, arg1));

	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)

      if (TREE_CODE (arg1) == INTEGER_CST)

	  /* We can replace A with C1 in this case.  */
	  arg1 = fold_convert_loc (loc, type, arg01);
	  return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);

	/* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
	   MIN_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),

	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));

	/* If C1 is C2 - 1, this is min(A, C2), with the same care
	   as above.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),

	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));

	/* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
	   MAX_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),

	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));

	/* If C1 is C2 + 1, this is max(A, C2), with the same care as above.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),

	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
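
/* Illustrative sketch (editor's addition): the "A op 0 ? A : -A" and
   "A op B ? A : B" families above, written out on ints, where signed
   zeros and NaNs are not an issue.  */
#if 0
static int iabs_example (int a) { return a >= 0 ? a : -a; }       /* abs */
static int imin_example (int a, int b) { return a <= b ? a : b; } /* min */
static int imax_example (int a, int b) { return a >= b ? a : b; } /* max */
#endif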
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

fold_range_test (location_t loc, enum tree_code code, tree type,

  int or_op = (code == TRUTH_ORIF_EXPR
	       || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when simplifying range test");

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
      && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type,
				 : rhs != 0 ? rhs : integer_zero_node,

      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue_loc (loc, tem) : tem;

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
	   && lhs != 0 && rhs != 0
	   && (code == TRUTH_ANDIF_EXPR
	       || code == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))

      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))

	  tem = build2 (code == TRUTH_ANDIF_EXPR
			? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
	  SET_EXPR_LOCATION (tem, loc);

      else if (lang_hooks.decls.global_bindings_p () == 0
	       && ! CONTAINS_PLACEHOLDER_P (lhs))

	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (loc, type, common,
					     or_op ? ! in0_p : in0_p,
	      && (0 != (rhs = build_range_check (loc, type, common,
						 or_op ? ! in1_p : in1_p,

	      if (strict_overflow_p)
		fold_overflow_warning (warnmsg,
				       WARN_STRICT_OVERFLOW_COMPARISON);
	      tem = build2 (code == TRUTH_ANDIF_EXPR
			    ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
	      SET_EXPR_LOCATION (tem, loc);
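
/* Illustrative sketch (editor's addition): the classic payoff of the
   range-test machinery.  Both comparisons mention the same operand, so
   the two ranges merge and build_range_check emits one unsigned test.  */
#if 0
static int
is_digit_example (int ch)
{
  /* ch >= '0' && ch <= '9'  folds to  (unsigned) (ch - '0') <= 9.  */
  return (unsigned) (ch - '0') <= 9u;
}
#endif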
/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is sign-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

unextend (tree c, int p, int unsignedp, tree mask)

  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));

  if (p == modesize || unsignedp)

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1));

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));

  temp = const_binop (BIT_AND_EXPR, temp,
		      fold_convert (TREE_TYPE (c), mask));
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
/* For an expression that has the form
     (A && B) || !A
   or
     (A || B) && !A,
   we can drop one of the inner expressions and simplify to
     B || !A
   or
     B && !A, respectively.

   LOC is the location of the resulting expression.  OP is the inner
   logical operation; the left-hand side in the examples above, while CMPOP
   is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
   removing a condition that guards another, as in
     (A != NULL && A->...) || A == NULL
   which we must not transform.  If RHS_ONLY is true, only eliminate the
   right-most operand of the inner logical operation.  */

merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,

  tree type = TREE_TYPE (cmpop);
  enum tree_code code = TREE_CODE (cmpop);
  enum tree_code truthop_code = TREE_CODE (op);
  tree lhs = TREE_OPERAND (op, 0);
  tree rhs = TREE_OPERAND (op, 1);
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code rhs_code = TREE_CODE (rhs);
  enum tree_code lhs_code = TREE_CODE (lhs);
  enum tree_code inv_code;

  if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))

  if (TREE_CODE_CLASS (code) != tcc_comparison)

  if (rhs_code == truthop_code)

      tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
      if (newrhs != NULL_TREE)

	  rhs_code = TREE_CODE (rhs);

  if (lhs_code == truthop_code && !rhs_only)

      tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
      if (newlhs != NULL_TREE)

	  lhs_code = TREE_CODE (lhs);

  inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
  if (inv_code == rhs_code
      && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))

  if (!rhs_only && inv_code == lhs_code
      && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))

  if (rhs != orig_rhs || lhs != orig_lhs)
    return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
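
/* Illustrative sketch (editor's addition): the transformation and the
   RHS_ONLY caveat above, on concrete C conditions.  */
#if 0
struct node { int flag; };
static int
guard_example (struct node *a, int b)
{
  /* (b != 0 && a->flag) || b == 0 may drop the b != 0 arm, since
     b == 0 already covers it, giving a->flag || b == 0.  But in
     (a != NULL && a->flag) || a == NULL, the a != NULL arm guards
     the dereference and must stay; hence the RHS_ONLY restriction.  */
  return (b != 0 && a->flag) || b == 0;
}
#endif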
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

fold_truthop (location_t loc, enum tree_code code, tree truth_type,

  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	 (a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  HOST_WIDE_INT first_bit, end_bit;
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code orig_code = code;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))

      lhs = build2 (NE_EXPR, truth_type, lhs,
		    build_int_cst (TREE_TYPE (lhs), 0));

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))

      rhs = build2 (NE_EXPR, truth_type, rhs,
		    build_int_cst (TREE_TYPE (rhs), 0));

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))

      if (operand_equal_p (ll_arg, rl_arg, 0)
	  && operand_equal_p (lr_arg, rr_arg, 0))

	  result = combine_comparisons (loc, code, lcode, rcode,
					truth_type, ll_arg, lr_arg);

      else if (operand_equal_p (ll_arg, rr_arg, 0)
	       && operand_equal_p (lr_arg, rl_arg, 0))

	  result = combine_comparisons (loc, code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  Avoid doing this if the RHS is a floating-point
     comparison since those can trap.  */

  if (BRANCH_COST (optimize_function_for_speed_p (cfun),
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))

      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))

	  result = build2 (NE_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));
	  goto fold_truthop_exit;

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))

	  result = build2 (EQ_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));
	  goto fold_truthop_exit;

      if (LOGICAL_OP_NON_SHORT_CIRCUIT)

	  if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)

	      result = build2 (code, truth_type, lhs, rhs);
	      goto fold_truthop_exit;
  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))

  ll_inner = decode_field_reference (loc, ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask,
  lr_inner = decode_field_reference (loc, lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask,
  rl_inner = decode_field_reference (loc, rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask,
  rr_inner = decode_field_reference (loc, rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask,

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs.  */

  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))

    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)

      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))

	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)

      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
  if (lnmode == VOIDmode)

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)

      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
			 size_int (xll_bitpos));
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
			 size_int (xrl_bitpos));

      l_const = fold_convert_loc (loc, lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,

	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);

      r_const = fold_convert_loc (loc, lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,

	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
  /* If the right sides are not constant, do the same for them.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */

      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
      if (rnmode == VOIDmode)

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)

	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
			     size_int (xlr_bitpos));
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
			     size_int (xrr_bitpos));

      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)

	  lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  result = build2 (wanted_code, truth_type, lhs, rhs);
	  goto fold_truthop_exit;

      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))

	  lhs = make_bit_field_ref (loc, ll_inner, lntype,
				    ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
	  rhs = make_bit_field_ref (loc, lr_inner, rntype,
				    lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)));
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)));

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  if (lntype != rntype)

	      if (lnbitsize > rnbitsize)

		  lhs = fold_convert_loc (loc, rntype, lhs);
		  ll_mask = fold_convert_loc (loc, rntype, ll_mask);

	      else if (lnbitsize < rnbitsize)

		  rhs = fold_convert_loc (loc, lntype, rhs);
		  lr_mask = fold_convert_loc (loc, lntype, lr_mask);

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

	  result = build2 (wanted_code, truth_type, lhs, rhs);
	  goto fold_truthop_exit;

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
			   const_binop (BIT_AND_EXPR, result, r_const)) != 1)

      if (wanted_code == NE_EXPR)

	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);

	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
  if (! all_ones_mask_p (ll_mask, lnbitsize))

      result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
      SET_EXPR_LOCATION (result, loc);

  result = build2 (wanted_code, truth_type, result,
		   const_binop (BIT_IOR_EXPR, l_const, r_const));

  SET_EXPR_LOCATION (result, loc);
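
/* Illustrative sketch (editor's addition): what the masking above buys.
   If two narrow fields live in the same word, both equality tests can
   be done with one load, one AND and one compare.  The layout shown is
   hypothetical; the real code derives positions via get_best_mode.  */
#if 0
#include <stdint.h>
static int
merged_compare_example (uint32_t word)
{
  /* Suppose field b is bits 0-7 and field c is bits 8-15 of WORD.
     (b == 2 && c == 4) becomes one masked compare:  */
  return (word & 0xFFFFu) == ((4u << 8) | 2u);
}
#endif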
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,

  enum tree_code op_code;

  int consts_equal, consts_lt;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_OVERFLOW (minmax_const))

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */

    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:

	= optimize_minmax_comparison (loc,
				      invert_tree_comparison (code, false),

	  return invert_truthvalue_loc (loc, tem);

	fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
			 optimize_minmax_comparison
			 (loc, EQ_EXPR, type, arg0, comp_const),
			 optimize_minmax_comparison
			 (loc, GT_EXPR, type, arg0, comp_const));

      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5  */
	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);

	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand_loc (loc, type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
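
/* Illustrative sketch (editor's addition): one row of the comment table
   above as a plain C identity for MAX with the constant 0.  */
#if 0
static int
max_gt_example (int x)
{
  int m = x > 0 ? x : 0;       /* MAX (X, 0) */
  /* m > 5 is x > 5; m > -1 is always true; m == -1 is always false.  */
  return (m > 5) == (x > 5);   /* evaluates to 1 for every x */
}
#endif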
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.

   We set *STRICT_OVERFLOW_P to true if the return value depends on
   signed overflow being undefined.  Otherwise we do not change
   *STRICT_OVERFLOW_P.  */

extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
		bool *strict_overflow_p)

  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);

extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
		  bool *strict_overflow_p)

  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);

  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;
  bool sub_strict_overflow_p;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */

      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
	return const_binop (code, fold_convert (ctype, t),
			    fold_convert (ctype, c));

    CASE_CONVERT: case NON_LVALUE_EXPR:
      /* If op0 is an expression ...  */
      if ((COMPARISON_CLASS_P (op0)
	   || UNARY_CLASS_P (op0)
	   || BINARY_CLASS_P (op0)
	   || VL_EXP_CLASS_P (op0)
	   || EXPRESSION_CLASS_P (op0))
	  /* ... and has wrapping overflow, and its type is smaller
	     than ctype, then we cannot pass through as widening.  */
	  && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
	       && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
		     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
	       && (TYPE_PRECISION (ctype)
		   > TYPE_PRECISION (TREE_TYPE (op0))))
	      /* ... or this is a truncation (t is narrower than op0),
		 then we cannot pass through this narrowing.  */
	      || (TYPE_PRECISION (type)
		  < TYPE_PRECISION (TREE_TYPE (op0)))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TYPE_UNSIGNED (ctype)
		      != TYPE_UNSIGNED (TREE_TYPE (op0))))
	      /* ... or has undefined overflow while the converted to
		 type has not, we cannot do the operation in the inner type
		 as that would introduce undefined overflow.  */
	      || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
		  && !TYPE_OVERFLOW_UNDEFINED (type))))

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && !TREE_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 ? ctype : NULL_TREE,
					 strict_overflow_p))))

      /* If widening the type changes it from signed to unsigned, then we
	 must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))

	  tree cstype = (*signed_type_for) (ctype);
	  if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))

	      t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
	      return fold_convert (ctype, t1);

      /* If the constant is negative, we cannot simplify this.  */
      if (tree_int_cst_sgn (c) == -1)

      if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      sub_strict_overflow_p = false;
      if ((t1 = extract_muldiv (op0, c, code, wide_type,
				&sub_strict_overflow_p)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type,
				   &sub_strict_overflow_p)) != 0)

	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  Signed
	 left-shift overflow is implementation-defined rather than
	 undefined in C90, so do not convert signed left shift into
	 multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
	  && TREE_INT_CST_HIGH (op1) == 0
	  && 0 != (t1 = fold_convert (ctype,
				      const_binop (LSHIFT_EXPR,
	  && !TREE_OVERFLOW (t1))
	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
				       ? MULT_EXPR : FLOOR_DIV_EXPR,
				       fold_convert (ctype, op0),
			       c, code, wide_type, strict_overflow_p);
    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      sub_strict_overflow_p = false;
      t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
      t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))

	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)

	  tcode = PLUS_EXPR, op1 = negate_expr (op1);
	  /* If OP1 was not easily negatable, the constant may be OP0.  */
	  if (TREE_CODE (op0) == INTEGER_CST)

      if (TREE_CODE (op1) != INTEGER_CST)

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)

	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))

	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c));
	  /* We allow the constant to overflow with wrapping semantics.  */
	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))

      /* If we have an unsigned type that is not a sizetype, we cannot widen
	 the operation since it will change the result if the original
	 computation overflowed.  */
      if (TYPE_UNSIGNED (ctype)
	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
	return fold_build2 (tcode, ctype,
			    fold_build2 (code, ctype,
					 fold_convert (ctype, op0),
					 fold_convert (ctype, c)),

      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  /* If the multiplication can overflow we cannot optimize this.
	     ??? Until we can properly mark individual operations as
	     not overflowing we need to treat sizetype special here as
	     stor-layout relies on this optimization to make
	     DECL_FIELD_BIT_OFFSET always a constant.  */
	  && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
	      || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
		  && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))

	  *strict_overflow_p = true;
	  return omit_one_operand (type, integer_zero_node, op0);

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
	  && (t1 = extract_muldiv (op0, c, code, wide_type,
				   strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type,
					strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
			    fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
	  && 0 != (t1 = int_const_binop (MULT_EXPR,
					 fold_convert (ctype, op1),
					 fold_convert (ctype, c), 1))
	  && 0 != (t1 = force_fit_type_double (ctype, tree_to_double_int (t1),
					       (TYPE_UNSIGNED (ctype)
						&& tcode != MULT_EXPR) ? -1 : 1,
					       TREE_OVERFLOW (t1)))
	  && !TREE_OVERFLOW (t1))
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with either an
	 operation or CODE or TCODE.

	 If we have an unsigned type that is not a sizetype, we cannot do
	 this since it will change the result if the original computation
	 overflowed.  */
      if ((TYPE_OVERFLOW_UNDEFINED (ctype)
	   || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
		  && code != MULT_EXPR)))

	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))

	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,

	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))

	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
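
/* Illustrative sketch (editor's addition): the motivating example from
   the comment before extract_muldiv, assuming no overflow (or signed
   overflow being undefined, which is what *STRICT_OVERFLOW_P records).  */
#if 0
static long
extract_muldiv_example (long x, long y)
{
  /* ((x * 8) + (y * 16)) / 4 can be rewritten as (x * 2) + (y * 4):
     both multipliers are multiples of the divisor, so the division
     distributes over the sum exactly.  */
  return x * 2 + y * 4;
}
#endif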
/* Return a node which has the indicated constant VALUE (either 0 or
   1), and is of the indicated TYPE.  */

constant_boolean_node (int value, tree type)

  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;

  return build_int_cst (type, value);
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (location_t loc,
				     enum tree_code code,
				     tree type, tree op0, tree op1,
				     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;

  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an exception, then it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  /* This transformation is only worthwhile if we don't have to wrap ARG
     in a SAVE_EXPR and the operation can be simplified on at least one
     of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg)
      && (TREE_SIDE_EFFECTS (arg)
	  || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
    return NULL_TREE;

  arg = fold_convert_loc (loc, arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert_loc (loc, cond_type, true_value);
      if (cond_first_p)
	lhs = fold_build2_loc (loc, code, type, true_value, arg);
      else
	lhs = fold_build2_loc (loc, code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert_loc (loc, cond_type, false_value);
      if (cond_first_p)
	rhs = fold_build2_loc (loc, code, type, false_value, arg);
      else
	rhs = fold_build2_loc (loc, code, type, arg, false_value);
    }

  /* Check that we have simplified at least one of the branches.  */
  if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
    return NULL_TREE;

  return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
}
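/* Editor's sketch (illustrative only, not part of GCC; excluded from the
   build): the transformation implemented above at the source level.  The
   operation is pushed into both arms of the conditional, and the fold is
   kept only when at least one arm then simplifies (here both become
   constants):  */
#if 0
int cond_before (int b) { return 10 + (b ? 1 : 2); }
int cond_after  (int b) { return b ? 11 : 12; }
#endif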
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

bool
fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
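/* Editor's sketch (illustrative only, not part of GCC; excluded from the
   build): why the sign of zero blocks the PLUS direction of this fold
   under IEEE semantics.  */
#if 0
double plus_not_identity (double x) { return x + 0.0; } /* -0.0 -> +0.0 */
double minus_identity    (double x) { return x - 0.0; } /* keeps -0.0, so
  the NEGATE direction folds unless round-towards-minus-infinity holds */
#endif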
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (location_t loc,
		     enum built_in_function fcode, enum tree_code code,
		     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = CALL_EXPR_ARG (arg0, 0);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
	{
	  /* sqrt(x) < y is always false, if y is negative.  */
	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg);

	  /* sqrt(x) > y is always true, if y is negative and we
	     don't care about NaNs, i.e. negative values of x.  */
	  if (code == NE_EXPR || !HONOR_NANS (mode))
	    return omit_one_operand_loc (loc, type, integer_one_node, arg);

	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
	  return fold_build2_loc (loc, GE_EXPR, type, arg,
				  build_real (TREE_TYPE (arg), dconst0));
	}
      else if (code == GT_EXPR || code == GE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
	      if (HONOR_INFINITIES (mode))
		return fold_build2_loc (loc, EQ_EXPR, type, arg,
					build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) > y is always false, when y is very large
		 and we don't care about infinities.  */
	      return omit_one_operand_loc (loc, type, integer_zero_node, arg);
	    }

	  /* sqrt(x) > c is the same as x > c*c.  */
	  return fold_build2_loc (loc, code, type, arg,
				  build_real (TREE_TYPE (arg), c2));
	}
      else if (code == LT_EXPR || code == LE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) < y is always true, when y is a very large
		 value and we don't care about NaNs or Infinities.  */
	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
		return omit_one_operand_loc (loc, type, integer_one_node, arg);

	      /* sqrt(x) < y is x != +Inf when y is very large and we
		 don't care about NaNs.  */
	      if (! HONOR_NANS (mode))
		return fold_build2_loc (loc, NE_EXPR, type, arg,
					build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) < y is x >= 0 when y is very large and we
		 don't care about Infinities.  */
	      if (! HONOR_INFINITIES (mode))
		return fold_build2_loc (loc, GE_EXPR, type, arg,
					build_real (TREE_TYPE (arg), dconst0));

	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
	      if (lang_hooks.decls.global_bindings_p () != 0
		  || CONTAINS_PLACEHOLDER_P (arg))
		return NULL_TREE;

	      arg = save_expr (arg);
	      return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				      fold_build2_loc (loc, GE_EXPR, type, arg,
						       build_real (TREE_TYPE (arg),
								   dconst0)),
				      fold_build2_loc (loc, NE_EXPR, type, arg,
						       build_real (TREE_TYPE (arg),
								   c2)));
	    }

	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
	  if (! HONOR_NANS (mode))
	    return fold_build2_loc (loc, code, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
	  if (lang_hooks.decls.global_bindings_p () == 0
	      && ! CONTAINS_PLACEHOLDER_P (arg))
	    {
	      arg = save_expr (arg);
	      return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				      fold_build2_loc (loc, GE_EXPR, type, arg,
						       build_real (TREE_TYPE (arg),
								   dconst0)),
				      fold_build2_loc (loc, code, type, arg,
						       build_real (TREE_TYPE (arg),
								   c2)));
	    }
	}
    }

  return NULL_TREE;
}
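/* Editor's sketch (illustrative only, not part of GCC; excluded from the
   build): the sqrt comparison fold at the source level.  For the > case
   with a nonnegative constant the rewrite is exact: a NaN or negative x
   fails both tests, so no HONOR_NANS guard is needed.  */
#if 0
int sqrt_before (double x) { return __builtin_sqrt (x) > 4.0; }
int sqrt_after  (double x) { return x > 16.0; }   /* c*c = 16.0 */
#endif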
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
	return NULL_TREE;
      return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      if (lang_hooks.decls.global_bindings_p () == 0
	  && ! CONTAINS_PLACEHOLDER_P (arg0))
	{
	  arg0 = save_expr (arg0);
	  return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
	}
      break;

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
	return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
				arg0, build_real (TREE_TYPE (arg0), max));

      temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}
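/* Editor's sketch (illustrative only, not part of GCC; excluded from the
   build): a comparison against +Inf becomes a comparison against the
   largest finite value, since nothing lies between them.  */
#if 0
int inf_before (double x) { return x >= __builtin_inf (); }
int inf_after  (double x) { return x > 1.7976931348623157e+308; } /* DBL_MAX */
#endif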
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (location_t loc,
		  enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  double_int val;
  bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
  bool neg_overflow;
  int overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1, 0);  */
  overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
				   TREE_INT_CST_HIGH (arg01),
				   TREE_INT_CST_LOW (arg1),
				   TREE_INT_CST_HIGH (arg1),
				   &val.low, &val.high, unsigned_p);
  prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
  neg_overflow = false;

  if (unsigned_p)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1), 0);
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0).  */
      overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
				       TREE_INT_CST_HIGH (prod),
				       TREE_INT_CST_LOW (tmp),
				       TREE_INT_CST_HIGH (tmp),
				       &val.low, &val.high, unsigned_p);
      hi = force_fit_type_double (TREE_TYPE (arg00), val,
				  -1, overflow | TREE_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1), 0);
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  neg_overflow = true;
	  lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
	  hi = prod;
	  break;

	case 0:
	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
	  hi = tmp;
	  break;

	case 1:
	  hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
	  lo = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1), 0);
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
	  lo = prod;
	  break;

	case 0:
	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
	  lo = tmp;
	  break;

	case 1:
	  neg_overflow = true;
	  lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
	  hi = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
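/* Editor's sketch (illustrative only, not part of GCC; excluded from the
   build): X/C1 op C2 becomes a range check on X, because truncating
   division maps a whole interval of X values onto each quotient.  */
#if 0
int div_before (int x) { return x / 4 == 3; }
int div_after  (int x) { return x >= 12 && x <= 15; } /* [lo, hi] = [12, 15] */
#endif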
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */

static tree
fold_single_bit_test_into_sign_test (location_t loc,
				     enum tree_code code, tree arg0, tree arg1,
				     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
	  /* This is only a win if casting to a signed type is cheap,
	     i.e. when arg00's type is not a partial mode.  */
	  && TYPE_PRECISION (TREE_TYPE (arg00))
	     == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
	{
	  tree stype = signed_type_for (TREE_TYPE (arg00));
	  return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				  result_type,
				  fold_convert_loc (loc, stype, arg00),
				  build_int_cst (stype, 0));
	}
    }

  return NULL_TREE;
}
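/* Editor's sketch (illustrative only, not part of GCC; excluded from the
   build): when the tested bit is the sign bit, the AND disappears in
   favor of a signed comparison (assumes 32-bit two's complement int).  */
#if 0
int sign_before (int a) { return (a & 0x80000000u) != 0; }
int sign_after  (int a) { return a < 0; }
#endif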
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (location_t loc, enum tree_code code,
		      tree arg0, tree arg1, tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
	 test.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
						 result_type);
      if (tem)
	return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
	 Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
		      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert_loc (loc, intermediate_type, inner);

      if (bitnum != 0)
	inner = build2 (RSHIFT_EXPR, intermediate_type,
			inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      if (code == EQ_EXPR)
	inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type,
				 inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert_loc (loc, result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
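/* Editor's sketch (illustrative only, not part of GCC; excluded from the
   build): the generic single-bit test becomes shift-and-mask, avoiding a
   comparison.  */
#if 0
int bit_before (unsigned a) { return (a & 8) != 0; }
int bit_after  (unsigned a) { return (a >> 3) & 1; } /* bitnum = log2(8) */
#endif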
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (const_tree arg0, const_tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
	 && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == FIXED_CST)
    return 0;
  if (TREE_CODE (arg0) == FIXED_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (optimize_function_for_size_p (cfun))
    return 0;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  /* It is preferable to swap two SSA_NAME to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  /* Put SSA_NAMEs last.  */
  if (TREE_CODE (arg1) == SSA_NAME)
    return 0;
  if (TREE_CODE (arg0) == SSA_NAME)
    return 1;

  /* Put variables last.  */
  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  return 0;
}
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */

static tree
fold_widened_comparison (location_t loc, enum tree_code code,
			 tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (shorter_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, NULL_TREE);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
	  || ((TYPE_PRECISION (shorter_type)
	       >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
	      && (TYPE_UNSIGNED (shorter_type)
		  == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
	  || (TREE_CODE (arg1_unw) == INTEGER_CST
	      && (TREE_CODE (shorter_type) == INTEGER_TYPE
		  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
	      && int_fits_type_p (arg1_unw, shorter_type))))
    return fold_build2_loc (loc, code, type, arg0_unw,
			    fold_convert_loc (loc, shorter_type, arg1_unw));

  if (TREE_CODE (arg1_unw) != INTEGER_CST
      || TREE_CODE (shorter_type) != INTEGER_TYPE
      || !int_fits_type_p (arg1_unw, shorter_type))
    return NULL_TREE;

  /* If we are comparing with the integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      else if (below)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case GT_EXPR:
    case GE_EXPR:
      if (above)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      else if (below)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
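/* Editor's sketch (illustrative only, not part of GCC; excluded from the
   build): comparing a widened value against a constant outside the narrow
   type's range has a known result (assumes 16-bit short).  */
#if 0
int widen_before (short s) { return (int) s == 100000; } /* > SHRT_MAX */
int widen_after  (short s) { return 0; }                 /* always false */
#endif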
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
			      tree arg0, tree arg1)
{
  tree arg0_inner;
  tree inner_type, outer_type;

  if (!CONVERT_EXPR_P (arg0))
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) != INTEGER_CST
      && !(CONVERT_EXPR_P (arg1)
	   && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
       || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
				  0, TREE_OVERFLOW (arg1));
  else
    arg1 = fold_convert_loc (loc, inner_type, arg1);

  return fold_build2_loc (loc, code, type, arg0_inner, arg1);
}
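/* Editor's sketch (illustrative only, not part of GCC; excluded from the
   build): an equality test is insensitive to a sign-changing cast of the
   same precision, so the cast is dropped and the constant retyped.  */
#if 0
int cast_before (int x) { return (unsigned) x == 5u; }
int cast_after  (int x) { return x == 5; }
#endif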
/* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
   step of the array.  Reconstructs s and delta in the case of s *
   delta being an integer constant (and thus already folded).  ADDR is
   the address.  MULT is the multiplicative expression.  If the
   function succeeds, the new address expression is returned.
   Otherwise NULL_TREE is returned.  LOC is the location of the
   resulting expression.  */

static tree
try_move_mult_to_index (location_t loc, tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;
  bool mdim = false;

  /* Strip the nops that might be added when converting op1 to sizetype.  */
  STRIP_NOPS (op1);

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  s = arg0;
	  delta = arg1;
	}
      else if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  s = arg1;
	  delta = arg0;
	}
      else
	return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Simulate we are delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
	{
	  tree domain;

	  /* Remember if this was a multi-dimensional array.  */
	  if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
	    mdim = true;

	  domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
	  if (! domain)
	    continue;
	  itype = TREE_TYPE (domain);

	  step = array_ref_element_size (ref);
	  if (TREE_CODE (step) != INTEGER_CST)
	    continue;

	  if (s)
	    {
	      if (! tree_int_cst_equal (step, s))
		continue;
	    }
	  else
	    {
	      /* Try if delta is a multiple of step.  */
	      tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
	      if (! tmp)
		continue;
	      delta = tmp;
	    }

	  /* Only fold here if we can verify we do not overflow one
	     dimension of a multi-dimensional array.  */
	  if (mdim)
	    {
	      tree tmp;

	      if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
		  || !TYPE_MAX_VALUE (domain)
		  || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
		continue;

	      tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
				     fold_convert_loc (loc, itype,
						       TREE_OPERAND (ref, 1)),
				     fold_convert_loc (loc, itype, delta));
	      if (!tmp
		  || TREE_CODE (tmp) != INTEGER_CST
		  || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
		continue;
	    }

	  break;
	}
      else
	mdim = false;

      if (!handled_component_p (ref))
	return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  SET_EXPR_LOCATION (ret, loc);
  pos = ret;

  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
					   fold_convert_loc (loc, itype,
							     TREE_OPERAND (pos, 1)),
					   fold_convert_loc (loc, itype, delta));

  return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
}
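/* Editor's sketch (illustrative only, not part of GCC; excluded from the
   build): when the pointer offset is a multiple of the element size, it
   folds into the array index.  */
#if 0
extern int a[100];
int *idx_before (int i, int d) { return &a[i] + d; } /* p+ d*sizeof(int) */
int *idx_after  (int i, int d) { return &a[i + d]; }
#endif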
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  if (POINTER_TYPE_P (typea))
    {
      /* Convert the pointer types into integer before taking the difference.  */
      tree ta = fold_convert_loc (loc, ssizetype, a);
      tree ta1 = fold_convert_loc (loc, ssizetype, a1);
      diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
    }
  else
    diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);

  if (!diff || !integer_onep (diff))
    return NULL_TREE;

  return fold_build2_loc (loc, GE_EXPR, type, a, y);
}
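/* Editor's sketch (illustrative only, not part of GCC; excluded from the
   build): with the bound a < x in hand, a + 1 cannot have wrapped, so the
   sharp inequality can be weakened.  */
#if 0
int ineq_before (unsigned a, unsigned x, unsigned y)
{ return a < x && a + 1 > y; }
int ineq_after (unsigned a, unsigned x, unsigned y)
{ return a < x && a >= y; }
#endif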
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else if (TREE_CODE (arg0) == INTEGER_CST)
    {
      arg00 = build_one_cst (type);
      arg01 = arg0;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      arg10 = build_one_cst (type);
      /* As we canonicalize A - 2 to A + -2 get rid of that sign for
	 the purpose of this canonicalization.  */
      if (TREE_INT_CST_HIGH (arg1) == -1
	  && negate_expr_p (arg1)
	  && code == PLUS_EXPR)
	{
	  arg11 = negate_expr (arg1);
	  code = MINUS_EXPR;
	}
      else
	arg11 = arg1;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (host_integerp (arg01, 0)
	   && host_integerp (arg11, 0))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = TREE_INT_CST_LOW (arg01);
      int11 = TREE_INT_CST_LOW (arg11);

      /* Move min of absolute values to int11.  */
      if ((int01 >= 0 ? int01 : -int01)
	  < (int11 >= 0 ? int11 : -int11))
	{
	  tmp = int01, int01 = int11, int11 = tmp;
	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
	  maybe_same = arg01;
	  swap = true;
	}
      else
	maybe_same = arg11;

      if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
	  /* The remainder should not be a constant, otherwise we
	     end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
	     increased the number of multiplications necessary.  */
	  && TREE_CODE (arg10) != INTEGER_CST)
	{
	  alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
				  build_int_cst (TREE_TYPE (arg00),
						 int01 / int11));
	  alt1 = arg10;
	  same = maybe_same;
	  if (swap)
	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
	}
    }

  if (same)
    return fold_build2_loc (loc, MULT_EXPR, type,
			    fold_build2_loc (loc, code, type,
					     fold_convert_loc (loc, type, alt0),
					     fold_convert_loc (loc, type, alt1)),
			    fold_convert_loc (loc, type, same));

  return NULL_TREE;
}
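/* Editor's sketch (illustrative only, not part of GCC; excluded from the
   build): factoring a common multiplicand out of a sum, including the
   power-of-two rescue when the constants differ.  */
#if 0
int fact1 (int a, int b, int c) { return a * c + b * c; } /* (a + b) * c  */
int fact2 (int i)               { return i * 4 + i * 8; } /* (i + i*2) * 4 */
#endif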
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (bitpos < HOST_BITS_PER_WIDE_INT)
	value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
      else
	value = (unsigned char) (TREE_INT_CST_HIGH (expr)
				 >> (bitpos - HOST_BITS_PER_WIDE_INT));

      if (total_bytes > UNITS_PER_WORD)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words, bitpos;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the hosts long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if (total_bytes > len)
    return 0;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
    }
  return total_bytes;
}
/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_complex (const_tree expr, unsigned char *ptr, int len)
{
  int rsize, isize;
  tree part;

  part = TREE_REALPART (expr);
  rsize = native_encode_expr (part, ptr, len);
  if (rsize == 0)
    return 0;
  part = TREE_IMAGPART (expr);
  isize = native_encode_expr (part, ptr+rsize, len-rsize);
  if (isize != rsize)
    return 0;
  return rsize + isize;
}
/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_vector (const_tree expr, unsigned char *ptr, int len)
{
  int i, size, offset, count;
  tree itype, elem, elements;

  offset = 0;
  elements = TREE_VECTOR_CST_ELTS (expr);
  count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
  itype = TREE_TYPE (TREE_TYPE (expr));
  size = GET_MODE_SIZE (TYPE_MODE (itype));
  for (i = 0; i < count; i++)
    {
      if (elements)
	{
	  elem = TREE_VALUE (elements);
	  elements = TREE_CHAIN (elements);
	}
      else
	elem = NULL_TREE;

      if (elem)
	{
	  if (native_encode_expr (elem, ptr+offset, len-offset) != size)
	    return 0;
	}
      else
	{
	  if (offset + size > len)
	    return 0;
	  memset (ptr+offset, 0, size);
	}
      offset += size;
    }
  return offset;
}
/* Subroutine of native_encode_expr.  Encode the STRING_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_string (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  HOST_WIDE_INT total_bytes;

  if (TREE_CODE (type) != ARRAY_TYPE
      || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
      || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
      || !host_integerp (TYPE_SIZE_UNIT (type), 0))
    return 0;
  total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
  if (total_bytes > len)
    return 0;
  if (TREE_STRING_LENGTH (expr) < total_bytes)
    {
      memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
      memset (ptr + TREE_STRING_LENGTH (expr), 0,
	      total_bytes - TREE_STRING_LENGTH (expr));
    }
  else
    memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
  return total_bytes;
}
/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
   REAL_CST, COMPLEX_CST, VECTOR_CST or STRING_CST specified by EXPR
   into the buffer PTR of length LEN bytes.  Return the number of
   bytes placed in the buffer, or zero upon failure.  */

int
native_encode_expr (const_tree expr, unsigned char *ptr, int len)
{
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return native_encode_int (expr, ptr, len);

    case REAL_CST:
      return native_encode_real (expr, ptr, len);

    case COMPLEX_CST:
      return native_encode_complex (expr, ptr, len);

    case VECTOR_CST:
      return native_encode_vector (expr, ptr, len);

    case STRING_CST:
      return native_encode_string (expr, ptr, len);

    default:
      return 0;
    }
}
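/* Editor's sketch (illustrative only, not part of GCC; excluded from the
   build): the native_encode_* / native_interpret_* pair implement a
   byte-level round trip through the target's memory image of a constant.
   The helper name round_trip is hypothetical.  */
#if 0
static tree
round_trip (tree cst, tree as_type)
{
  unsigned char buf[64];                               /* target byte image */
  int len = native_encode_expr (cst, buf, sizeof buf); /* 0 on failure */
  return len ? native_interpret_expr (as_type, buf, len) : NULL_TREE;
}
#endif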
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;
  double_int result;

  if (total_bytes > len)
    return NULL_TREE;
  if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
    return NULL_TREE;

  result = double_int_zero;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (total_bytes > UNITS_PER_WORD)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      value = ptr[offset];

      if (bitpos < HOST_BITS_PER_WIDE_INT)
	result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
      else
	result.high |= (unsigned HOST_WIDE_INT) value
		       << (bitpos - HOST_BITS_PER_WIDE_INT);
    }

  return double_int_to_tree (type, result);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  int byte, offset, word, words, bitpos;
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the hosts long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_complex (tree type, const unsigned char *ptr, int len)
{
  tree etype, rpart, ipart;
  int size;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  if (size * 2 > len)
    return NULL_TREE;
  rpart = native_interpret_expr (etype, ptr, size);
  if (!rpart)
    return NULL_TREE;
  ipart = native_interpret_expr (etype, ptr+size, size);
  if (!ipart)
    return NULL_TREE;
  return build_complex (type, rpart, ipart);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_vector (tree type, const unsigned char *ptr, int len)
{
  tree etype, elem, elements;
  int i, size, count;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  count = TYPE_VECTOR_SUBPARTS (type);
  if (size * count > len)
    return NULL_TREE;

  elements = NULL_TREE;
  for (i = count - 1; i >= 0; i--)
    {
      elem = native_interpret_expr (etype, ptr+(i*size), size);
      if (!elem)
	return NULL_TREE;
      elements = tree_cons (NULL_TREE, elem, elements);
    }
  return build_vector (type, elements);
}
/* Subroutine of fold_view_convert_expr.  Interpret the contents of
   the buffer PTR of length LEN as a constant of type TYPE.  For
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
   return NULL_TREE.  */

tree
native_interpret_expr (tree type, const unsigned char *ptr, int len)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      return native_interpret_int (type, ptr, len);

    case REAL_TYPE:
      return native_interpret_real (type, ptr, len);

    case COMPLEX_TYPE:
      return native_interpret_complex (type, ptr, len);

    case VECTOR_TYPE:
      return native_interpret_vector (type, ptr, len);

    default:
      return NULL_TREE;
    }
}
/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */

static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}
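/* Editor's sketch (illustrative only, not part of GCC; excluded from the
   build): what folding a VIEW_CONVERT_EXPR of a constant corresponds to at
   the source level, namely reinterpreting the bytes at compile time.  */
#if 0
union u { float f; unsigned int i; };
unsigned int bits_before (void) { union u v; v.f = 1.0f; return v.i; }
unsigned int bits_after  (void) { return 0x3f800000u; } /* IEEE bits of 1.0f */
#endif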
/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
	{
	  t = build1 (NOP_EXPR, ptrtype, t);
	  SET_EXPR_LOCATION (t, loc);
	}
    }
  else if (TREE_CODE (t) == MEM_REF
	   && integer_zerop (TREE_OPERAND (t, 1)))
    return TREE_OPERAND (t, 0);
  else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
    {
      t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));

      if (TREE_TYPE (t) != ptrtype)
	t = fold_convert_loc (loc, ptrtype, t);
    }
  else
    {
      t = build1 (ADDR_EXPR, ptrtype, t);
      SET_EXPR_LOCATION (t, loc);
    }

  return t;
}
/* Build an expression for the address of T.  */

tree
build_fold_addr_expr_loc (location_t loc, tree t)
{
  tree ptrtype = build_pointer_type (TREE_TYPE (t));

  return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
}
7634 /* Fold a unary expression of code CODE and type TYPE with operand
7635 OP0. Return the folded expression if folding is successful.
7636 Otherwise, return NULL_TREE. */
7639 fold_unary_loc (location_t loc
, enum tree_code code
, tree type
, tree op0
)
7643 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
7645 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
7646 && TREE_CODE_LENGTH (code
) == 1);
7651 if (CONVERT_EXPR_CODE_P (code
)
7652 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
)
7654 /* Don't use STRIP_NOPS, because signedness of argument type
7656 STRIP_SIGN_NOPS (arg0
);
7660 /* Strip any conversions that don't change the mode. This
7661 is safe for every expression, except for a comparison
7662 expression because its signedness is derived from its
7665 Note that this is done as an internal manipulation within
7666 the constant folder, in order to find the simplest
7667 representation of the arguments so that their form can be
7668 studied. In any cases, the appropriate type conversions
7669 should be put back in the tree that will get out of the
7675 if (TREE_CODE_CLASS (code
) == tcc_unary
)
7677 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
7678 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7679 fold_build1_loc (loc
, code
, type
,
7680 fold_convert_loc (loc
, TREE_TYPE (op0
),
7681 TREE_OPERAND (arg0
, 1))));
7682 else if (TREE_CODE (arg0
) == COND_EXPR
)
7684 tree arg01
= TREE_OPERAND (arg0
, 1);
7685 tree arg02
= TREE_OPERAND (arg0
, 2);
7686 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
7687 arg01
= fold_build1_loc (loc
, code
, type
,
7688 fold_convert_loc (loc
,
7689 TREE_TYPE (op0
), arg01
));
7690 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
7691 arg02
= fold_build1_loc (loc
, code
, type
,
7692 fold_convert_loc (loc
,
7693 TREE_TYPE (op0
), arg02
));
7694 tem
= fold_build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7697 /* If this was a conversion, and all we did was to move into
7698 inside the COND_EXPR, bring it back out. But leave it if
7699 it is a conversion from integer to integer and the
7700 result precision is no wider than a word since such a
7701 conversion is cheap and may be optimized away by combine,
7702 while it couldn't if it were outside the COND_EXPR. Then return
7703 so we don't get into an infinite recursion loop taking the
7704 conversion out and then back in. */
7706 if ((CONVERT_EXPR_CODE_P (code
)
7707 || code
== NON_LVALUE_EXPR
)
7708 && TREE_CODE (tem
) == COND_EXPR
7709 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
7710 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
7711 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
7712 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
7713 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
7714 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
7715 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7717 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
7718 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
7719 || flag_syntax_only
))
7721 tem
= build1 (code
, type
,
7723 TREE_TYPE (TREE_OPERAND
7724 (TREE_OPERAND (tem
, 1), 0)),
7725 TREE_OPERAND (tem
, 0),
7726 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
7727 TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)));
7728 SET_EXPR_LOCATION (tem
, loc
);
7732 else if (COMPARISON_CLASS_P (arg0
))
7734 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
7736 arg0
= copy_node (arg0
);
7737 TREE_TYPE (arg0
) = type
;
7740 else if (TREE_CODE (type
) != INTEGER_TYPE
)
7741 return fold_build3_loc (loc
, COND_EXPR
, type
, arg0
,
7742 fold_build1_loc (loc
, code
, type
,
7744 fold_build1_loc (loc
, code
, type
,
7745 integer_zero_node
));
7752 /* Re-association barriers around constants and other re-association
7753 barriers can be removed. */
7754 if (CONSTANT_CLASS_P (op0
)
7755 || TREE_CODE (op0
) == PAREN_EXPR
)
7756 return fold_convert_loc (loc
, type
, op0
);
7761 case FIX_TRUNC_EXPR
:
7762 if (TREE_TYPE (op0
) == type
)
7765 /* If we have (type) (a CMP b) and type is an integral type, return
7766 new expression involving the new type. */
7767 if (COMPARISON_CLASS_P (op0
) && INTEGRAL_TYPE_P (type
))
7768 return fold_build2_loc (loc
, TREE_CODE (op0
), type
, TREE_OPERAND (op0
, 0),
7769 TREE_OPERAND (op0
, 1));
7771 /* Handle cases of two conversions in a row. */
7772 if (CONVERT_EXPR_P (op0
))
7774 tree inside_type
= TREE_TYPE (TREE_OPERAND (op0
, 0));
7775 tree inter_type
= TREE_TYPE (op0
);
7776 int inside_int
= INTEGRAL_TYPE_P (inside_type
);
7777 int inside_ptr
= POINTER_TYPE_P (inside_type
);
7778 int inside_float
= FLOAT_TYPE_P (inside_type
);
7779 int inside_vec
= TREE_CODE (inside_type
) == VECTOR_TYPE
;
7780 unsigned int inside_prec
= TYPE_PRECISION (inside_type
);
7781 int inside_unsignedp
= TYPE_UNSIGNED (inside_type
);
7782 int inter_int
= INTEGRAL_TYPE_P (inter_type
);
7783 int inter_ptr
= POINTER_TYPE_P (inter_type
);
7784 int inter_float
= FLOAT_TYPE_P (inter_type
);
7785 int inter_vec
= TREE_CODE (inter_type
) == VECTOR_TYPE
;
7786 unsigned int inter_prec
= TYPE_PRECISION (inter_type
);
7787 int inter_unsignedp
= TYPE_UNSIGNED (inter_type
);
7788 int final_int
= INTEGRAL_TYPE_P (type
);
7789 int final_ptr
= POINTER_TYPE_P (type
);
7790 int final_float
= FLOAT_TYPE_P (type
);
7791 int final_vec
= TREE_CODE (type
) == VECTOR_TYPE
;
7792 unsigned int final_prec
= TYPE_PRECISION (type
);
7793 int final_unsignedp
= TYPE_UNSIGNED (type
);
7795 /* In addition to the cases of two conversions in a row
7796 handled below, if we are converting something to its own
7797 type via an object of identical or wider precision, neither
7798 conversion is needed. */
7799 if (TYPE_MAIN_VARIANT (inside_type
) == TYPE_MAIN_VARIANT (type
)
7800 && (((inter_int
|| inter_ptr
) && final_int
)
7801 || (inter_float
&& final_float
))
7802 && inter_prec
>= final_prec
)
7803 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7805 /* Likewise, if the intermediate and initial types are either both
7806 float or both integer, we don't need the middle conversion if the
7807 former is wider than the latter and doesn't change the signedness
7808 (for integers). Avoid this if the final type is a pointer since
7809 then we sometimes need the middle conversion. Likewise if the
7810 final type has a precision not equal to the size of its mode. */
7811 if (((inter_int
&& inside_int
)
7812 || (inter_float
&& inside_float
)
7813 || (inter_vec
&& inside_vec
))
7814 && inter_prec
>= inside_prec
7815 && (inter_float
|| inter_vec
7816 || inter_unsignedp
== inside_unsignedp
)
7817 && ! (final_prec
!= GET_MODE_BITSIZE (TYPE_MODE (type
))
7818 && TYPE_MODE (type
) == TYPE_MODE (inter_type
))
7820 && (! final_vec
|| inter_prec
== inside_prec
))
7821 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7823 /* If we have a sign-extension of a zero-extended value, we can
7824 replace that by a single zero-extension. */
7825 if (inside_int
&& inter_int
&& final_int
7826 && inside_prec
< inter_prec
&& inter_prec
< final_prec
7827 && inside_unsignedp
&& !inter_unsignedp
)
7828 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7830 /* Two conversions in a row are not needed unless:
7831 - some conversion is floating-point (overstrict for now), or
7832 - some conversion is a vector (overstrict for now), or
7833 - the intermediate type is narrower than both initial and
7835 - the intermediate type and innermost type differ in signedness,
7836 and the outermost type is wider than the intermediate, or
7837 - the initial type is a pointer type and the precisions of the
7838 intermediate and final types differ, or
7839 - the final type is a pointer type and the precisions of the
7840 initial and intermediate types differ. */
7841 if (! inside_float
&& ! inter_float
&& ! final_float
7842 && ! inside_vec
&& ! inter_vec
&& ! final_vec
7843 && (inter_prec
>= inside_prec
|| inter_prec
>= final_prec
)
7844 && ! (inside_int
&& inter_int
7845 && inter_unsignedp
!= inside_unsignedp
7846 && inter_prec
< final_prec
)
7847 && ((inter_unsignedp
&& inter_prec
> inside_prec
)
7848 == (final_unsignedp
&& final_prec
> inter_prec
))
7849 && ! (inside_ptr
&& inter_prec
!= final_prec
)
7850 && ! (final_ptr
&& inside_prec
!= inter_prec
)
7851 && ! (final_prec
!= GET_MODE_BITSIZE (TYPE_MODE (type
))
7852 && TYPE_MODE (type
) == TYPE_MODE (inter_type
)))
7853 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7856 /* Handle (T *)&A.B.C for A being of type T and B and C
7857 living at offset zero. This occurs frequently in
7858 C++ upcasting and then accessing the base. */
7859 if (TREE_CODE (op0
) == ADDR_EXPR
7860 && POINTER_TYPE_P (type
)
7861 && handled_component_p (TREE_OPERAND (op0
, 0)))
7863 HOST_WIDE_INT bitsize
, bitpos
;
7865 enum machine_mode mode
;
7866 int unsignedp
, volatilep
;
7867 tree base
= TREE_OPERAND (op0
, 0);
7868 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
,
7869 &mode
, &unsignedp
, &volatilep
, false);
7870 /* If the reference was to a (constant) zero offset, we can use
7871 the address of the base if it has the same base type
7872 as the result type and the pointer type is unqualified. */
7873 if (! offset
&& bitpos
== 0
7874 && (TYPE_MAIN_VARIANT (TREE_TYPE (type
))
7875 == TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
7876 && TYPE_QUALS (type
) == TYPE_UNQUALIFIED
)
7877 return fold_convert_loc (loc
, type
,
7878 build_fold_addr_expr_loc (loc
, base
));
7881 if (TREE_CODE (op0
) == MODIFY_EXPR
7882 && TREE_CONSTANT (TREE_OPERAND (op0
, 1))
7883 /* Detect assigning a bitfield. */
7884 && !(TREE_CODE (TREE_OPERAND (op0
, 0)) == COMPONENT_REF
7886 (TREE_OPERAND (TREE_OPERAND (op0
, 0), 1))))
7888 /* Don't leave an assignment inside a conversion
7889 unless assigning a bitfield. */
7890 tem
= fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 1));
7891 /* First do the assignment, then return converted constant. */
7892 tem
= build2 (COMPOUND_EXPR
, TREE_TYPE (tem
), op0
, tem
);
7893 TREE_NO_WARNING (tem
) = 1;
7894 TREE_USED (tem
) = 1;
7895 SET_EXPR_LOCATION (tem
, loc
);
7899 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7900 constants (if x has signed type, the sign bit cannot be set
7901 in c). This folds extension into the BIT_AND_EXPR.
7902 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7903 very likely don't have maximal range for their precision and this
7904 transformation effectively doesn't preserve non-maximal ranges. */
7905 if (TREE_CODE (type
) == INTEGER_TYPE
7906 && TREE_CODE (op0
) == BIT_AND_EXPR
7907 && TREE_CODE (TREE_OPERAND (op0
, 1)) == INTEGER_CST
)
7909 tree and_expr
= op0
;
7910 tree and0
= TREE_OPERAND (and_expr
, 0);
7911 tree and1
= TREE_OPERAND (and_expr
, 1);
7914 if (TYPE_UNSIGNED (TREE_TYPE (and_expr
))
7915 || (TYPE_PRECISION (type
)
7916 <= TYPE_PRECISION (TREE_TYPE (and_expr
))))
7918 else if (TYPE_PRECISION (TREE_TYPE (and1
))
7919 <= HOST_BITS_PER_WIDE_INT
7920 && host_integerp (and1
, 1))
7922 unsigned HOST_WIDE_INT cst
;
7924 cst
= tree_low_cst (and1
, 1);
7925 cst
&= (HOST_WIDE_INT
) -1
7926 << (TYPE_PRECISION (TREE_TYPE (and1
)) - 1);
7927 change
= (cst
== 0);
7928 #ifdef LOAD_EXTEND_OP
7930 && !flag_syntax_only
7931 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0
)))
7934 tree uns
= unsigned_type_for (TREE_TYPE (and0
));
7935 and0
= fold_convert_loc (loc
, uns
, and0
);
7936 and1
= fold_convert_loc (loc
, uns
, and1
);
7942 tem
= force_fit_type_double (type
, tree_to_double_int (and1
),
7943 0, TREE_OVERFLOW (and1
));
7944 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
7945 fold_convert_loc (loc
, type
, and0
), tem
);
7949 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7950 when one of the new casts will fold away. Conservatively we assume
7951 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7952 if (POINTER_TYPE_P (type
)
7953 && TREE_CODE (arg0
) == POINTER_PLUS_EXPR
7954 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
7955 || TREE_CODE (TREE_OPERAND (arg0
, 0)) == NOP_EXPR
7956 || TREE_CODE (TREE_OPERAND (arg0
, 1)) == NOP_EXPR
))
7958 tree arg00
= TREE_OPERAND (arg0
, 0);
7959 tree arg01
= TREE_OPERAND (arg0
, 1);
7961 return fold_build2_loc (loc
,
7962 TREE_CODE (arg0
), type
,
7963 fold_convert_loc (loc
, type
, arg00
),
7964 fold_convert_loc (loc
, sizetype
, arg01
));
7967 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7968 of the same precision, and X is an integer type not narrower than
7969 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7970 if (INTEGRAL_TYPE_P (type
)
7971 && TREE_CODE (op0
) == BIT_NOT_EXPR
7972 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7973 && CONVERT_EXPR_P (TREE_OPERAND (op0
, 0))
7974 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (op0
)))
7976 tem
= TREE_OPERAND (TREE_OPERAND (op0
, 0), 0);
7977 if (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7978 && TYPE_PRECISION (type
) <= TYPE_PRECISION (TREE_TYPE (tem
)))
7979 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
7980 fold_convert_loc (loc
, type
, tem
));
7983 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7984 type of X and Y (integer types only). */
7985 if (INTEGRAL_TYPE_P (type
)
7986 && TREE_CODE (op0
) == MULT_EXPR
7987 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7988 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
7990 /* Be careful not to introduce new overflows. */
7992 if (TYPE_OVERFLOW_WRAPS (type
))
7995 mult_type
= unsigned_type_for (type
);
7997 if (TYPE_PRECISION (mult_type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
7999 tem
= fold_build2_loc (loc
, MULT_EXPR
, mult_type
,
8000 fold_convert_loc (loc
, mult_type
,
8001 TREE_OPERAND (op0
, 0)),
8002 fold_convert_loc (loc
, mult_type
,
8003 TREE_OPERAND (op0
, 1)));
8004 return fold_convert_loc (loc
, type
, tem
);
8008 tem
= fold_convert_const (code
, type
, op0
);
8009 return tem
? tem
: NULL_TREE
;
8011 case ADDR_SPACE_CONVERT_EXPR
:
8012 if (integer_zerop (arg0
))
8013 return fold_convert_const (code
, type
, arg0
);
8016 case FIXED_CONVERT_EXPR
:
8017 tem
= fold_convert_const (code
, type
, arg0
);
8018 return tem
? tem
: NULL_TREE
;
    case VIEW_CONVERT_EXPR:
      if (TREE_TYPE (op0) == type)
        return op0;
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
        return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
                                type, TREE_OPERAND (op0, 0));
      if (TREE_CODE (op0) == MEM_REF)
        return fold_build2_loc (loc, MEM_REF, type,
                                TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));

      /* For integral conversions with the same precision or pointer
         conversions use a NOP_EXPR instead.  */
      if ((INTEGRAL_TYPE_P (type)
           || POINTER_TYPE_P (type))
          && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
              || POINTER_TYPE_P (TREE_TYPE (op0)))
          && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
        return fold_convert_loc (loc, type, op0);

      /* Strip inner integral conversions that do not change the precision.  */
      if (CONVERT_EXPR_P (op0)
          && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
              || POINTER_TYPE_P (TREE_TYPE (op0)))
          && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
              || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
          && (TYPE_PRECISION (TREE_TYPE (op0))
              == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
        return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
                                type, TREE_OPERAND (op0, 0));

      return fold_view_convert_expr (type, op0);
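
      /* Illustrative note (commentary added here, an assumption rather
         than a new transformation): a VIEW_CONVERT_EXPR reinterprets a
         value's bits, much like reading the other member of a union of
         int and float, which is why a same-precision integer or pointer
         view-convert above degenerates to an ordinary NOP_EXPR
         conversion.  */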
    case NEGATE_EXPR:
      tem = fold_negate_expr (loc, arg0);
      if (tem)
        return fold_convert_loc (loc, type, tem);
      return NULL_TREE;
    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
        return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
        return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
               && TREE_CODE (type) == REAL_TYPE)
        {
          tree targ0 = strip_float_extensions (arg0);
          if (targ0 != arg0)
            return fold_convert_loc (loc, type,
                                     fold_build1_loc (loc, ABS_EXPR,
                                                      TREE_TYPE (targ0),
                                                      targ0));
        }
      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
      else if (TREE_CODE (arg0) == ABS_EXPR)
        return arg0;
      else if (tree_expr_nonnegative_p (arg0))
        return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = fold_strip_sign_ops (arg0);
          if (tem)
            return fold_build1_loc (loc, ABS_EXPR, type,
                                    fold_convert_loc (loc, type, tem));
        }
      return NULL_TREE;
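
      /* Worked examples (illustrative): fabs ((double) f) folds to
         (double) fabsf (f) for a float f, and stripping sign ops turns
         an expression like fabs (-x) into fabs (x), since the sign of
         the argument cannot affect the result.  */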
    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
        {
          tree itype = TREE_TYPE (type);
          tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
          tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
                                  negate_expr (ipart));
        }
      if (TREE_CODE (arg0) == COMPLEX_CST)
        {
          tree itype = TREE_TYPE (type);
          tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
          tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
          return build_complex (type, rpart, negate_expr (ipart));
        }
      if (TREE_CODE (arg0) == CONJ_EXPR)
        return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      return NULL_TREE;
    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
        return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
                                build_int_cst (type, 1));
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
               && ((TREE_CODE (arg0) == MINUS_EXPR
                    && integer_onep (TREE_OPERAND (arg0, 1)))
                   || (TREE_CODE (arg0) == PLUS_EXPR
                       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
        return fold_build1_loc (loc, NEGATE_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
               && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
                                         fold_convert_loc (loc, type,
                                                           TREE_OPERAND (arg0, 0)))))
        return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
               && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
                                         fold_convert_loc (loc, type,
                                                           TREE_OPERAND (arg0, 1)))))
        return fold_build2_loc (loc, BIT_XOR_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)), tem);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
        {
          tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
          int count = TYPE_VECTOR_SUBPARTS (type), i;

          for (i = 0; i < count; i++)
            {
              if (elements)
                {
                  elem = TREE_VALUE (elements);
                  elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
                  if (elem == NULL_TREE)
                    break;
                  elements = TREE_CHAIN (elements);
                }
              else
                elem = build_int_cst (TREE_TYPE (type), -1);
              list = tree_cons (NULL_TREE, elem, list);
            }
          if (i == count)
            return build_vector (type, nreverse (list));
        }

      return NULL_TREE;
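
      /* Worked examples (illustrative, two's complement): ~(-a) folds
         to a - 1, ~(a - 1) folds to -a, and on a VECTOR_CST such as
         { 0, -1, 5 } the element-wise fold above yields { -1, 0, -6 }.  */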
    case TRUTH_NOT_EXPR:
      /* The argument to invert_truthvalue must have Boolean type.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
        arg0 = fold_convert_loc (loc, boolean_type_node, arg0);

      /* Note that the operand of this must be an int
         and its values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language,
         but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (loc, arg0);
      if (!tem)
        return NULL_TREE;
      return fold_convert_loc (loc, type, tem);
    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
                                     TREE_OPERAND (arg0, 1));
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert_loc (loc, type, TREE_REALPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
                                 fold_build1_loc (loc, REALPART_EXPR, itype,
                                                  TREE_OPERAND (arg0, 0)),
                                 fold_build1_loc (loc, REALPART_EXPR, itype,
                                                  TREE_OPERAND (arg0, 1)));
          return fold_convert_loc (loc, type, tem);
        }
      if (TREE_CODE (arg0) == CONJ_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build1_loc (loc, REALPART_EXPR, itype,
                                 TREE_OPERAND (arg0, 0));
          return fold_convert_loc (loc, type, tem);
        }
      if (TREE_CODE (arg0) == CALL_EXPR)
        {
          tree fn = get_callee_fndecl (arg0);
          if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
            switch (DECL_FUNCTION_CODE (fn))
              {
              CASE_FLT_FN (BUILT_IN_CEXPI):
                fn = mathfn_built_in (type, BUILT_IN_COS);
                if (fn)
                  return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
                break;

              default:
                break;
              }
        }
      return NULL_TREE;
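
      /* Worked example (illustrative): cexpi (x) computes
         cos (x) + i*sin (x), so __real__ cexpi (x) folds to cos (x)
         via the type-matching builtin looked up above.  */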
    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
        return build_zero_cst (type);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
        return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
                                     TREE_OPERAND (arg0, 0));
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
                                 fold_build1_loc (loc, IMAGPART_EXPR, itype,
                                                  TREE_OPERAND (arg0, 0)),
                                 fold_build1_loc (loc, IMAGPART_EXPR, itype,
                                                  TREE_OPERAND (arg0, 1)));
          return fold_convert_loc (loc, type, tem);
        }
      if (TREE_CODE (arg0) == CONJ_EXPR)
        {
          tree itype = TREE_TYPE (TREE_TYPE (arg0));
          tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
          return fold_convert_loc (loc, type, negate_expr (tem));
        }
      if (TREE_CODE (arg0) == CALL_EXPR)
        {
          tree fn = get_callee_fndecl (arg0);
          if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
            switch (DECL_FUNCTION_CODE (fn))
              {
              CASE_FLT_FN (BUILT_IN_CEXPI):
                fn = mathfn_built_in (type, BUILT_IN_SIN);
                if (fn)
                  return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
                break;

              default:
                break;
              }
        }
      return NULL_TREE;
    case INDIRECT_REF:
      /* Fold *&X to X if X is an lvalue.  */
      if (TREE_CODE (op0) == ADDR_EXPR)
        {
          tree op00 = TREE_OPERAND (op0, 0);
          if ((TREE_CODE (op00) == VAR_DECL
               || TREE_CODE (op00) == PARM_DECL
               || TREE_CODE (op00) == RESULT_DECL)
              && !TREE_READONLY (op00))
            return op00;
        }
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
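
/* Illustrative note: the INDIRECT_REF case above folds *&x to x for a
   writable variable, parameter or result decl; the !TREE_READONLY check
   deliberately leaves read-only decls to other machinery.  */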
/* If the operation was a conversion do _not_ mark a resulting constant
   with TREE_OVERFLOW if the original constant was not.  These conversions
   have implementation defined behavior and retaining the TREE_OVERFLOW
   flag here would confuse later passes such as VRP.  */

tree
fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
                                tree type, tree op0)
{
  tree res = fold_unary_loc (loc, code, type, op0);
  if (res
      && TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (op0) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (code))
    TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);

  return res;
}
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code compl_code;

  if (code == MIN_EXPR)
    compl_code = MAX_EXPR;
  else if (code == MAX_EXPR)
    compl_code = MIN_EXPR;
  else
    gcc_unreachable ();

  /* MIN (MAX (a, b), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));

  /* MIN (MAX (b, a), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));

  /* MIN (a, MAX (a, b)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));

  /* MIN (a, MAX (b, a)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));

  return NULL_TREE;
}
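
/* Worked example (illustrative): MIN (MAX (x, y), y) folds to y by the
   first rule above; omit_one_operand_loc keeps any side effects of the
   discarded operand x, wrapping them in a COMPOUND_EXPR if needed.  */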
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
                                 tree arg0, tree arg1,
                                 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;
  bool swap = false;

  /* Match A +- CST code arg1 and CST code arg1.  We can change the
     first form only if overflow is undefined.  */
  if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
         /* In principle pointers also have undefined overflow behavior,
            but that causes problems elsewhere.  */
         && !POINTER_TYPE_P (TREE_TYPE (arg0))
         && (code0 == MINUS_EXPR
             || code0 == PLUS_EXPR)
         && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        || code0 == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  if (code0 == INTEGER_CST)
    cst0 = arg0;
  else
    cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  if (code0 == INTEGER_CST)
    {
      /* CST <= arg1  ->  CST-1 < arg1.  */
      if (code == LE_EXPR && sgn0 == 1)
        code = LT_EXPR;
      /* -CST < arg1  ->  -CST-1 <= arg1.  */
      else if (code == LT_EXPR && sgn0 == -1)
        code = LE_EXPR;
      /* CST > arg1  ->  CST-1 >= arg1.  */
      else if (code == GT_EXPR && sgn0 == 1)
        code = GE_EXPR;
      /* -CST >= arg1  ->  -CST-1 > arg1.  */
      else if (code == GE_EXPR && sgn0 == -1)
        code = GT_EXPR;
      else
        return NULL_TREE;
      /* arg1 code' CST' might be more canonical.  */
      swap = true;
    }
  else
    {
      /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
      if (code == LT_EXPR
          && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
        code = LE_EXPR;
      /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
      else if (code == GT_EXPR
               && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
        code = GE_EXPR;
      /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
      else if (code == LE_EXPR
               && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
        code = LT_EXPR;
      /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
      else if (code == GE_EXPR
               && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
        code = GT_EXPR;
      else
        return NULL_TREE;
      *strict_overflow_p = true;
    }

  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its types range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
      && ((sgn0 == 1
           && TYPE_MIN_VALUE (TREE_TYPE (cst0))
           && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
          || (sgn0 == -1
              && TYPE_MAX_VALUE (TREE_TYPE (cst0))
              && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
    /* We cannot swap the comparison here as that would cause us to
       endlessly recurse.  */
    return NULL_TREE;

  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
                       cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
  if (code0 != INTEGER_CST)
    t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);

  /* If swapping might yield to a more canonical form, do so.  */
  if (swap)
    return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
  else
    return fold_build2_loc (loc, code, type, t, arg1);
}
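
/* Worked example (illustrative, signed int with undefined overflow):
   x + 2 > 4 is canonicalized by the A + CST > arg1 rule above to
   x + 1 >= 4, shrinking the constant in arg0; a sole constant such as
   3 <= y becomes 2 < y and is then swapped so the constant ends up
   last, i.e. y > 2.  */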
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
                               tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
                                  "when reducing constant in comparison");

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
                                       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
        fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
                                       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}
/* Return whether BASE + OFFSET + BITPOS may wrap around the address
   space.  This is used to avoid issuing overflow warnings for
   expressions like &p->x which can not wrap.  */

static bool
pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
{
  unsigned HOST_WIDE_INT offset_low, total_low;
  HOST_WIDE_INT size, offset_high, total_high;

  if (!POINTER_TYPE_P (TREE_TYPE (base)))
    return true;

  if (bitpos < 0)
    return true;

  if (offset == NULL_TREE)
    {
      offset_low = 0;
      offset_high = 0;
    }
  else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
    return true;
  else
    {
      offset_low = TREE_INT_CST_LOW (offset);
      offset_high = TREE_INT_CST_HIGH (offset);
    }

  if (add_double_with_sign (offset_low, offset_high,
                            bitpos / BITS_PER_UNIT, 0,
                            &total_low, &total_high,
                            true))
    return true;

  if (total_high != 0)
    return true;

  size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
  if (size <= 0)
    return true;

  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
     array.  */
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      HOST_WIDE_INT base_size;

      base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
      if (base_size > 0 && size < base_size)
        size = base_size;
    }

  return total_low > (unsigned HOST_WIDE_INT) size;
}
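
/* Worked example (illustrative, assuming 4-byte int): for
   struct s { int a; int b; } *p, the address &p->b is p plus byte
   offset 4 with no variable offset; 4 is well below the size bound
   computed above, so pointer_may_wrap_p returns false and no spurious
   overflow warning is emitted for comparisons involving &p->b.  */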
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should call fold_binary.  Fold a comparison with
   tree code CODE and type TYPE with operands OP0 and OP1.  Return
   the folded comparison or NULL_TREE.  */

static tree
fold_comparison (location_t loc, enum tree_code code, tree type,
                 tree op0, tree op1)
{
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  tem = fold_relational_const (code, type, arg0, arg1);
  if (tem != NULL_TREE)
    return tem;

  /* If one arg is a real or integer constant, put it last.  */
  if (tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);

  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
          && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
      && (TREE_CODE (arg1) == INTEGER_CST
          && !TREE_OVERFLOW (arg1)))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;
      tree variable = TREE_OPERAND (arg0, 0);
      tree lhs;
      int lhs_add;
      lhs_add = TREE_CODE (arg0) != PLUS_EXPR;

      lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
                             TREE_TYPE (arg1), const2, const1);

      /* If the constant operation overflowed this can be
         simplified as a comparison against INT_MAX/INT_MIN.  */
      if (TREE_CODE (lhs) == INTEGER_CST
          && TREE_OVERFLOW (lhs))
        {
          int const1_sgn = tree_int_cst_sgn (const1);
          enum tree_code code2 = code;

          /* Get the sign of the constant on the lhs if the
             operation were VARIABLE + CONST1.  */
          if (TREE_CODE (arg0) == MINUS_EXPR)
            const1_sgn = -const1_sgn;

          /* The sign of the constant determines if we overflowed
             INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
             Canonicalize to the INT_MIN overflow by swapping the comparison
             if necessary.  */
          if (const1_sgn == -1)
            code2 = swap_tree_comparison (code);

          /* We now can look at the canonicalized case
               VARIABLE + 1  CODE2  INT_MIN
             and decide on the result.  */
          if (code2 == LT_EXPR
              || code2 == LE_EXPR
              || code2 == EQ_EXPR)
            return omit_one_operand_loc (loc, type, boolean_false_node, variable);
          else if (code2 == NE_EXPR
                   || code2 == GE_EXPR
                   || code2 == GT_EXPR)
            return omit_one_operand_loc (loc, type, boolean_true_node, variable);
        }

      if (TREE_CODE (lhs) == TREE_CODE (arg1)
          && (TREE_CODE (lhs) != INTEGER_CST
              || !TREE_OVERFLOW (lhs)))
        {
          fold_overflow_warning ("assuming signed overflow does not occur "
                                 "when changing X +- C1 cmp C2 to "
                                 "X cmp C1 +- C2",
                                 WARN_STRICT_OVERFLOW_COMPARISON);
          return fold_build2_loc (loc, code, type, variable, lhs);
        }
    }
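
  /* Worked example (illustrative, undefined signed overflow):
     x + 10 < 3 becomes x < -7; when the revised constant overflows, as
     in x - 1 < INT_MAX, the comparison folds to a constant through the
     INT_MIN/INT_MAX analysis above (here to true, since x - 1 could
     only reach INT_MAX by overflowing).  */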
  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
          || TREE_CODE (arg1) == ADDR_EXPR
          || TREE_CODE (arg0) == POINTER_PLUS_EXPR
          || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      enum machine_mode mode;
      int volatilep, unsignedp;
      bool indirect_base0 = false, indirect_base1 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
         get_inner_reference, but put it back by stripping INDIRECT_REF
         off the base object if possible.  indirect_baseN will be true
         if baseN is not an address but refers to the object itself.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
        {
          base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
                                       &bitsize, &bitpos0, &offset0, &mode,
                                       &unsignedp, &volatilep, false);
          if (TREE_CODE (base0) == INDIRECT_REF)
            base0 = TREE_OPERAND (base0, 0);
          else
            indirect_base0 = true;
        }
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
        {
          base0 = TREE_OPERAND (arg0, 0);
          STRIP_SIGN_NOPS (base0);
          if (TREE_CODE (base0) == ADDR_EXPR)
            {
              base0 = TREE_OPERAND (base0, 0);
              indirect_base0 = true;
            }
          offset0 = TREE_OPERAND (arg0, 1);
        }

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
        {
          base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
                                       &bitsize, &bitpos1, &offset1, &mode,
                                       &unsignedp, &volatilep, false);
          if (TREE_CODE (base1) == INDIRECT_REF)
            base1 = TREE_OPERAND (base1, 0);
          else
            indirect_base1 = true;
        }
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
        {
          base1 = TREE_OPERAND (arg1, 0);
          STRIP_SIGN_NOPS (base1);
          if (TREE_CODE (base1) == ADDR_EXPR)
            {
              base1 = TREE_OPERAND (base1, 0);
              indirect_base1 = true;
            }
          offset1 = TREE_OPERAND (arg1, 1);
        }

      /* A local variable can never be pointed to by
         the default SSA name of an incoming parameter.  */
      if ((TREE_CODE (arg0) == ADDR_EXPR
           && indirect_base0
           && TREE_CODE (base0) == VAR_DECL
           && auto_var_in_fn_p (base0, current_function_decl)
           && !indirect_base1
           && TREE_CODE (base1) == SSA_NAME
           && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
           && SSA_NAME_IS_DEFAULT_DEF (base1))
          || (TREE_CODE (arg1) == ADDR_EXPR
              && indirect_base1
              && TREE_CODE (base1) == VAR_DECL
              && auto_var_in_fn_p (base1, current_function_decl)
              && !indirect_base0
              && TREE_CODE (base0) == SSA_NAME
              && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
              && SSA_NAME_IS_DEFAULT_DEF (base0)))
        {
          if (code == NE_EXPR)
            return constant_boolean_node (1, type);
          else if (code == EQ_EXPR)
            return constant_boolean_node (0, type);
        }
      /* If we have equivalent bases we might be able to simplify.  */
      else if (indirect_base0 == indirect_base1
               && operand_equal_p (base0, base1, 0))
        {
          /* We can fold this expression to a constant if the non-constant
             offset parts are equal.  */
          if ((offset0 == offset1
               || (offset0 && offset1
                   && operand_equal_p (offset0, offset1, 0)))
              && (code == EQ_EXPR
                  || code == NE_EXPR
                  || POINTER_TYPE_OVERFLOW_UNDEFINED))
            {
              if (code != EQ_EXPR
                  && code != NE_EXPR
                  && bitpos0 != bitpos1
                  && (pointer_may_wrap_p (base0, offset0, bitpos0)
                      || pointer_may_wrap_p (base1, offset1, bitpos1)))
                fold_overflow_warning (("assuming pointer wraparound does not "
                                        "occur when comparing P +- C1 with "
                                        "P +- C2"),
                                       WARN_STRICT_OVERFLOW_CONDITIONAL);

              switch (code)
                {
                case EQ_EXPR:
                  return constant_boolean_node (bitpos0 == bitpos1, type);
                case NE_EXPR:
                  return constant_boolean_node (bitpos0 != bitpos1, type);
                case LT_EXPR:
                  return constant_boolean_node (bitpos0 < bitpos1, type);
                case LE_EXPR:
                  return constant_boolean_node (bitpos0 <= bitpos1, type);
                case GE_EXPR:
                  return constant_boolean_node (bitpos0 >= bitpos1, type);
                case GT_EXPR:
                  return constant_boolean_node (bitpos0 > bitpos1, type);
                default:;
                }
            }
          /* We can simplify the comparison to a comparison of the variable
             offset parts if the constant offset parts are equal.
             Be careful to use signed size type here because otherwise we
             mess with array offsets in the wrong way.  This is possible
             because pointer arithmetic is restricted to retain within an
             object and overflow on pointer differences is undefined as of
             6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
          else if (bitpos0 == bitpos1
                   && ((code == EQ_EXPR || code == NE_EXPR)
                       || POINTER_TYPE_OVERFLOW_UNDEFINED))
            {
              /* By converting to signed size type we cover middle-end pointer
                 arithmetic which operates on unsigned pointer types of size
                 type size and ARRAY_REF offsets which are properly sign or
                 zero extended from their type in case it is narrower than
                 size type.  */
              if (offset0 == NULL_TREE)
                offset0 = build_int_cst (ssizetype, 0);
              else
                offset0 = fold_convert_loc (loc, ssizetype, offset0);
              if (offset1 == NULL_TREE)
                offset1 = build_int_cst (ssizetype, 0);
              else
                offset1 = fold_convert_loc (loc, ssizetype, offset1);

              if (code != EQ_EXPR
                  && code != NE_EXPR
                  && (pointer_may_wrap_p (base0, offset0, bitpos0)
                      || pointer_may_wrap_p (base1, offset1, bitpos1)))
                fold_overflow_warning (("assuming pointer wraparound does not "
                                        "occur when comparing P +- C1 with "
                                        "P +- C2"),
                                       WARN_STRICT_OVERFLOW_COMPARISON);

              return fold_build2_loc (loc, code, type, offset0, offset1);
            }
        }
      /* For non-equal bases we can simplify if they are addresses
         of local binding decls or constants.  */
      else if (indirect_base0 && indirect_base1
               /* We know that !operand_equal_p (base0, base1, 0)
                  because the if condition was false.  But make
                  sure two decls are not the same.  */
               && base0 != base1
               && TREE_CODE (arg0) == ADDR_EXPR
               && TREE_CODE (arg1) == ADDR_EXPR
               && (((TREE_CODE (base0) == VAR_DECL
                     || TREE_CODE (base0) == PARM_DECL)
                    && (targetm.binds_local_p (base0)
                        || CONSTANT_CLASS_P (base1)))
                   || CONSTANT_CLASS_P (base0))
               && (((TREE_CODE (base1) == VAR_DECL
                     || TREE_CODE (base1) == PARM_DECL)
                    && (targetm.binds_local_p (base1)
                        || CONSTANT_CLASS_P (base0)))
                   || CONSTANT_CLASS_P (base1)))
        {
          if (code == EQ_EXPR)
            return omit_two_operands_loc (loc, type, boolean_false_node,
                                          arg0, arg1);
          else if (code == NE_EXPR)
            return omit_two_operands_loc (loc, type, boolean_true_node,
                                          arg0, arg1);
        }
      /* For equal offsets we can simplify to a comparison of the
         base addresses.  */
      else if (bitpos0 == bitpos1
               && (indirect_base0
                   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
               && (indirect_base1
                   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
               && ((offset0 == offset1)
                   || (offset0 && offset1
                       && operand_equal_p (offset0, offset1, 0))))
        {
          if (indirect_base0)
            base0 = build_fold_addr_expr_loc (loc, base0);
          if (indirect_base1)
            base1 = build_fold_addr_expr_loc (loc, base1);
          return fold_build2_loc (loc, code, type, base0, base1);
        }
    }
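
  /* Worked examples (illustrative): for int a[4], &a[1] < &a[2]
     compares the constant byte offsets 4 < 8 and folds to true (for
     ordering this relies on pointer wraparound being undefined), and
     for two distinct local decls x and y, &x == &y folds to false.  */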
  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
                                      "occur when combining constants around "
                                      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
         of lower absolute value than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
                             ? MINUS_EXPR : PLUS_EXPR,
                             const2, const1, 0);
      if (!TREE_OVERFLOW (cst)
          && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
        {
          fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
          return fold_build2_loc (loc, code, type,
                                  variable1,
                                  fold_build2_loc (loc,
                                                   TREE_CODE (arg1), TREE_TYPE (arg1),
                                                   variable2, cst));
        }

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
                             ? MINUS_EXPR : PLUS_EXPR,
                             const1, const2, 0);
      if (!TREE_OVERFLOW (cst)
          && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
        {
          fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
          return fold_build2_loc (loc, code, type,
                                  fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
                                                   variable1, cst),
                                  variable2);
        }
    }

  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
     signed arithmetic case.  That form is created by the compiler
     often enough for folding it to be of value.  One example is in
     computing loop trip counts after Operator Strength Reduction.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && TREE_CODE (arg0) == MULT_EXPR
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && integer_zerop (arg1))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;                       /* zero */
      tree variable1 = TREE_OPERAND (arg0, 0);
      enum tree_code cmp_code = code;

      /* Handle unfolded multiplication by zero.  */
      if (integer_zerop (const1))
        return fold_build2_loc (loc, cmp_code, type, const1, const2);

      fold_overflow_warning (("assuming signed overflow does not occur when "
                              "eliminating multiplication in comparison "
                              "with zero"),
                             WARN_STRICT_OVERFLOW_COMPARISON);

      /* If const1 is negative we swap the sense of the comparison.  */
      if (tree_int_cst_sgn (const1) < 0)
        cmp_code = swap_tree_comparison (cmp_code);

      return fold_build2_loc (loc, cmp_code, type, variable1, const2);
    }
  tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
  if (tem)
    return tem;

  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
    {
      tree targ0 = strip_float_extensions (arg0);
      tree targ1 = strip_float_extensions (arg1);
      tree newtype = TREE_TYPE (targ0);

      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
        newtype = TREE_TYPE (targ1);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, newtype, targ0),
                                fold_convert_loc (loc, newtype, targ1));

      /* (-a) CMP (-b) -> b CMP a  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
                                TREE_OPERAND (arg0, 0));

      if (TREE_CODE (arg1) == REAL_CST)
        {
          REAL_VALUE_TYPE cst;
          cst = TREE_REAL_CST (arg1);

          /* (-a) CMP CST -> a swap(CMP) (-CST)  */
          if (TREE_CODE (arg0) == NEGATE_EXPR)
            return fold_build2_loc (loc, swap_tree_comparison (code), type,
                                    TREE_OPERAND (arg0, 0),
                                    build_real (TREE_TYPE (arg1),
                                                real_value_negate (&cst)));

          /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
          /* a CMP (-0) -> a CMP 0  */
          if (REAL_VALUE_MINUS_ZERO (cst))
            return fold_build2_loc (loc, code, type, arg0,
                                    build_real (TREE_TYPE (arg1), dconst0));

          /* x != NaN is always true, other ops are always false.  */
          if (REAL_VALUE_ISNAN (cst)
              && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
            {
              tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
              return omit_one_operand_loc (loc, type, tem, arg0);
            }

          /* Fold comparisons against infinity.  */
          if (REAL_VALUE_ISINF (cst)
              && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
            {
              tem = fold_inf_compare (loc, code, type, arg0, arg1);
              if (tem != NULL_TREE)
                return tem;
            }
        }

      /* If this is a comparison of a real constant with a PLUS_EXPR
         or a MINUS_EXPR of a real constant, we can convert it into a
         comparison with a revised real constant as long as no overflow
         occurs when unsafe_math_optimizations are enabled.  */
      if (flag_unsafe_math_optimizations
          && TREE_CODE (arg1) == REAL_CST
          && (TREE_CODE (arg0) == PLUS_EXPR
              || TREE_CODE (arg0) == MINUS_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
          && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
                                      ? MINUS_EXPR : PLUS_EXPR,
                                      arg1, TREE_OPERAND (arg0, 1)))
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Likewise, we can simplify a comparison of a real constant with
         a MINUS_EXPR whose first operand is also a real constant, i.e.
         (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
         floating-point types only if -fassociative-math is set.  */
      if (flag_associative_math
          && TREE_CODE (arg1) == REAL_CST
          && TREE_CODE (arg0) == MINUS_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
          && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
                                      arg1))
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, swap_tree_comparison (code), type,
                                TREE_OPERAND (arg0, 1), tem);

      /* Fold comparisons against built-in math functions.  */
      if (TREE_CODE (arg1) == REAL_CST
          && flag_unsafe_math_optimizations
          && ! flag_errno_math)
        {
          enum built_in_function fcode = builtin_mathfn_code (arg0);

          if (fcode != END_BUILTINS)
            {
              tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
              if (tem != NULL_TREE)
                return tem;
            }
        }
    }
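
  /* Worked examples (illustrative): x != __builtin_nan ("") folds to
     true and x < __builtin_nan ("") to false when signaling NaNs need
     not be honored; with unsafe math optimizations, x + 1.5 > 3.0
     becomes x > 1.5 because the revised constant is exact.  */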
  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
      && CONVERT_EXPR_P (arg0))
    {
      /* If we are widening one operand of an integer comparison,
         see if the other operand is similarly being widened.  Perhaps we
         can do the comparison in the narrower type.  */
      tem = fold_widened_comparison (loc, code, type, arg0, arg1);
      if (tem)
        return tem;

      /* Or if we are changing signedness.  */
      tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
      if (tem)
        return tem;
    }

  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
          || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (loc, code, type, op0, op1);
      if (tem)
        return tem;
    }

  /* Simplify comparison of something with itself.  (For IEEE
     floating-point, we can only do some of these simplifications.)  */
  if (operand_equal_p (arg0, arg1, 0))
    {
      switch (code)
        {
        case EQ_EXPR:
          if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
              || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
            return constant_boolean_node (1, type);
          break;

        case GE_EXPR:
        case LE_EXPR:
          if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
              || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
            return constant_boolean_node (1, type);
          return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);

        case NE_EXPR:
          /* For NE, we can only do this simplification if integer
             or we don't honor IEEE floating point NaNs.  */
          if (FLOAT_TYPE_P (TREE_TYPE (arg0))
              && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
            break;
          /* ... fall through ...  */
        case GT_EXPR:
        case LT_EXPR:
          return constant_boolean_node (0, type);
        default:
          gcc_unreachable ();
        }
    }
  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like  ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
          /* Don't handle degenerate cases here; they should already
             have been handled anyway.  */
          && cval1 != 0 && cval2 != 0
          && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
          && TREE_TYPE (cval1) == TREE_TYPE (cval2)
          && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
          && TYPE_MAX_VALUE (TREE_TYPE (cval1))
          && TYPE_MAX_VALUE (TREE_TYPE (cval2))
          && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
                                TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
        {
          tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
          tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

          /* We can't just pass T to eval_subst in case cval1 or cval2
             was the same as ARG1.  */

          tree high_result
                = fold_build2_loc (loc, code, type,
                                   eval_subst (loc, arg0, cval1, maxval,
                                               cval2, minval),
                                   arg1);
          tree equal_result
                = fold_build2_loc (loc, code, type,
                                   eval_subst (loc, arg0, cval1, maxval,
                                               cval2, maxval),
                                   arg1);
          tree low_result
                = fold_build2_loc (loc, code, type,
                                   eval_subst (loc, arg0, cval1, minval,
                                               cval2, maxval),
                                   arg1);

          /* All three of these results should be 0 or 1.  Confirm they are.
             Then use those values to select the proper code to use.  */

          if (TREE_CODE (high_result) == INTEGER_CST
              && TREE_CODE (equal_result) == INTEGER_CST
              && TREE_CODE (low_result) == INTEGER_CST)
            {
              /* Make a 3-bit mask with the high-order bit being the
                 value for `>', the next for '=', and the low for '<'.  */
              switch ((integer_onep (high_result) * 4)
                      + (integer_onep (equal_result) * 2)
                      + integer_onep (low_result))
                {
                case 0:
                  /* Always false.  */
                  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
                case 1:
                  code = LT_EXPR;
                  break;
                case 2:
                  code = EQ_EXPR;
                  break;
                case 3:
                  code = LE_EXPR;
                  break;
                case 4:
                  code = GT_EXPR;
                  break;
                case 5:
                  code = NE_EXPR;
                  break;
                case 6:
                  code = GE_EXPR;
                  break;
                case 7:
                  /* Always true.  */
                  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
                }

              if (save_p)
                {
                  tem = save_expr (build2 (code, type, cval1, cval2));
                  SET_EXPR_LOCATION (tem, loc);
                  return tem;
                }
              return fold_build2_loc (loc, code, type, cval1, cval2);
            }
        }
    }

  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (loc, code, type, arg0, arg1);
      if (tem != NULL_TREE)
        return tem;
    }

  /* Fold ~X op ~Y as Y op X.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == BIT_NOT_EXPR)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, code, type,
                              fold_convert_loc (loc, cmp_type,
                                                TREE_OPERAND (arg1, 0)),
                              TREE_OPERAND (arg0, 0));
    }

  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == INTEGER_CST)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, swap_tree_comparison (code), type,
                              TREE_OPERAND (arg0, 0),
                              fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
                                               fold_convert_loc (loc, cmp_type, arg1)));
    }

  return NULL_TREE;
}
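
/* Worked examples (illustrative, two's complement): fold_comparison
   rewrites ~x < ~y as y < x, and ~x == 5 as x == -6, the bitwise
   complement of 5, both of which expose further folding.  */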
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (location_t loc, tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
      ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2_loc (loc, PLUS_EXPR, itype,
                         fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
                         fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
  return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
                          build_zero_cst (itype));
}
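
/* Worked equation (illustrative): for z = a + b*i,
   z * conj (z) = (a + b*i) * (a - b*i) = a*a + b*b + 0*i,
   which is why the folded result sums the squares in the real part
   and has a zero imaginary part.  */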
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
   power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
   guarantees that P and N have the same least significant log2(M) bits.
   N is not otherwise constrained.  In particular, N is not normalized to
   0 <= N < M as is common.  In general, the precise value of P is unknown.
   M is chosen as large as possible such that constant N can be determined.

   Returns M and sets *RESIDUE to N.

   If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
   account.  This is not always possible due to PR 35705.  */

static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
                                 bool allow_func_align)
{
  enum tree_code code;

  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      if (handled_component_p (expr))
        {
          HOST_WIDE_INT bitsize, bitpos;
          tree offset;
          enum machine_mode mode;
          int unsignedp, volatilep;

          expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
                                      &mode, &unsignedp, &volatilep, false);
          *residue = bitpos / BITS_PER_UNIT;
          if (offset)
            {
              if (TREE_CODE (offset) == INTEGER_CST)
                *residue += TREE_INT_CST_LOW (offset);
              else
                /* We don't handle more complicated offset expressions.  */
                return 1;
            }
        }

      if (DECL_P (expr)
          && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
        return DECL_ALIGN_UNIT (expr);
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue,
                                                 allow_func_align);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
        {
          *residue += TREE_INT_CST_LOW (op1);
          return modulus;
        }
      else if (inner_code == MULT_EXPR)
        {
          op1 = TREE_OPERAND (op1, 1);
          if (TREE_CODE (op1) == INTEGER_CST)
            {
              unsigned HOST_WIDE_INT align;

              /* Compute the greatest power-of-2 divisor of op1.  */
              align = TREE_INT_CST_LOW (op1);
              align &= -align;

              /* If align is non-zero and less than *modulus, replace
                 *modulus with align., If align is 0, then either op1 is 0
                 or the greatest power-of-2 divisor of op1 doesn't fit in an
                 unsigned HOST_WIDE_INT.  In either case, no additional
                 constraint is imposed.  */
              if (align)
                modulus = MIN (modulus, align);

              return modulus;
            }
        }
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
}
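
/* Worked example (illustrative, hypothetical alignment): if p is the
   address of a 16-byte-aligned decl, then for p p+ (4 * n) p+ 6 the
   recursion yields modulus 16 for p, the MULT_EXPR narrows it to
   min (16, 4) = 4, and the trailing constant contributes *RESIDUE = 6,
   so the value is known to be 2 modulo 4.  */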
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  LOC is the location of the resulting expression.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

tree
fold_binary_loc (location_t loc,
                 enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;
  bool strict_overflow_p;

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 2
              && op0 != NULL_TREE
              && op1 != NULL_TREE);

  arg0 = op0;
  arg1 = op1;

  /* Strip any conversions that don't change the mode.  This is
     safe for every expression, except for a comparison expression
     because its signedness is derived from its operands.  So, in
     the latter case, only strip conversions that don't change the
     signedness.  MIN_EXPR/MAX_EXPR also need signedness of arguments
     preserved.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     cases, the appropriate type conversions should be put back in
     the tree that will get out of the constant folder.  */

  if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
    {
      STRIP_SIGN_NOPS (arg0);
      STRIP_SIGN_NOPS (arg1);
    }
  else
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }

  /* Note that TREE_CONSTANT isn't enough: static var addresses are
     constant but we can't do arithmetic on them.  */
  if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
    {
      if (kind == tcc_binary)
        {
          /* Make sure type and arg0 have the same saturating flag.  */
          gcc_assert (TYPE_SATURATING (type)
                      == TYPE_SATURATING (TREE_TYPE (arg0)));
          tem = const_binop (code, arg0, arg1);
        }
      else if (kind == tcc_comparison)
        tem = fold_relational_const (code, type, arg0, arg1);
      else
        tem = NULL_TREE;

      if (tem != NULL_TREE)
        {
          if (TREE_TYPE (tem) != type)
            tem = fold_convert_loc (loc, type, tem);
          return tem;
        }
    }

  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, code, type, op1, op0);
  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     The also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && ((truth_value_p (TREE_CODE (arg0))
           && (truth_value_p (TREE_CODE (arg1))
               || (TREE_CODE (arg1) == BIT_AND_EXPR
                   && integer_onep (TREE_OPERAND (arg1, 1)))))
          || (truth_value_p (TREE_CODE (arg1))
              && (truth_value_p (TREE_CODE (arg0))
                  || (TREE_CODE (arg0) == BIT_AND_EXPR
                      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
                             : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
                             : TRUTH_XOR_EXPR,
                             boolean_type_node,
                             fold_convert_loc (loc, boolean_type_node, arg0),
                             fold_convert_loc (loc, boolean_type_node, arg1));

      if (code == EQ_EXPR)
        tem = invert_truthvalue_loc (loc, tem);

      return fold_convert_loc (loc, type, tem);
    }

  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        {
          tem = fold_build2_loc (loc, code, type,
                                 fold_convert_loc (loc, TREE_TYPE (op0),
                                                   TREE_OPERAND (arg0, 1)), op1);
          tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), tem);
          goto fold_binary_exit;
        }
      if (TREE_CODE (arg1) == COMPOUND_EXPR
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_build2_loc (loc, code, type, op0,
                                 fold_convert_loc (loc, TREE_TYPE (op1),
                                                   TREE_OPERAND (arg1, 1)));
          tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0), tem);
          goto fold_binary_exit;
        }

      if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
        {
          tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
                                                     arg0, arg1,
                                                     /*cond_first_p=*/1);
          if (tem != NULL_TREE)
            return tem;
        }

      if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
        {
          tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
                                                     arg1, arg0,
                                                     /*cond_first_p=*/0);
          if (tem != NULL_TREE)
            return tem;
        }
    }

  switch (code)
    {
    case MEM_REF:
      /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
        {
          tree iref = TREE_OPERAND (arg0, 0);
          return fold_build2 (MEM_REF, type,
                              TREE_OPERAND (iref, 0),
                              int_const_binop (PLUS_EXPR, arg1,
                                               TREE_OPERAND (iref, 1), 0));
        }

      /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && handled_component_p (TREE_OPERAND (arg0, 0)))
        {
          tree base;
          HOST_WIDE_INT coffset;
          base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
                                                &coffset);
          if (!base)
            return NULL_TREE;
          return fold_build2 (MEM_REF, type,
                              build_fold_addr_expr (base),
                              int_const_binop (PLUS_EXPR, arg1,
                                               size_int (coffset), 0));
        }

      return NULL_TREE;
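
      /* Worked example (illustrative, assuming 4-byte int): for
         struct s { int a; int b; } x, the second fold rewrites
         MEM[&x.b, 4] as MEM[&x, 8], folding the component offset into
         the constant.  */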
    case POINTER_PLUS_EXPR:
      /* 0 +p index -> (type)index */
      if (integer_zerop (arg0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

      /* PTR +p 0 -> PTR */
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
          && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
        return fold_convert_loc (loc, type,
                                 fold_build2_loc (loc, PLUS_EXPR, sizetype,
                                                  fold_convert_loc (loc, sizetype,
                                                                    arg1),
                                                  fold_convert_loc (loc, sizetype,
                                                                    arg0)));

      /* index +p PTR -> PTR +p index */
      if (POINTER_TYPE_P (TREE_TYPE (arg1))
          && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
        return fold_build2_loc (loc, POINTER_PLUS_EXPR, type,
                                fold_convert_loc (loc, type, arg1),
                                fold_convert_loc (loc, sizetype, arg0));

      /* (PTR +p B) +p A -> PTR +p (B + A) */
      if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
        {
          tree inner;
          tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
          tree arg00 = TREE_OPERAND (arg0, 0);
          inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
                                   arg01, fold_convert_loc (loc, sizetype, arg1));
          return fold_convert_loc (loc, type,
                                   fold_build2_loc (loc, POINTER_PLUS_EXPR,
                                                    TREE_TYPE (arg00),
                                                    arg00, inner));
        }

      /* PTR_CST +p CST -> CST1 */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
        return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
                                fold_convert_loc (loc, type, arg1));

      /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
         of the array.  Loop optimizer sometimes produce this type of
         expressions.  */
      if (TREE_CODE (arg0) == ADDR_EXPR)
        {
          tem = try_move_mult_to_index (loc, arg0,
                                        fold_convert_loc (loc, sizetype, arg1));
          if (tem)
            return fold_convert_loc (loc, type, tem);
        }

      return NULL_TREE;
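
      /* Worked examples (illustrative): 0 p+ i folds to (type) i,
         p p+ 0 folds to p, and (p p+ 4) p+ 8 re-associates to p p+ 12
         by the (PTR +p B) +p A rule above.  */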
    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                fold_convert_loc (loc, type, arg1),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));

      if (INTEGRAL_TYPE_P (type))
        {
          /* Convert ~A + 1 to -A.  */
          if (TREE_CODE (arg0) == BIT_NOT_EXPR
              && integer_onep (arg1))
            return fold_build1_loc (loc, NEGATE_EXPR, type,
                                    fold_convert_loc (loc, type,
                                                      TREE_OPERAND (arg0, 0)));

          /* ~X + X is -1.  */
          if (TREE_CODE (arg0) == BIT_NOT_EXPR
              && !TYPE_OVERFLOW_TRAPS (type))
            {
              tree tem = TREE_OPERAND (arg0, 0);

              STRIP_NOPS (tem);
              if (operand_equal_p (tem, arg1, 0))
                {
                  t1 = build_int_cst_type (type, -1);
                  return omit_one_operand_loc (loc, type, t1, arg1);
                }
            }

          /* X + ~X is -1.  */
          if (TREE_CODE (arg1) == BIT_NOT_EXPR
              && !TYPE_OVERFLOW_TRAPS (type))
            {
              tree tem = TREE_OPERAND (arg1, 0);

              STRIP_NOPS (tem);
              if (operand_equal_p (arg0, tem, 0))
                {
                  t1 = build_int_cst_type (type, -1);
                  return omit_one_operand_loc (loc, type, t1, arg0);
                }
            }

          /* X + (X / CST) * -CST is X % CST.  */
          if (TREE_CODE (arg1) == MULT_EXPR
              && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
              && operand_equal_p (arg0,
                                  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
            {
              tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
              tree cst1 = TREE_OPERAND (arg1, 1);
              tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
                                          cst1, cst0);
              if (sum && integer_zerop (sum))
                return fold_convert_loc (loc, type,
                                         fold_build2_loc (loc, TRUNC_MOD_EXPR,
                                                          TREE_TYPE (arg0), arg0,
                                                          cst0));
            }
        }
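
      /* Worked equation (illustrative): with CST = 8 and x = 21,
         x + (x / 8) * -8 = 21 + 2 * -8 = 5 = 21 % 8, matching the
         TRUNC_MOD_EXPR built above under C truncating division.  */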
      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
         same or one.  Make sure type is not saturating.
         fold_plusminus_mult_expr will re-associate.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && !TYPE_SATURATING (type)
          && (!FLOAT_TYPE_P (type) || flag_associative_math))
        {
          tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
          if (tem)
            return tem;
        }

      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
             with a constant, and the two constants have no bits in common,
             we should treat this as a BIT_IOR_EXPR since this may produce more
             simplifications.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
              && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
              && integer_zerop (const_binop (BIT_AND_EXPR,
                                             TREE_OPERAND (arg0, 1),
                                             TREE_OPERAND (arg1, 1))))
            {
              code = BIT_IOR_EXPR;
              goto bit_ior;
            }

          /* Reassociate (plus (plus (mult) (foo)) (mult)) as
             (plus (plus (mult) (mult)) (foo)) so that we can
             take advantage of the factoring cases below.  */
          if (((TREE_CODE (arg0) == PLUS_EXPR
                || TREE_CODE (arg0) == MINUS_EXPR)
               && TREE_CODE (arg1) == MULT_EXPR)
              || ((TREE_CODE (arg1) == PLUS_EXPR
                   || TREE_CODE (arg1) == MINUS_EXPR)
                  && TREE_CODE (arg0) == MULT_EXPR))
            {
              tree parg0, parg1, parg, marg;
              enum tree_code pcode;

              if (TREE_CODE (arg1) == MULT_EXPR)
                parg = arg0, marg = arg1;
              else
                parg = arg1, marg = arg0;
              pcode = TREE_CODE (parg);
              parg0 = TREE_OPERAND (parg, 0);
              parg1 = TREE_OPERAND (parg, 1);
              STRIP_NOPS (parg0);
              STRIP_NOPS (parg1);

              if (TREE_CODE (parg0) == MULT_EXPR
                  && TREE_CODE (parg1) != MULT_EXPR)
                return fold_build2_loc (loc, pcode, type,
                                        fold_build2_loc (loc, PLUS_EXPR, type,
                                                         fold_convert_loc (loc, type,
                                                                           parg0),
                                                         fold_convert_loc (loc, type,
                                                                           marg)),
                                        fold_convert_loc (loc, type, parg1));
              if (TREE_CODE (parg0) != MULT_EXPR
                  && TREE_CODE (parg1) == MULT_EXPR)
                return
                  fold_build2_loc (loc, PLUS_EXPR, type,
                                   fold_convert_loc (loc, type, parg0),
                                   fold_build2_loc (loc, pcode, type,
                                                    fold_convert_loc (loc, type, marg),
                                                    fold_convert_loc (loc, type,
                                                                      parg1)));
            }
        }
      else
        {
          /* See if ARG1 is zero and X + ARG1 reduces to X.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* Likewise if the operands are reversed.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

          /* Convert X + -C into X - C.  */
          if (TREE_CODE (arg1) == REAL_CST
              && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
            {
              tem = fold_negate_const (arg1, type);
              if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
                return fold_build2_loc (loc, MINUS_EXPR, type,
                                        fold_convert_loc (loc, type, arg0),
                                        fold_convert_loc (loc, type, tem));
            }

          /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
             to __complex__ ( x, y ).  This is not the same for SNaNs or
             if signed zeros are involved.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
            {
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
              tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
              tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
              bool arg0rz = false, arg0iz = false;
              if ((arg0r && (arg0rz = real_zerop (arg0r)))
                  || (arg0i && (arg0iz = real_zerop (arg0i))))
                {
                  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
                  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
                  if (arg0rz && arg1i && real_zerop (arg1i))
                    {
                      tree rp = arg1r ? arg1r
                                  : build1 (REALPART_EXPR, rtype, arg1);
                      tree ip = arg0i ? arg0i
                                  : build1 (IMAGPART_EXPR, rtype, arg0);
                      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                    }
                  else if (arg0iz && arg1r && real_zerop (arg1r))
                    {
                      tree rp = arg0r ? arg0r
                                  : build1 (REALPART_EXPR, rtype, arg0);
                      tree ip = arg1i ? arg1i
                                  : build1 (IMAGPART_EXPR, rtype, arg1);
                      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                    }
                }
            }
          if (flag_unsafe_math_optimizations
              && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
              && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
              && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
            return tem;

          /* Convert x+x into x*2.0.  */
          if (operand_equal_p (arg0, arg1, 0)
              && SCALAR_FLOAT_TYPE_P (type))
            return fold_build2_loc (loc, MULT_EXPR, type, arg0,
                                build_real (type, dconst2));

          /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
             We associate floats only if the user has specified
             -fassociative-math.  */
          if (flag_associative_math
              && TREE_CODE (arg1) == PLUS_EXPR
              && TREE_CODE (arg0) != MULT_EXPR)
            {
              tree tree10 = TREE_OPERAND (arg1, 0);
              tree tree11 = TREE_OPERAND (arg1, 1);
              if (TREE_CODE (tree11) == MULT_EXPR
                  && TREE_CODE (tree10) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
                  return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
                }
            }
          /* Convert (b*c + d*e) + a into b*c + (d*e + a).
             We associate floats only if the user has specified
             -fassociative-math.  */
          if (flag_associative_math
              && TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) != MULT_EXPR)
            {
              tree tree00 = TREE_OPERAND (arg0, 0);
              tree tree01 = TREE_OPERAND (arg0, 1);
              if (TREE_CODE (tree01) == MULT_EXPR
                  && TREE_CODE (tree00) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
                  return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
                }
            }
        }
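      /* Illustrative example (editorial sketch): under -fassociative-math,
         a + (b*c + d*e) is rewritten as (a + b*c) + d*e, and
         (b*c + d*e) + a as b*c + (d*e + a), so the multiplies become
         visible to the factoring cases below.  */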
    bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
         is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
         is a rotate of A by B bits.  */
      {
        enum tree_code code0, code1;
        tree rtype;
        code0 = TREE_CODE (arg0);
        code1 = TREE_CODE (arg1);
        if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
             || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
            && operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 0), 0)
            && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
                TYPE_UNSIGNED (rtype))
            /* Only create rotates in complete modes.  Other cases are not
               expanded properly.  */
            && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
          {
            tree tree01, tree11;
            enum tree_code code01, code11;

            tree01 = TREE_OPERAND (arg0, 1);
            tree11 = TREE_OPERAND (arg1, 1);
            STRIP_NOPS (tree01);
            STRIP_NOPS (tree11);
            code01 = TREE_CODE (tree01);
            code11 = TREE_CODE (tree11);
            if (code01 == INTEGER_CST
                && code11 == INTEGER_CST
                && TREE_INT_CST_HIGH (tree01) == 0
                && TREE_INT_CST_HIGH (tree11) == 0
                && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
                    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
              {
                tem = build2 (LROTATE_EXPR,
                              TREE_TYPE (TREE_OPERAND (arg0, 0)),
                              TREE_OPERAND (arg0, 0),
                              code0 == LSHIFT_EXPR
                              ? tree01 : tree11);
                SET_EXPR_LOCATION (tem, loc);
                return fold_convert_loc (loc, type, tem);
              }
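            /* Illustrative example (editorial sketch): for a 32-bit
               unsigned x, (x << 5) + (x >> 27) matches here, since
               5 + 27 == 32 == TYPE_PRECISION, and becomes a left rotate
               of x by 5 bits.  */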
            else if (code11 == MINUS_EXPR)
              {
                tree tree110, tree111;
                tree110 = TREE_OPERAND (tree11, 0);
                tree111 = TREE_OPERAND (tree11, 1);
                STRIP_NOPS (tree110);
                STRIP_NOPS (tree111);
                if (TREE_CODE (tree110) == INTEGER_CST
                    && 0 == compare_tree_int (tree110,
                                              TYPE_PRECISION
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree01, tree111, 0))
                  return
                    fold_convert_loc (loc, type,
                                      build2 ((code0 == LSHIFT_EXPR
                                               ? LROTATE_EXPR
                                               : RROTATE_EXPR),
                                              TREE_TYPE (TREE_OPERAND (arg0, 0)),
                                              TREE_OPERAND (arg0, 0), tree01));
              }
            else if (code01 == MINUS_EXPR)
              {
                tree tree010, tree011;
                tree010 = TREE_OPERAND (tree01, 0);
                tree011 = TREE_OPERAND (tree01, 1);
                STRIP_NOPS (tree010);
                STRIP_NOPS (tree011);
                if (TREE_CODE (tree010) == INTEGER_CST
                    && 0 == compare_tree_int (tree010,
                                              TYPE_PRECISION
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree11, tree011, 0))
                  return fold_convert_loc
                    (loc, type,
                     build2 ((code0 != LSHIFT_EXPR
                              ? LROTATE_EXPR
                              : RROTATE_EXPR),
                             TREE_TYPE (TREE_OPERAND (arg0, 0)),
                             TREE_OPERAND (arg0, 0), tree11));
              }
          }
      }
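      /* Illustrative example (editorial sketch): for a 32-bit unsigned x
         and a variable count b, (x << b) + (x >> (32 - b)) matches the
         MINUS_EXPR pattern above and becomes a left rotate of x by b
         bits.  */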
    associate:
      /* In most languages, can't associate operations on floats through
         parentheses.  Rather than remember where the parentheses were, we
         don't associate floats at all, unless the user has specified
         -fassociative-math.
         And, we need to make sure type is not saturating.  */

      if ((! FLOAT_TYPE_P (type) || flag_associative_math)
          && !TYPE_SATURATING (type))
        {
          tree var0, con0, lit0, minus_lit0;
          tree var1, con1, lit1, minus_lit1;
          bool ok = true;

          /* Split both trees into variables, constants, and literals.  Then
             associate each group together, the constants with literals,
             then the result with variables.  This increases the chances of
             literals being recombined later and of generating relocatable
             expressions for the sum of a constant and literal.  */
          var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
          var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
                             code == MINUS_EXPR);

          /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
          if (code == MINUS_EXPR)
            code = PLUS_EXPR;

          /* With undefined overflow we can only associate constants with one
             variable, and constants whose association doesn't overflow.  */
          if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
              || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
            {
              if (var0 && var1)
                {
                  tree tmp0 = var0;
                  tree tmp1 = var1;
                  if (TREE_CODE (tmp0) == NEGATE_EXPR)
                    tmp0 = TREE_OPERAND (tmp0, 0);
                  if (TREE_CODE (tmp1) == NEGATE_EXPR)
                    tmp1 = TREE_OPERAND (tmp1, 0);
                  /* The only case we can still associate with two variables
                     is if they are the same, modulo negation.  */
                  if (!operand_equal_p (tmp0, tmp1, 0))
                    ok = false;
                }

              if (ok && lit0 && lit1)
                {
                  tree tmp0 = fold_convert (type, lit0);
                  tree tmp1 = fold_convert (type, lit1);

                  if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
                      && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
                    ok = false;
                }
            }

          /* Only do something if we found more than two objects.  Otherwise,
             nothing has changed and we risk infinite recursion.  */
          if (ok
              && (2 < ((var0 != 0) + (var1 != 0)
                       + (con0 != 0) + (con1 != 0)
                       + (lit0 != 0) + (lit1 != 0)
                       + (minus_lit0 != 0) + (minus_lit1 != 0))))
            {
              var0 = associate_trees (loc, var0, var1, code, type);
              con0 = associate_trees (loc, con0, con1, code, type);
              lit0 = associate_trees (loc, lit0, lit1, code, type);
              minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
                                            code, type);

              /* Preserve the MINUS_EXPR if the negative part of the literal is
                 greater than the positive part.  Otherwise, the multiplicative
                 folding code (i.e. extract_muldiv) may be fooled in case
                 unsigned constants are subtracted, like in the following
                 example: ((X*2 + 4) - 8U)/2.  */
              if (minus_lit0 && lit0)
                {
                  if (TREE_CODE (lit0) == INTEGER_CST
                      && TREE_CODE (minus_lit0) == INTEGER_CST
                      && tree_int_cst_lt (lit0, minus_lit0))
                    {
                      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
                                                    MINUS_EXPR, type);
                      lit0 = 0;
                    }
                  else
                    {
                      lit0 = associate_trees (loc, lit0, minus_lit0,
                                              MINUS_EXPR, type);
                      minus_lit0 = 0;
                    }
                }
              if (minus_lit0)
                {
                  if (con0 == 0)
                    return
                      fold_convert_loc (loc, type,
                                        associate_trees (loc, var0, minus_lit0,
                                                         MINUS_EXPR, type));
                  else
                    {
                      con0 = associate_trees (loc, con0, minus_lit0,
                                              MINUS_EXPR, type);
                      return
                        fold_convert_loc (loc, type,
                                          associate_trees (loc, var0, con0,
                                                           PLUS_EXPR, type));
                    }
                }

              con0 = associate_trees (loc, con0, lit0, code, type);
              return
                fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
                                                              code, type));
            }
        }

      return NULL_TREE;
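      /* Illustrative example (editorial sketch): for a wrapping integer
         type, (x + 3) + (y + 4) splits into variables x, y and literals
         3, 4, which reassociate to (x + y) + 7.  */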
    case MINUS_EXPR:
      /* Pointer simplifications for subtraction, simple reassociations. */
      if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
        {
          /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
          if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
              && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
            {
              tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
              tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
              tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
              tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
              return fold_build2_loc (loc, PLUS_EXPR, type,
                                  fold_build2_loc (loc, MINUS_EXPR, type,
                                               arg00, arg10),
                                  fold_build2_loc (loc, MINUS_EXPR, type,
                                               arg01, arg11));
            }
          /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1
             simplifies. */
          else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
            {
              tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
              tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
              tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
                                      fold_convert_loc (loc, type, arg1));
              if (tmp)
                return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
            }
        }
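      /* Illustrative example (editorial sketch): with char *p,
         (p p+ i) - (p p+ j) becomes (p - p) + (i - j); the (p - p) part
         then folds to zero, leaving i - j.  */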
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2_loc (loc, PLUS_EXPR, type, op0,
                            fold_convert_loc (loc, type,
                                              TREE_OPERAND (arg1, 0)));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && (FLOAT_TYPE_P (type)
              || INTEGRAL_TYPE_P (type))
          && negate_expr_p (arg1)
          && reorder_operands_p (arg0, arg1))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                            fold_convert_loc (loc, type,
                                              negate_expr (arg1)),
                            fold_convert_loc (loc, type,
                                              TREE_OPERAND (arg0, 0)));
      /* Convert -A - 1 to ~A.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (arg0) == NEGATE_EXPR
          && integer_onep (arg1)
          && !TYPE_OVERFLOW_TRAPS (type))
        return fold_build1_loc (loc, BIT_NOT_EXPR, type,
                            fold_convert_loc (loc, type,
                                              TREE_OPERAND (arg0, 0)));

      /* Convert -1 - A to ~A.  */
      if (INTEGRAL_TYPE_P (type)
          && integer_all_onesp (arg0))
        return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);

      /* X - (X / CST) * CST is X % CST.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (arg1) == MULT_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
          && operand_equal_p (arg0,
                              TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
                              TREE_OPERAND (arg1, 1), 0))
        return
          fold_convert_loc (loc, type,
                            fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
                                         arg0, TREE_OPERAND (arg1, 1)));
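      /* Illustrative example (editorial sketch): for int x,
         x - (x / 4) * 4 folds to x % 4 here; C's truncating division and
         remainder agree for both signs, so no extra guard is needed.  */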
      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg0))
            return negate_expr (fold_convert_loc (loc, type, arg1));
          if (integer_zerop (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* Fold A - (A & B) into ~B & A.  */
          if (!TREE_SIDE_EFFECTS (arg0)
              && TREE_CODE (arg1) == BIT_AND_EXPR)
            {
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
                {
                  tree arg10 = fold_convert_loc (loc, type,
                                                 TREE_OPERAND (arg1, 0));
                  return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                      fold_build1_loc (loc, BIT_NOT_EXPR,
                                                   type, arg10),
                                      fold_convert_loc (loc, type, arg0));
                }
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
                {
                  tree arg11 = fold_convert_loc (loc,
                                                 type, TREE_OPERAND (arg1, 1));
                  return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                      fold_build1_loc (loc, BIT_NOT_EXPR,
                                                   type, arg11),
                                      fold_convert_loc (loc, type, arg0));
                }
            }

          /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
             any power of 2 minus 1.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0))
            {
              tree mask0 = TREE_OPERAND (arg0, 1);
              tree mask1 = TREE_OPERAND (arg1, 1);
              tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);

              if (operand_equal_p (tem, mask1, 0))
                {
                  tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
                                     TREE_OPERAND (arg0, 0), mask1);
                  return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
                }
            }
        }

      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
         ARG0 is zero and X + ARG0 reduces to X, since that would mean
         (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
        return negate_expr (fold_convert_loc (loc, type, arg1));

      /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
         __complex__ ( x, -y ).  This is not the same for SNaNs or if
         signed zeros are involved.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
          && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
        {
          tree rtype = TREE_TYPE (TREE_TYPE (arg0));
          tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
          tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
          bool arg0rz = false, arg0iz = false;
          if ((arg0r && (arg0rz = real_zerop (arg0r)))
              || (arg0i && (arg0iz = real_zerop (arg0i))))
            {
              tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
              tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
              if (arg0rz && arg1i && real_zerop (arg1i))
                {
                  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
                                         arg1r ? arg1r
                                         : build1 (REALPART_EXPR, rtype, arg1));
                  tree ip = arg0i ? arg0i
                              : build1 (IMAGPART_EXPR, rtype, arg0);
                  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                }
              else if (arg0iz && arg1r && real_zerop (arg1r))
                {
                  tree rp = arg0r ? arg0r
                              : build1 (REALPART_EXPR, rtype, arg0);
                  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
                                         arg1i ? arg1i
                                         : build1 (IMAGPART_EXPR, rtype, arg1));
                  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                }
            }
        }

      /* Fold &x - &x.  This can happen from &x.foo - &x.
         This is unsafe for certain floats even in non-IEEE formats.
         In IEEE, it is unsafe because it does wrong for NaNs.
         Also note that operand_equal_p is always false if an operand
         is volatile.  */

      if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
          && operand_equal_p (arg0, arg1, 0))
        return build_zero_cst (type);

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (negate_expr_p (arg1)
          && ((FLOAT_TYPE_P (type)
               /* Avoid this transformation if B is a positive REAL_CST.  */
               && (TREE_CODE (arg1) != REAL_CST
                   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
              || INTEGRAL_TYPE_P (type)))
        return fold_build2_loc (loc, PLUS_EXPR, type,
                            fold_convert_loc (loc, type, arg0),
                            fold_convert_loc (loc, type,
                                              negate_expr (arg1)));

      /* Try folding difference of addresses.  */
      {
        HOST_WIDE_INT diff;

        if ((TREE_CODE (arg0) == ADDR_EXPR
             || TREE_CODE (arg1) == ADDR_EXPR)
            && ptr_difference_const (arg0, arg1, &diff))
          return build_int_cst_type (type, diff);
      }

      /* Fold &a[i] - &a[j] to i-j.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
          && TREE_CODE (arg1) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
        {
          tree aref0 = TREE_OPERAND (arg0, 0);
          tree aref1 = TREE_OPERAND (arg1, 0);
          if (operand_equal_p (TREE_OPERAND (aref0, 0),
                               TREE_OPERAND (aref1, 0), 0))
            {
              tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
              tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
              tree esz = array_ref_element_size (aref0);
              tree diff = build2 (MINUS_EXPR, type, op0, op1);
              return fold_build2_loc (loc, MULT_EXPR, type, diff,
                                  fold_convert_loc (loc, type, esz));
            }
        }
      if (FLOAT_TYPE_P (type)
          && flag_unsafe_math_optimizations
          && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
          && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
          && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
        return tem;

      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
         same or one.  Make sure type is not saturating.
         fold_plusminus_mult_expr will re-associate.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && !TYPE_SATURATING (type)
          && (!FLOAT_TYPE_P (type) || flag_associative_math))
        {
          tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
          if (tem)
            return tem;
        }

      goto associate;

    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2_loc (loc, MULT_EXPR, type,
                            fold_convert_loc (loc, type,
                                              TREE_OPERAND (arg0, 0)),
                            fold_convert_loc (loc, type,
                                              negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2_loc (loc, MULT_EXPR, type,
                            fold_convert_loc (loc, type,
                                              negate_expr (arg0)),
                            fold_convert_loc (loc, type,
                                              TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return omit_one_operand_loc (loc, type, arg1, arg0);
          if (integer_onep (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
          /* Transform x * -1 into -x.  Make sure to do the negation
             on the original operand with conversions not stripped
             because we can only strip non-sign-changing conversions.  */
          if (integer_all_onesp (arg1))
            return fold_convert_loc (loc, type, negate_expr (op0));
          /* Transform x * -C into -x * C if x is easily negatable.  */
          if (TREE_CODE (arg1) == INTEGER_CST
              && tree_int_cst_sgn (arg1) == -1
              && negate_expr_p (arg0)
              && (tem = negate_expr (arg1)) != arg1
              && !TREE_OVERFLOW (tem))
            return fold_build2_loc (loc, MULT_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg0)),
                                tem);

          /* (a * (1 << b)) is (a << b)  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg1, 0)))
            return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
                                TREE_OPERAND (arg1, 1));
          if (TREE_CODE (arg0) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg0, 0)))
            return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
                                TREE_OPERAND (arg0, 1));

          /* (A + A) * C -> A * 2 * C  */
          if (TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) == INTEGER_CST
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg0, 1), 0))
            return fold_build2_loc (loc, MULT_EXPR, type,
                                omit_one_operand_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0),
                                                  TREE_OPERAND (arg0, 1)),
                                fold_build2_loc (loc, MULT_EXPR, type,
                                             build_int_cst (type, 2), arg1));
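          /* Illustrative example (editorial sketch): a * (1 << b) becomes
             a << b, and (a + a) * 3 becomes a * (2 * 3), i.e. a constant
             multiply the folder can finish as a * 6.  */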
          strict_overflow_p = false;
          if (TREE_CODE (arg1) == INTEGER_CST
              && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                             &strict_overflow_p)))
            {
              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying "
                                        "multiplication"),
                                       WARN_STRICT_OVERFLOW_MISC);
              return fold_convert_loc (loc, type, tem);
            }

          /* Optimize z * conj(z) for integer complex numbers.  */
          if (TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (loc, type, arg1);
          if (TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (loc, type, arg0);
        }
      else
        {
          /* Maybe fold x * 0 to 0.  The expressions aren't the same
             when x is NaN, since x * 0 is also NaN.  Nor are they the
             same in modes with signed zeros, since multiplying a
             negative value by 0 gives -0, not +0.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_zerop (arg1))
            return omit_one_operand_loc (loc, type, arg1, arg0);
          /* In IEEE floating point, x*1 is not equivalent to x for snans.
             Likewise for complex arithmetic with signed zeros.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
                  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
              && real_onep (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* Transform x * -1.0 into -x.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
                  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
              && real_minus_onep (arg1))
            return fold_convert_loc (loc, type, negate_expr (arg0));

          /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
             the result for floating point types due to rounding so it is
             applied only if -fassociative-math was specified.  */
          if (flag_associative_math
              && TREE_CODE (arg0) == RDIV_EXPR
              && TREE_CODE (arg1) == REAL_CST
              && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
            {
              tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
                                      arg1);
              if (tem)
                return fold_build2_loc (loc, RDIV_EXPR, type, tem,
                                    TREE_OPERAND (arg0, 1));
            }

          /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
          if (operand_equal_p (arg0, arg1, 0))
            {
              tree tem = fold_strip_sign_ops (arg0);
              if (tem != NULL_TREE)
                {
                  tem = fold_convert_loc (loc, type, tem);
                  return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
                }
            }

          /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
             This is not the same for NaNs or if signed zeros are
             involved.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
              && TREE_CODE (arg1) == COMPLEX_CST
              && real_zerop (TREE_REALPART (arg1)))
            {
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
              if (real_onep (TREE_IMAGPART (arg1)))
                return
                  fold_build2_loc (loc, COMPLEX_EXPR, type,
                               negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
                                                         rtype, arg0)),
                               fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
              else if (real_minus_onep (TREE_IMAGPART (arg1)))
                return
                  fold_build2_loc (loc, COMPLEX_EXPR, type,
                               fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
                               negate_expr (fold_build1_loc (loc, REALPART_EXPR,
                                                         rtype, arg0)));
            }

          /* Optimize z * conj(z) for floating point complex numbers.
             Guarded by flag_unsafe_math_optimizations as non-finite
             imaginary components don't produce scalar results.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (loc, type, arg1);
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (loc, type, arg0);
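          /* Illustrative example (editorial sketch): for z = a + b*I,
             z * ~z is (a*a + b*b) + 0*I, which is what fold_mult_zconjz
             builds.  The unsafe-math guard matters because an infinite b
             would otherwise require a NaN imaginary component.  */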
          if (flag_unsafe_math_optimizations)
            {
              enum built_in_function fcode0 = builtin_mathfn_code (arg0);
              enum built_in_function fcode1 = builtin_mathfn_code (arg1);

              /* Optimizations of root(...)*root(...).  */
              if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
                {
                  tree rootfn, arg;
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);

                  /* Optimize sqrt(x)*sqrt(x) as x.  */
                  if (BUILTIN_SQRT_P (fcode0)
                      && operand_equal_p (arg00, arg10, 0)
                      && ! HONOR_SNANS (TYPE_MODE (type)))
                    return arg00;

                  /* Optimize root(x)*root(y) as root(x*y).  */
                  rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
                  return build_call_expr_loc (loc, rootfn, 1, arg);
                }

              /* Optimize expN(x)*expN(y) as expN(x+y).  */
              if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
                {
                  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
                                          CALL_EXPR_ARG (arg0, 0),
                                          CALL_EXPR_ARG (arg1, 0));
                  return build_call_expr_loc (loc, expfn, 1, arg);
                }

              /* Optimizations of pow(...)*pow(...).  */
              if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
                  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
                  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
                {
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg01 = CALL_EXPR_ARG (arg0, 1);
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);
                  tree arg11 = CALL_EXPR_ARG (arg1, 1);

                  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
                  if (operand_equal_p (arg01, arg11, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      tree arg = fold_build2_loc (loc, MULT_EXPR, type,
                                              arg00, arg10);
                      return build_call_expr_loc (loc, powfn, 2, arg, arg01);
                    }

                  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
                  if (operand_equal_p (arg00, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
                                              arg01, arg11);
                      return build_call_expr_loc (loc, powfn, 2, arg00, arg);
                    }
                }
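              /* Illustrative example (editorial sketch):
                 pow (x, 2.0) * pow (y, 2.0) becomes pow (x * y, 2.0), and
                 pow (x, 2.0) * pow (x, 3.0) becomes pow (x, 5.0).  */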
              /* Optimize tan(x)*cos(x) as sin(x).  */
              if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
                   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
                   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
                   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
                   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
                   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
                  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                      CALL_EXPR_ARG (arg1, 0), 0))
                {
                  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

                  if (sinfn != NULL_TREE)
                    return build_call_expr_loc (loc, sinfn, 1,
                                            CALL_EXPR_ARG (arg0, 0));
                }

              /* Optimize x*pow(x,c) as pow(x,c+1).  */
              if (fcode1 == BUILT_IN_POW
                  || fcode1 == BUILT_IN_POWF
                  || fcode1 == BUILT_IN_POWL)
                {
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);
                  tree arg11 = CALL_EXPR_ARG (arg1, 1);
                  if (TREE_CODE (arg11) == REAL_CST
                      && !TREE_OVERFLOW (arg11)
                      && operand_equal_p (arg0, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
                      REAL_VALUE_TYPE c;
                      tree arg;

                      c = TREE_REAL_CST (arg11);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
                    }
                }

              /* Optimize pow(x,c)*x as pow(x,c+1).  */
              if (fcode0 == BUILT_IN_POW
                  || fcode0 == BUILT_IN_POWF
                  || fcode0 == BUILT_IN_POWL)
                {
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg01 = CALL_EXPR_ARG (arg0, 1);
                  if (TREE_CODE (arg01) == REAL_CST
                      && !TREE_OVERFLOW (arg01)
                      && operand_equal_p (arg1, arg00, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      REAL_VALUE_TYPE c;
                      tree arg;

                      c = TREE_REAL_CST (arg01);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      return build_call_expr_loc (loc, powfn, 2, arg1, arg);
                    }
                }

              /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
              if (optimize_function_for_speed_p (cfun)
                  && operand_equal_p (arg0, arg1, 0))
                {
                  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

                  if (powfn)
                    {
                      tree arg = build_real (type, dconst2);
                      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
                    }
                }
            }
        }
      goto associate;

    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg1);
        }

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg0);
        }

      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
          int width = TYPE_PRECISION (type), w;
          hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
          lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
          hi2 = TREE_INT_CST_HIGH (arg1);
          lo2 = TREE_INT_CST_LOW (arg1);

          /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
          if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
            return omit_one_operand_loc (loc, type, arg1,
                                     TREE_OPERAND (arg0, 0));

          if (width > HOST_BITS_PER_WIDE_INT)
            {
              mhi = (unsigned HOST_WIDE_INT) -1
                    >> (2 * HOST_BITS_PER_WIDE_INT - width);
              mlo = -1;
            }
          else
            {
              mhi = 0;
              mlo = (unsigned HOST_WIDE_INT) -1
                    >> (HOST_BITS_PER_WIDE_INT - width);
            }

          /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
          if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
            return fold_build2_loc (loc, BIT_IOR_EXPR, type,
                                TREE_OPERAND (arg0, 0), arg1);

          /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
             unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
             mode which allows further optimizations.  */
          hi1 &= mhi;
          lo1 &= mlo;
          hi2 &= mhi;
          lo2 &= mlo;
          hi3 = hi1 & ~hi2;
          lo3 = lo1 & ~lo2;
          for (w = BITS_PER_UNIT;
               w <= width && w <= HOST_BITS_PER_WIDE_INT;
               w <<= 1)
            {
              unsigned HOST_WIDE_INT mask
                = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
              if (((lo1 | lo2) & mask) == mask
                  && (lo1 & ~mask) == 0 && hi1 == 0)
                {
                  hi3 = 0;
                  lo3 = mask;
                  break;
                }
            }
          if (hi3 != hi1 || lo3 != lo1)
            return fold_build2_loc (loc, BIT_IOR_EXPR, type,
                                fold_build2_loc (loc, BIT_AND_EXPR, type,
                                             TREE_OPERAND (arg0, 0),
                                             build_int_cst_wide (type,
                                                                 lo3, hi3)),
                                arg1);
        }
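      /* Illustrative examples (editorial sketch) for a 32-bit int x:
         (x & 0x03) | 0x0f folds to 0x0f, since 0x03 & 0x0f == 0x03;
         (x & 0xfffffff0) | 0x0f folds to x | 0x0f, since the constants
         together cover every bit; and (x & 0x7f) | 0x0f has C1 minimized,
         giving (x & 0x70) | 0x0f.  */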
      /* (X & Y) | Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X & Y) | X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X | (X & Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X | (Y & X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));

      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;

      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

         This results in more efficient code for machines without a NAND
         instruction.  Combine will canonicalize to the first form
         which will allow use of NAND instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return
            fold_build1_loc (loc, BIT_NOT_EXPR, type,
                         build2 (BIT_AND_EXPR, type,
                                 fold_convert_loc (loc, type,
                                                   TREE_OPERAND (arg0, 0)),
                                 fold_convert_loc (loc, type,
                                                   TREE_OPERAND (arg1, 0))));
        }

      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_all_onesp (arg1))
        return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg1);
        }

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg0);
        }

      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
         with a constant, and the two constants have no bits in common,
         we should treat this as a BIT_IOR_EXPR since this may produce more
         simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
          && integer_zerop (const_binop (BIT_AND_EXPR,
                                         TREE_OPERAND (arg0, 1),
                                         TREE_OPERAND (arg1, 1))))
        {
          code = BIT_IOR_EXPR;
          goto bit_ior;
        }
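      /* Illustrative example (editorial sketch): (x & 0xf0) ^ (y & 0x0f)
         has disjoint constants (0xf0 & 0x0f == 0), so XOR and IOR agree
         on every bit and the expression is re-folded as an IOR.  */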
      /* (X | Y) ^ X -> Y & ~X */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          tree t2 = TREE_OPERAND (arg0, 1);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
                            arg1);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                            fold_convert_loc (loc, type, t2),
                            fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* (Y | X) ^ X -> Y & ~X */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tree t2 = TREE_OPERAND (arg0, 0);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
                            arg1);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                            fold_convert_loc (loc, type, t2),
                            fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* X ^ (X | Y) -> Y & ~X */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
        {
          tree t2 = TREE_OPERAND (arg1, 1);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
                            arg0);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                            fold_convert_loc (loc, type, t2),
                            fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* X ^ (Y | X) -> Y & ~X */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
        {
          tree t2 = TREE_OPERAND (arg1, 0);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
                            arg0);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                            fold_convert_loc (loc, type, t2),
                            fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* Convert ~X ^ ~Y to X ^ Y.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        return fold_build2_loc (loc, code, type,
                            fold_convert_loc (loc, type,
                                              TREE_OPERAND (arg0, 0)),
                            fold_convert_loc (loc, type,
                                              TREE_OPERAND (arg1, 0)));

      /* Convert ~X ^ C to X ^ ~C.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == INTEGER_CST)
        return fold_build2_loc (loc, code, type,
                            fold_convert_loc (loc, type,
                                              TREE_OPERAND (arg0, 0)),
                            fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));

      /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_onep (arg1))
        return fold_build2_loc (loc, EQ_EXPR, type, arg0,
                            build_int_cst (TREE_TYPE (arg0), 0));

      /* Fold (X & Y) ^ Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                              fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                              fold_convert_loc (loc, type, arg1));
        }
      /* Fold (X & Y) ^ X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                              fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                              fold_convert_loc (loc, type, arg1));
        }
      /* Fold X ^ (X & Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                              fold_convert_loc (loc, type, arg0),
                              fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
        }
      /* Fold X ^ (Y & X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                              fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                              fold_convert_loc (loc, type, arg0));
        }
      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
      if (integer_all_onesp (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      if (operand_equal_p (arg0, arg1, 0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X & X is always zero.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X & ~X is always zero.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree tmp1 = fold_convert_loc (loc, type, arg1);
          tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
          tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
          tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
          return
            fold_convert_loc (loc, type,
                              fold_build2_loc (loc, BIT_IOR_EXPR,
                                           type, tmp2, tmp3));
        }

      /* (X | Y) & Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X | Y) & X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X & (X | Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X & (Y | X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));

      /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_onep (arg1))
        {
          tem = TREE_OPERAND (arg0, 0);
          return fold_build2_loc (loc, EQ_EXPR, type,
                              fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
                                           build_int_cst (TREE_TYPE (tem), 1)),
                              build_int_cst (TREE_TYPE (tem), 0));
        }
      /* Fold ~X & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && integer_onep (arg1))
        {
          tem = TREE_OPERAND (arg0, 0);
          return fold_build2_loc (loc, EQ_EXPR, type,
                              fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
                                           build_int_cst (TREE_TYPE (tem), 1)),
                              build_int_cst (TREE_TYPE (tem), 0));
        }

      /* Fold (X ^ Y) & Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                              fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                              fold_convert_loc (loc, type, arg1));
        }
      /* Fold (X ^ Y) & X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                              fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                              fold_convert_loc (loc, type, arg1));
        }
      /* Fold X & (X ^ Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                              fold_convert_loc (loc, type, arg0),
                              fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
        }
      /* Fold X & (Y ^ X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                              fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                              fold_convert_loc (loc, type, arg0));
        }
      /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
         ((A & N) + B) & M -> (A + B) & M
         Similarly if (N & M) == 0,
         ((A | N) + B) & M -> (A + B) & M
         and for - instead of + (or unary - instead of +)
         and/or ^ instead of |.
         If B is constant and (B & M) == 0, fold into A & M.  */
      if (host_integerp (arg1, 1))
        {
          unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
          if (~cst1 && (cst1 & (cst1 + 1)) == 0
              && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
              && (TREE_CODE (arg0) == PLUS_EXPR
                  || TREE_CODE (arg0) == MINUS_EXPR
                  || TREE_CODE (arg0) == NEGATE_EXPR)
              && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
                  || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
            {
              tree pmop[2];
              int which = 0;
              unsigned HOST_WIDE_INT cst0;

              /* Now we know that arg0 is (C + D) or (C - D) or
                 -C and arg1 (M) is == (1LL << cst) - 1.
                 Store C into PMOP[0] and D into PMOP[1].  */
              pmop[0] = TREE_OPERAND (arg0, 0);
              pmop[1] = NULL;
              if (TREE_CODE (arg0) != NEGATE_EXPR)
                {
                  pmop[1] = TREE_OPERAND (arg0, 1);
                  which = 1;
                }

              if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
                  || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
                      & cst1) != cst1)
                which = -1;

              for (; which >= 0; which--)
                switch (TREE_CODE (pmop[which]))
                  {
                  case BIT_AND_EXPR:
                  case BIT_IOR_EXPR:
                  case BIT_XOR_EXPR:
                    if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
                        != INTEGER_CST)
                      break;
                    /* tree_low_cst not used, because we don't care about
                       the upper bits.  */
                    cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
                    cst0 &= cst1;
                    if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
                      {
                        if (cst0 != cst1)
                          break;
                      }
                    else if (cst0 != 0)
                      break;
                    /* If C or D is of the form (A & N) where
                       (N & M) == M, or of the form (A | N) or
                       (A ^ N) where (N & M) == 0, replace it with A.  */
                    pmop[which] = TREE_OPERAND (pmop[which], 0);
                    break;
                  case INTEGER_CST:
                    /* If C or D is a N where (N & M) == 0, it can be
                       omitted (assumed 0).  */
                    if ((TREE_CODE (arg0) == PLUS_EXPR
                         || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
                        && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
                      pmop[which] = NULL;
                    break;
                  default:
                    break;
                  }

              /* Only build anything new if we optimized one or both arguments
                 above.  */
              if (pmop[0] != TREE_OPERAND (arg0, 0)
                  || (TREE_CODE (arg0) != NEGATE_EXPR
                      && pmop[1] != TREE_OPERAND (arg0, 1)))
                {
                  tree utype = TREE_TYPE (arg0);
                  if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
                    {
                      /* Perform the operations in a type that has defined
                         overflow behavior.  */
                      utype = unsigned_type_for (TREE_TYPE (arg0));
                      if (pmop[0] != NULL)
                        pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
                      if (pmop[1] != NULL)
                        pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
                    }

                  if (TREE_CODE (arg0) == NEGATE_EXPR)
                    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
                  else if (TREE_CODE (arg0) == PLUS_EXPR)
                    {
                      if (pmop[0] != NULL && pmop[1] != NULL)
                        tem = fold_build2_loc (loc, PLUS_EXPR, utype,
                                               pmop[0], pmop[1]);
                      else if (pmop[0] != NULL)
                        tem = pmop[0];
                      else if (pmop[1] != NULL)
                        tem = pmop[1];
                      else
                        return build_int_cst (type, 0);
                    }
                  else if (pmop[0] == NULL)
                    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
                  else
                    tem = fold_build2_loc (loc, MINUS_EXPR, utype,
                                           pmop[0], pmop[1]);
                  /* TEM is now the new binary +, - or unary - replacement.  */
                  tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
                                         fold_convert_loc (loc, utype, arg1));
                  return fold_convert_loc (loc, type, tem);
                }
            }
        }
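      /* Illustrative examples (editorial sketch) with M == 7:
         ((a | 8) + b) & 7 folds to (a + b) & 7, since 8 & 7 == 0, and
         ((a & 15) + b) & 7 folds the same way, since 15 & 7 == 7;
         changing operand bits above the mask never affects the masked
         low bits of the sum, because carries only propagate upward.  */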
      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
          && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
        {
          unsigned int prec
            = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

          if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
              && (~TREE_INT_CST_LOW (arg1)
                  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
            return
              fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
        }

      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

         This results in more efficient code for machines without a NOR
         instruction.  Combine will canonicalize to the first form
         which will allow use of NOR instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return fold_build1_loc (loc, BIT_NOT_EXPR, type,
                              build2 (BIT_IOR_EXPR, type,
                                      fold_convert_loc (loc, type,
                                                        TREE_OPERAND (arg0, 0)),
                                      fold_convert_loc (loc, type,
                                                        TREE_OPERAND (arg1, 0))));
        }

      /* If arg0 is derived from the address of an object or function, we may
         be able to fold this expression using the object or function's
         alignment.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
        {
          unsigned HOST_WIDE_INT modulus, residue;
          unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);

          modulus = get_pointer_modulus_and_residue (arg0, &residue,
                                                     integer_onep (arg1));

          /* This works because modulus is a power of 2.  If this weren't the
             case, we'd have to replace it by its greatest power-of-2
             divisor: modulus & -modulus.  */
          if (low < modulus)
            return build_int_cst (type, residue & low);
        }
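      /* Illustrative example (editorial sketch): if arg0 is the address of
         a variable with 16-byte alignment (modulus 16, residue 0), then
         arg0 & 15 folds to 0 here.  */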
      /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
              (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
         if the new mask might be further optimized.  */
      if ((TREE_CODE (arg0) == LSHIFT_EXPR
           || TREE_CODE (arg0) == RSHIFT_EXPR)
          && host_integerp (TREE_OPERAND (arg0, 1), 1)
          && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
          && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
             < TYPE_PRECISION (TREE_TYPE (arg0))
          && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
          && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
        {
          unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
          unsigned HOST_WIDE_INT mask
            = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
          unsigned HOST_WIDE_INT newmask, zerobits = 0;
          tree shift_type = TREE_TYPE (arg0);

          if (TREE_CODE (arg0) == LSHIFT_EXPR)
            zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
          else if (TREE_CODE (arg0) == RSHIFT_EXPR
                   && TYPE_PRECISION (TREE_TYPE (arg0))
                      == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
            {
              unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
              tree arg00 = TREE_OPERAND (arg0, 0);
              /* See if more bits can be proven as zero because of
                 zero extension.  */
              if (TREE_CODE (arg00) == NOP_EXPR
                  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
                {
                  tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
                  if (TYPE_PRECISION (inner_type)
                      == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
                      && TYPE_PRECISION (inner_type) < prec)
                    {
                      prec = TYPE_PRECISION (inner_type);
                      /* See if we can shorten the right shift.  */
                      if (shiftc < prec)
                        shift_type = inner_type;
                    }
                }
              zerobits = ~(unsigned HOST_WIDE_INT) 0;
              zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
              zerobits <<= prec - shiftc;
              /* For arithmetic shift if sign bit could be set, zerobits
                 can contain actually sign bits, so no transformation is
                 possible, unless MASK masks them all away.  In that
                 case the shift needs to be converted into logical shift.  */
              if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
                  && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
                {
                  if ((mask & zerobits) == 0)
                    shift_type = unsigned_type_for (TREE_TYPE (arg0));
                  else
                    zerobits = 0;
                }
            }

          /* ((X << 16) & 0xff00) is (X, 0).  */
          if ((mask & zerobits) == mask)
            return omit_one_operand_loc (loc, type,
                                     build_int_cst (type, 0), arg0);

          newmask = mask | zerobits;
          if (newmask != mask && (newmask & (newmask + 1)) == 0)
            {
              unsigned int prec;

              /* Only do the transformation if NEWMASK is some integer
                 mode's mask.  */
              for (prec = BITS_PER_UNIT;
                   prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
                if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
                  break;
              if (prec < HOST_BITS_PER_WIDE_INT
                  || newmask == ~(unsigned HOST_WIDE_INT) 0)
                {
                  tree newmaskt;

                  if (shift_type != TREE_TYPE (arg0))
                    {
                      tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
                                         fold_convert_loc (loc, shift_type,
                                                           TREE_OPERAND (arg0, 0)),
                                         TREE_OPERAND (arg0, 1));
                      tem = fold_convert_loc (loc, type, tem);
                    }
                  else
                    tem = op0;
                  newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
                  if (!tree_int_cst_equal (newmaskt, arg1))
                    return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
                }
            }
        }

      goto associate;
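      /* Illustrative example (editorial sketch): for a 32-bit unsigned x,
         (x >> 8) & 0x00ffffff has zerobits == 0xff000000, so the mask is
         widened to 0xffffffff; the resulting all-ones AND then folds away,
         leaving just x >> 8.  */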
    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
         of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
          && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
          && real_zerop (arg1))
        return NULL_TREE;

      /* Optimize A / A to 1.0 if we don't care about
         NaNs or Infinities.  Skip the transformation
         for non-real operands.  */
      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
          && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
          && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
          && operand_equal_p (arg0, arg1, 0))
        {
          tree r = build_real (TREE_TYPE (arg0), dconst1);

          return omit_two_operands_loc (loc, type, r, arg0, arg1);
        }

      /* The complex version of the above A / A optimization.  */
      if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
          && operand_equal_p (arg0, arg1, 0))
        {
          tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
          if (! HONOR_NANS (TYPE_MODE (elem_type))
              && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
            {
              tree r = build_real (elem_type, dconst1);
              /* omit_two_operands will call fold_convert for us.  */
              return omit_two_operands_loc (loc, type, r, arg0, arg1);
            }
        }

      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2_loc (loc, RDIV_EXPR, type,
                            TREE_OPERAND (arg0, 0),
                            negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2_loc (loc, RDIV_EXPR, type,
                            negate_expr (arg0),
                            TREE_OPERAND (arg1, 0));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_onep (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_minus_onep (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type,
                                                  negate_expr (arg0)));

      /* If ARG1 is a constant, we can convert this to a multiply by the
         reciprocal.  This does not have the same rounding properties,
         so only do this if -freciprocal-math.  We can actually
         always safely do it if ARG1 is a power of two, but it's hard to
         tell if it is or not in a portable manner.  */
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (flag_reciprocal_math
              && 0 != (tem = const_binop (code, build_real (type, dconst1),
                                          arg1)))
            return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
          /* Find the reciprocal if optimizing and the result is exact.  */
          if (optimize)
            {
              REAL_VALUE_TYPE r;
              r = TREE_REAL_CST (arg1);
              if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
                {
                  tem = build_real (type, r);
                  return fold_build2_loc (loc, MULT_EXPR, type,
                                      fold_convert_loc (loc, type, arg0), tem);
                }
            }
        }
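      /* Illustrative example (editorial sketch): x / 4.0 becomes x * 0.25
         whenever the inverse is exact and we are optimizing; x / 5.0
         becomes x * 0.2 only under -freciprocal-math, because 0.2 is not
         exactly representable.  */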
      /* Convert A/B/C to A/(B*C).  */
      if (flag_reciprocal_math
          && TREE_CODE (arg0) == RDIV_EXPR)
        return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
                            fold_build2_loc (loc, MULT_EXPR, type,
                                         TREE_OPERAND (arg0, 1), arg1));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_reciprocal_math
          && TREE_CODE (arg1) == RDIV_EXPR)
        return fold_build2_loc (loc, MULT_EXPR, type,
                            fold_build2_loc (loc, RDIV_EXPR, type, arg0,
                                         TREE_OPERAND (arg1, 0)),
                            TREE_OPERAND (arg1, 1));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_reciprocal_math
          && TREE_CODE (arg1) == MULT_EXPR
          && TREE_CODE (arg0) == REAL_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
        {
          tree tem = const_binop (RDIV_EXPR, arg0,
                                  TREE_OPERAND (arg1, 1));
          if (tem)
            return fold_build2_loc (loc, RDIV_EXPR, type, tem,
                                TREE_OPERAND (arg1, 0));
        }

      if (flag_unsafe_math_optimizations)
        {
          enum built_in_function fcode0 = builtin_mathfn_code (arg0);
          enum built_in_function fcode1 = builtin_mathfn_code (arg1);

          /* Optimize sin(x)/cos(x) as tan(x).  */
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
              && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                  CALL_EXPR_ARG (arg1, 0), 0))
            {
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

              if (tanfn != NULL_TREE)
                return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
            }

          /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
          if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
               || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
               || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
              && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                  CALL_EXPR_ARG (arg1, 0), 0))
            {
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

              if (tanfn != NULL_TREE)
                {
                  tree tmp = build_call_expr_loc (loc, tanfn, 1,
                                              CALL_EXPR_ARG (arg0, 0));
                  return fold_build2_loc (loc, RDIV_EXPR, type,
                                      build_real (type, dconst1), tmp);
                }
            }

          /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
             NaNs or Infinities.  */
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg1, 0);

              if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
                  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
                  && operand_equal_p (arg00, arg01, 0))
                {
                  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

                  if (cosfn != NULL_TREE)
                    return build_call_expr_loc (loc, cosfn, 1, arg00);
                }
            }

          /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
             NaNs or Infinities.  */
          if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
               || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
               || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg1, 0);

              if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
                  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
                  && operand_equal_p (arg00, arg01, 0))
                {
                  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

                  if (cosfn != NULL_TREE)
                    {
                      tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
                      return fold_build2_loc (loc, RDIV_EXPR, type,
                                          build_real (type, dconst1),
                                          tmp);
                    }
                }
            }

	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
	  if (fcode0 == BUILT_IN_POW
	      || fcode0 == BUILT_IN_POWF
	      || fcode0 == BUILT_IN_POWL)
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      if (TREE_CODE (arg01) == REAL_CST
		  && !TREE_OVERFLOW (arg01)
		  && operand_equal_p (arg1, arg00, 0))
		{
		  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  REAL_VALUE_TYPE c;
		  tree arg;

		  c = TREE_REAL_CST (arg01);
		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
		  arg = build_real (type, c);
		  return build_call_expr_loc (loc, powfn, 2, arg1, arg);
		}
	    }
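
	  /* Illustrative note (added commentary, not in the original
	     source): e.g. pow (x, 3.0) / x folds to pow (x, 2.0); the
	     exponent constant is adjusted at compile time via
	     real_arithmetic.  */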

	  /* Optimize a/root(b/c) into a*root(c/b).  */
	  if (BUILTIN_ROOT_P (fcode1))
	    {
	      tree rootarg = CALL_EXPR_ARG (arg1, 0);

	      if (TREE_CODE (rootarg) == RDIV_EXPR)
		{
		  tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
		  tree b = TREE_OPERAND (rootarg, 0);
		  tree c = TREE_OPERAND (rootarg, 1);

		  tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
		  tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
		  return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
		}
	    }

	  /* Optimize x/expN(y) into x*expN(-y).  */
	  if (BUILTIN_EXPONENT_P (fcode1))
	    {
	      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
	      tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
	      arg1 = build_call_expr_loc (loc,
					  expfn, 1,
					  fold_convert_loc (loc, type, arg));
	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
	    }

	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
	  if (fcode1 == BUILT_IN_POW
	      || fcode1 == BUILT_IN_POWF
	      || fcode1 == BUILT_IN_POWL)
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
	      tree arg10 = CALL_EXPR_ARG (arg1, 0);
	      tree arg11 = CALL_EXPR_ARG (arg1, 1);
	      tree neg11 = fold_convert_loc (loc, type,
					     negate_expr (arg11));
	      arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
	    }
	}
      return NULL_TREE;

    case TRUNC_DIV_EXPR:
      /* Optimize (X & (-A)) / A where A is a power of 2,
	 to X >> log2(A).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
	  && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
	{
	  tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
				      arg1, TREE_OPERAND (arg0, 1));
	  if (sum && integer_zerop (sum)) {
	    unsigned long pow2;

	    if (TREE_INT_CST_LOW (arg1))
	      pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
	    else
	      pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
		     + HOST_BITS_PER_WIDE_INT;

	    return fold_build2_loc (loc, RSHIFT_EXPR, type,
				    TREE_OPERAND (arg0, 0),
				    build_int_cst (NULL_TREE, pow2));
	  }
	}
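
      /* Illustrative note (added commentary, not in the original source):
	 for signed x, (x & -8) / 8 folds to x >> 3; the mask guarantees
	 the low bits are zero, so the shift is exact.  */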

      /* Fall thru */

    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
	 a power of 2, to A >> (N + log2(B)).  */
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == LSHIFT_EXPR
	  && (TYPE_UNSIGNED (type)
	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	{
	  tree sval = TREE_OPERAND (arg1, 0);
	  if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
	    {
	      tree sh_cnt = TREE_OPERAND (arg1, 1);
	      unsigned long pow2;

	      if (TREE_INT_CST_LOW (sval))
		pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
	      else
		pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
		       + HOST_BITS_PER_WIDE_INT;

	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying A / (B << N)"),
				       WARN_STRICT_OVERFLOW_MISC);

	      sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
					sh_cnt,
					build_int_cst (NULL_TREE, pow2));
	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
				      fold_convert_loc (loc, type, arg0),
				      sh_cnt);
	    }
	}
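
      /* Illustrative note (added commentary, not in the original source):
	 for unsigned a, a / (2 << n) folds to a >> (n + 1), since
	 log2 (2) == 1.  */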

      /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
	 TRUNC_DIV_EXPR.  Rewrite into the latter in this case.  */
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_UNSIGNED (type)
	  && code == FLOOR_DIV_EXPR)
	return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);

      /* Fall thru */

    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
	return NULL_TREE;
      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return fold_convert_loc (loc, type, negate_expr (arg0));

      /* Convert -A / -B to A / B when the type is signed and overflow is
	 undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg0) == NEGATE_EXPR
	  && negate_expr_p (arg1))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, type,
						    TREE_OPERAND (arg0, 0)),
				  fold_convert_loc (loc, type,
						    negate_expr (arg1)));
	}
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && negate_expr_p (arg0))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, type,
						    negate_expr (arg0)),
				  fold_convert_loc (loc, type,
						    TREE_OPERAND (arg1, 0)));
	}

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	 operation, EXACT_DIV_EXPR.

	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, it's not clear if they do
	 after the last round to changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	  && multiple_of_p (type, arg0, arg1))
	return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert_loc (loc, type, tem);
	}

      return NULL_TREE;

    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % 1 is always zero, but be sure to preserve any side
	 effects in X.  */
      if (integer_onep (arg1))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % 0, return X % 0 unchanged so that we can get the
	 proper warnings and errors.  */
      if (integer_zerop (arg1))
	return NULL_TREE;

      /* 0 % X is always zero, but be sure to preserve any side
	 effects in X.  Place this after checking for X == 0.  */
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)
	  && TREE_INT_CST_HIGH (arg1) < 0
	  && !TYPE_OVERFLOW_TRAPS (type)
	  /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
	  && !sign_bit_p (arg1, arg1))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  negate_expr (arg1)));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && !TYPE_OVERFLOW_TRAPS (type))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying modulus"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert_loc (loc, type, tem);
	}

      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
	 i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
	  && (TYPE_UNSIGNED (type)
	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	{
	  tree c = arg1;
	  /* Also optimize A % (C << N)  where C is a power of 2,
	     to A & ((C << N) - 1).  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR)
	    c = TREE_OPERAND (arg1, 0);

	  if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
	    {
	      tree mask
		= fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
				   build_int_cst (TREE_TYPE (arg1), 1));
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying "
					"X % (power of two)"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_convert_loc (loc, type, arg0),
				      fold_convert_loc (loc, type, mask));
	    }
	}
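
      /* Illustrative note (added commentary, not in the original source):
	 for unsigned x, x % 16 folds to x & 15, and x % (4 << n) folds to
	 x & ((4 << n) - 1).  */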

      return NULL_TREE;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
	  && tree_expr_nonnegative_p (arg1))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Since negative shift count is not well-defined,
	 don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	return NULL_TREE;

      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
      if (TREE_CODE (op0) == code && host_integerp (arg1, false)
	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
	  && host_integerp (TREE_OPERAND (arg0, 1), false)
	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
	{
	  HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
			       + TREE_INT_CST_LOW (arg1));

	  /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
	     being well defined.  */
	  if (low >= TYPE_PRECISION (type))
	    {
	      if (code == LROTATE_EXPR || code == RROTATE_EXPR)
		low = low % TYPE_PRECISION (type);
	      else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
		return omit_one_operand_loc (loc, type,
					     build_int_cst (type, 0),
					     TREE_OPERAND (arg0, 0));
	      else
		low = TYPE_PRECISION (type) - 1;
	    }

	  return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				  build_int_cst (type, low));
	}
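
      /* Illustrative note (added commentary, not in the original source):
	 e.g. (x >> 2) >> 3 folds to x >> 5 when 5 is still smaller than
	 the precision of the type.  */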

      /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
	 into x & ((unsigned)-1 >> c) for unsigned types.  */
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
	   || (TYPE_UNSIGNED (type)
	       && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
	  && host_integerp (arg1, false)
	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
	  && host_integerp (TREE_OPERAND (arg0, 1), false)
	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
	{
	  HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
	  HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
	  tree lshift;
	  tree arg00;

	  if (low0 == low1)
	    {
	      arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

	      lshift = build_int_cst (type, -1);
	      lshift = int_const_binop (code, lshift, arg1, 0);

	      return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
	    }
	}

      /* Rewrite an LROTATE_EXPR by a constant into an
	 RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
	{
	  tree tem = build_int_cst (TREE_TYPE (arg1),
				    TYPE_PRECISION (type));
	  tem = const_binop (MINUS_EXPR, tem, arg1);
	  return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
	}

      /* If we have a rotate of a bit operation with the rotate count and
	 the second operand of the bit operation both constant,
	 permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == BIT_AND_EXPR
	      || TREE_CODE (arg0) == BIT_IOR_EXPR
	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, TREE_CODE (arg0), type,
				fold_build2_loc (loc, code, type,
						 TREE_OPERAND (arg0, 0),
						 arg1),
				fold_build2_loc (loc, code, type,
						 TREE_OPERAND (arg0, 1),
						 arg1));

      /* Two consecutive rotates adding up to the precision of the
	 type can be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == RROTATE_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (arg1) == 0
	  && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
	  && ((TREE_INT_CST_LOW (arg1)
	       + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
	      == (unsigned int) TYPE_PRECISION (type)))
	return TREE_OPERAND (arg0, 0);
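
      /* Illustrative note (added commentary, not in the original source):
	 on a 32-bit type, rotating right by 12 and then by 20 yields the
	 original value, so the pair is elided.  */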

      /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
	      (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
	 if the latter can be further optimized.  */
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree mask = fold_build2_loc (loc, code, type,
				       fold_convert_loc (loc, type,
							 TREE_OPERAND (arg0, 1)),
				       arg1);
	  tree shift = fold_build2_loc (loc, code, type,
					fold_convert_loc (loc, type,
							  TREE_OPERAND (arg0, 0)),
					arg1);
	  tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
	  if (tem)
	    return tem;
	}

      return NULL_TREE;

    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
      if (tem)
	return tem;
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_MAX_VALUE (type)
	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
      if (tem)
	return tem;
      goto associate;

    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
	return fold_convert_loc (loc, type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
	 must be evaluated.  */
      if (integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
	 case will be handled here.  */
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
	 means A >= Y && A != MAX, but in this case we know that
	 A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
	  && !TREE_SIDE_EFFECTS (arg1))
	{
	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
	  if (tem && !operand_equal_p (tem, arg0, 0))
	    return fold_build2_loc (loc, code, type, tem, arg1);

	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
	  if (tem && !operand_equal_p (tem, arg1, 0))
	    return fold_build2_loc (loc, code, type, arg0, tem);
	}

    truth_andor:
      /* We only do these simplifications if we are optimizing.  */
      if (!optimize)
	return NULL_TREE;

      /* Check for things like (A || B) && (A || C).  We can convert this
	 to A || (B && C).  Note that either operator can be any of the four
	 truth and/or operations and the transformation will still be
	 valid.   Also note that we only care about order for the
	 ANDIF and ORIF operators.  If B contains side effects, this
	 might change the truth-value of A.  */
      if (TREE_CODE (arg0) == TREE_CODE (arg1)
	  && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	      || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	      || TREE_CODE (arg0) == TRUTH_AND_EXPR
	      || TREE_CODE (arg0) == TRUTH_OR_EXPR)
	  && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
	{
	  tree a00 = TREE_OPERAND (arg0, 0);
	  tree a01 = TREE_OPERAND (arg0, 1);
	  tree a10 = TREE_OPERAND (arg1, 0);
	  tree a11 = TREE_OPERAND (arg1, 1);
	  int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			      || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			     && (code == TRUTH_AND_EXPR
				 || code == TRUTH_OR_EXPR));

	  if (operand_equal_p (a00, a10, 0))
	    return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				    fold_build2_loc (loc, code, type,
						     a01, a11));
	  else if (commutative && operand_equal_p (a00, a11, 0))
	    return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				    fold_build2_loc (loc, code, type,
						     a01, a10));
	  else if (commutative && operand_equal_p (a01, a10, 0))
	    return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
				    fold_build2_loc (loc, code, type,
						     a00, a11));

	  /* This case is tricky because we must either have commutative
	     operators or else A10 must not have side-effects.  */

	  else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
		   && operand_equal_p (a01, a11, 0))
	    return fold_build2_loc (loc, TREE_CODE (arg0), type,
				    fold_build2_loc (loc, code, type,
						     a00, a10),
				    a01);
	}

      /* See if we can build a range comparison.  */
      if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
	return tem;

      if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
	  || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
	{
	  tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
	  if (tem)
	    return fold_build2_loc (loc, code, type, tem, arg1);
	}

      if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
	  || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
	{
	  tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
	  if (tem)
	    return fold_build2_loc (loc, code, type, arg0, tem);
	}

      /* Check for the possibility of merging component references.  If our
	 lhs is another similar operation, try to merge its rhs with our
	 rhs.  Then try to merge our lhs and rhs.  */
      if (TREE_CODE (arg0) == code
	  && 0 != (tem = fold_truthop (loc, code, type,
				       TREE_OPERAND (arg0, 1), arg1)))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
	return tem;

      return NULL_TREE;

    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or true.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return fold_convert_loc (loc, type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant true, result is true, but we must
	 evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
	 TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      goto truth_andor;

    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
	{
	  /* Only call invert_truthvalue if operand is a truth value.  */
	  if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
	    tem = fold_build1_loc (loc, TRUTH_NOT_EXPR,
				   TREE_TYPE (arg0), arg0);
	  else
	    tem = invert_truthvalue_loc (loc, arg0);
	  return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	}
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      return NULL_TREE;

    case EQ_EXPR:
    case NE_EXPR:
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;

      /* bool_var != 0 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == NE_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var == 1 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == EQ_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var != 1 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == NE_EXPR)
	return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
				fold_convert_loc (loc, type, arg0));

      /* bool_var == 0 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == EQ_EXPR)
	return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
				fold_convert_loc (loc, type, arg0));

      /* !exp != 0 becomes !exp */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
	  && code == NE_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
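
      /* Illustrative note (added commentary, not in the original source):
	 the folds above remove redundant boolean tests; e.g. for a
	 BOOLEAN_TYPE variable b, (b != 0) is just b and (b == 0) is !b.  */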

      /* If this is an equality comparison of the address of two non-weak,
	 unaliased symbols neither of which are extern (since we do not
	 have access to attributes for externs), then we know the result.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
	{
	  /* We know that we're looking at the address of two
	     non-weak, unaliased, static _DECL nodes.

	     It is both wasteful and incorrect to call operand_equal_p
	     to compare the two ADDR_EXPR nodes.  It is wasteful in that
	     all we need to do is test pointer equality for the arguments
	     to the two ADDR_EXPR nodes.  It is incorrect to use
	     operand_equal_p as that function is NOT equivalent to a
	     C equality test.  It can in fact return false for two
	     objects which would test as equal using the C equality
	     operator.  */
	  bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
	  return constant_boolean_node (equal
					? code == EQ_EXPR : code != EQ_EXPR,
					type);
	}

      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
	 a MINUS_EXPR of a constant, we can convert it into a comparison with
	 a revised constant as long as no overflow occurs.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      fold_convert_loc (loc, TREE_TYPE (arg0),
							arg1),
				      TREE_OPERAND (arg0, 1)))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a NEGATE_EXPR.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = negate_expr (arg1))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a BIT_XOR_EXPR;  X ^ C1 == C2 is X == (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, BIT_XOR_EXPR,
						 TREE_TYPE (arg0),
						 fold_convert_loc (loc,
								   TREE_TYPE (arg0),
								   arg1),
						 TREE_OPERAND (arg0, 1)));

      /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR
	   || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	   || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	      || POINTER_TYPE_P (TREE_TYPE (arg0))))
	{
	  tree val = TREE_OPERAND (arg0, 1);
	  return omit_two_operands_loc (loc, type,
					fold_build2_loc (loc, code, type,
							 val,
							 build_int_cst (TREE_TYPE (val),
									0)),
					TREE_OPERAND (arg0, 0), arg1);
	}

      /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
	  && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
	{
	  return omit_two_operands_loc (loc, type,
					code == NE_EXPR
					? boolean_true_node
					: boolean_false_node,
					TREE_OPERAND (arg0, 1), arg1);
	}

      /* If we have X - Y == 0, we can convert that to X == Y and similarly
	 for !=.  Don't do this for ordered comparisons due to overflow.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
	  && integer_zerop (arg1))
	return fold_build2_loc (loc, code, type,
				TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg0, 1));

      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      if (TREE_CODE (arg0) == ABS_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1)))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);

      /* If this is an EQ or NE comparison with zero and ARG0 is
	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
	 two operations, but the latter can be done in one less insn
	 on machines that have only two-operand insns or on which a
	 constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_zerop (arg1))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  if (TREE_CODE (arg00) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg00, 0)))
	    {
	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
					  arg01, TREE_OPERAND (arg00, 1));
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				     build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, TREE_TYPE (arg1),
							tem),
				      arg1);
	    }
	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
		   && integer_onep (TREE_OPERAND (arg01, 0)))
	    {
	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
					  arg00, TREE_OPERAND (arg01, 1));
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				     build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, TREE_TYPE (arg1),
							tem),
				      arg1);
	    }
	}

      /* If this is an NE or EQ comparison of zero against the result of a
	 signed MOD operation whose second operand is a power of 2, make
	 the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tree newtype = unsigned_type_for (TREE_TYPE (arg0));
	  tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
					 fold_convert_loc (loc, newtype,
							   TREE_OPERAND (arg0, 0)),
					 fold_convert_loc (loc, newtype,
							   TREE_OPERAND (arg0, 1)));

	  return fold_build2_loc (loc, code, type, newmod,
				  fold_convert_loc (loc, newtype, arg1));
	}
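
      /* Illustrative note (added commentary, not in the original source):
	 for signed x, (x % 4) == 0 tests the same bits as
	 ((unsigned) x % 4) == 0, and the unsigned form can be lowered to a
	 simple mask test.  */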

      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
	 C1 is a valid shift constant, and C2 is a power of two, i.e.
	 a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
	     == INTEGER_CST
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && integer_zerop (arg1))
	{
	  tree itype = TREE_TYPE (arg0);
	  unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);

	  /* Check for a valid shift count.  */
	  if (TREE_INT_CST_HIGH (arg001) == 0
	      && TREE_INT_CST_LOW (arg001) < prec)
	    {
	      tree arg01 = TREE_OPERAND (arg0, 1);
	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
	      /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
		 can be rewritten as (X & (C2 << C1)) != 0.  */
	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
		{
		  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype,
					 arg01, arg001);
		  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype,
					 arg000, tem);
		  return fold_build2_loc (loc, code, type, tem, arg1);
		}
	      /* Otherwise, for signed (arithmetic) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
	      else if (!TYPE_UNSIGNED (itype))
		return fold_build2_loc (loc,
					code == EQ_EXPR ? GE_EXPR : LT_EXPR,
					type,
					arg000, build_int_cst (itype, 0));
	      /* Otherwise, for unsigned (logical) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
	      else
		return omit_one_operand_loc (loc, type,
					     code == EQ_EXPR
					     ? integer_one_node
					     : integer_zero_node,
					     arg000);
	    }
	}

      /* If this is an NE comparison of zero with an AND of one, remove the
	 comparison since the AND will give the correct value.  */
      if (code == NE_EXPR
	  && integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1)))
	return fold_convert_loc (loc, type, arg0);

      /* If we have (A & C) == C where C is a power of 2, convert this into
	 (A & C) != 0.  Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
				arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
							integer_zero_node));

      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
	 bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
      if (tem)
	return tem;

      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
				       TREE_TYPE (TREE_OPERAND (arg0, 1)),
				       TREE_OPERAND (arg0, 1));
	  tree dandnotc = fold_build2_loc (loc, BIT_AND_EXPR,
					   TREE_TYPE (arg0),
					   arg1, notc);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (dandnotc))
	    return omit_one_operand_loc (loc, type, rslt, arg0);
	}

      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notd = fold_build1_loc (loc, BIT_NOT_EXPR,
				       TREE_TYPE (arg1), arg1);
	  tree candnotd = fold_build2_loc (loc, BIT_AND_EXPR,
					   TREE_TYPE (arg0),
					   TREE_OPERAND (arg0, 1), notd);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (candnotd))
	    return omit_one_operand_loc (loc, type, rslt, arg0);
	}

      /* If this is a comparison of a field, we may be able to simplify it.  */
      if ((TREE_CODE (arg0) == COMPONENT_REF
	   || TREE_CODE (arg0) == BIT_FIELD_REF)
	  /* Handle the constant case even without -O
	     to make sure the warnings are given.  */
	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
	{
	  t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
	  if (t1)
	    return t1;
	}

      /* Optimize comparisons of strlen vs zero to a compare of the
	 first character of the string vs zero.  To wit,
		strlen(ptr) == 0   =>  *ptr == 0
		strlen(ptr) != 0   =>  *ptr != 0
	 Other cases should reduce to one of these two (or a constant)
	 due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
	  && integer_zerop (arg1))
	{
	  tree fndecl = get_callee_fndecl (arg0);

	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
	      && call_expr_nargs (arg0) == 1
	      && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
	    {
	      tree iref = build_fold_indirect_ref_loc (loc,
						       CALL_EXPR_ARG (arg0, 0));
	      return fold_build2_loc (loc, code, type, iref,
				      build_int_cst (TREE_TYPE (iref), 0));
	    }
	}
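
      /* Illustrative note (added commentary, not in the original source):
	 e.g. strlen (p) == 0 folds to *p == 0, avoiding the library call
	 entirely.  */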

      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
      if (TREE_CODE (arg0) == RSHIFT_EXPR
	  && integer_zerop (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree itype = TREE_TYPE (arg00);
	  if (TREE_INT_CST_HIGH (arg01) == 0
	      && TREE_INT_CST_LOW (arg01)
		 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
	    {
	      if (TYPE_UNSIGNED (itype))
		{
		  itype = signed_type_for (itype);
		  arg00 = fold_convert_loc (loc, itype, arg00);
		}
	      return fold_build2_loc (loc,
				      code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				      type, arg00, build_int_cst (itype, 0));
	    }
	}
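
      /* Illustrative note (added commentary, not in the original source):
	 for a 32-bit int x, (x >> 31) != 0 folds to x < 0, since an
	 arithmetic shift by precision-1 leaves only the sign bit.  */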

      /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
      if (integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_XOR_EXPR)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg0, 1));

      /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				build_int_cst (TREE_TYPE (arg1), 0));
      /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
				build_int_cst (TREE_TYPE (arg1), 0));

      /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, BIT_XOR_EXPR,
						 TREE_TYPE (arg1),
						 TREE_OPERAND (arg0, 1),
						 arg1));

      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
	 (X & C) == 0 when C is a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
				 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
				 TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type, tem, arg1);
	}

      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
	 constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type, arg00,
				  build_int_cst (TREE_TYPE (arg00), 0));
	}

      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
	 when C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
				 arg000, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type, tem,
				  build_int_cst (TREE_TYPE (tem), 0));
	}

      if (integer_zerop (arg1)
	  && tree_expr_nonzero_p (arg0))
	{
	  tree res = constant_boolean_node (code==NE_EXPR, type);
	  return omit_one_operand_loc (loc, type, res, arg0);
	}

      /* Fold -X op -Y as X op Y, where op is eq/ne.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, code, type,
				TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 0));

      /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR, itype,
								      arg00, arg10),
						     arg01),
				    build_int_cst (itype, 0));

	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR, itype,
								      arg00, arg11),
						     arg01),
				    build_int_cst (itype, 0));

	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR, itype,
								      arg01, arg10),
						     arg00),
				    build_int_cst (itype, 0));

	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR, itype,
								      arg01, arg11),
						     arg00),
				    build_int_cst (itype, 0));
	}

      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
	     operand_equal_p guarantees no side-effects so we don't need
	     to use omit_one_operand on Z.  */
	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2_loc (loc, code, type, arg00, arg10);
	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2_loc (loc, code, type, arg00, arg11);
	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2_loc (loc, code, type, arg01, arg10);
	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2_loc (loc, code, type, arg01, arg11);

	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
	  if (TREE_CODE (arg01) == INTEGER_CST
	      && TREE_CODE (arg11) == INTEGER_CST)
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_XOR_EXPR, itype,
						     arg00,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR, itype,
								      arg01, arg11)),
				    arg10);
	}

      /* Attempt to simplify equality/inequality comparisons of complex
	 values.  Only lower the comparison if the result is known or
	 can be simplified to a single scalar comparison.  */
      if ((TREE_CODE (arg0) == COMPLEX_EXPR
	   || TREE_CODE (arg0) == COMPLEX_CST)
	  && (TREE_CODE (arg1) == COMPLEX_EXPR
	      || TREE_CODE (arg1) == COMPLEX_CST))
	{
	  tree real0, imag0, real1, imag1;
	  tree rcond, icond;

	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
	    {
	      real0 = TREE_OPERAND (arg0, 0);
	      imag0 = TREE_OPERAND (arg0, 1);
	    }
	  else
	    {
	      real0 = TREE_REALPART (arg0);
	      imag0 = TREE_IMAGPART (arg0);
	    }

	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
	    {
	      real1 = TREE_OPERAND (arg1, 0);
	      imag1 = TREE_OPERAND (arg1, 1);
	    }
	  else
	    {
	      real1 = TREE_REALPART (arg1);
	      imag1 = TREE_IMAGPART (arg1);
	    }

	  rcond = fold_binary_loc (loc, code, type, real0, real1);
	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
	    {
	      if (integer_zerop (rcond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_false_node,
						  imag0, imag1);
		  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_true_node,
						  imag0, imag1);
		  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
		}
	    }

	  icond = fold_binary_loc (loc, code, type, imag0, imag1);
	  if (icond && TREE_CODE (icond) == INTEGER_CST)
	    {
	      if (integer_zerop (icond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_false_node,
						  real0, real1);
		  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_true_node,
						  real0, real1);
		  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
		}
	    }
	}

      return NULL_TREE;

    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
= fold_comparison (loc
, code
, type
, op0
, op1
);
12774 if (tem
!= NULL_TREE
)
12777 /* Transform comparisons of the form X +- C CMP X. */
12778 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
12779 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
12780 && ((TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
12781 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
))))
12782 || (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
12783 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))))
12785 tree arg01
= TREE_OPERAND (arg0
, 1);
12786 enum tree_code code0
= TREE_CODE (arg0
);
12789 if (TREE_CODE (arg01
) == REAL_CST
)
12790 is_positive
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01
)) ? -1 : 1;
12792 is_positive
= tree_int_cst_sgn (arg01
);

	  /* (X - c) > X becomes false.  */
	  if (code == GT_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that (X - c) > X "
					"is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Likewise (X + c) < X becomes false.  */
	  if (code == LT_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) < X is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Convert (X - c) <= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == LE_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X - c) <= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  /* Convert (X + c) >= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == GE_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) >= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  if (TREE_CODE (arg01) == INTEGER_CST)
	    {
	      /* Convert X + c > X and X - c < X to true for integers.  */
	      if (code == GT_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) > X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      if (code == LT_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) < X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      /* Convert X + c <= X and X - c >= X to false for integers.  */
	      if (code == LE_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) <= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}

	      if (code == GE_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) >= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}
	    }
	}

      /* Comparisons with the highest or lowest possible integer of
	 the specified precision will have known values.  */
      {
	tree arg1_type = TREE_TYPE (arg1);
	unsigned int width = TYPE_PRECISION (arg1_type);

	if (TREE_CODE (arg1) == INTEGER_CST
	    && width <= 2 * HOST_BITS_PER_WIDE_INT
	    && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
	  {
	    HOST_WIDE_INT signed_max_hi;
	    unsigned HOST_WIDE_INT signed_max_lo;
	    unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;

	    if (width <= HOST_BITS_PER_WIDE_INT)
	      {
		signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		signed_max_hi = 0;
		max_hi = 0;

		if (TYPE_UNSIGNED (arg1_type))
		  {
		    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_lo = 0;
		    min_hi = 0;
		  }
		else
		  {
		    max_lo = signed_max_lo;
		    min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
		    min_hi = -1;
		  }
	      }
	    else
	      {
		width -= HOST_BITS_PER_WIDE_INT;
		signed_max_lo = -1;
		signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		max_lo = -1;
		min_lo = 0;

		if (TYPE_UNSIGNED (arg1_type))
		  {
		    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_hi = 0;
		  }
		else
		  {
		    max_hi = signed_max_hi;
		    min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
		  }
	      }

	    if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
		&& TREE_INT_CST_LOW (arg1) == max_lo)
	      switch (code)
		{
		case GT_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_zero_node, arg0);

		case GE_EXPR:
		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

		case LE_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_one_node, arg0);

		case LT_EXPR:
		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

		/* The GE_EXPR and LT_EXPR cases above are not normally
		   reached because of previous transformations.  */

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == max_hi
		     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
	      switch (code)
		{
		case GT_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, EQ_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		case LE_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, NE_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo)
	      switch (code)
		{
		case LT_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_zero_node, arg0);

		case LE_EXPR:
		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

		case GE_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_one_node, arg0);

		case GT_EXPR:
		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
	      switch (code)
		{
		case GE_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
		  return fold_build2_loc (loc, NE_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		case LT_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
		  return fold_build2_loc (loc, EQ_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		default:
		  break;
		}

	    else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
		     && TREE_INT_CST_LOW (arg1) == signed_max_lo
		     && TYPE_UNSIGNED (arg1_type)
		     /* We will flip the signedness of the comparison operator
			associated with the mode of arg1, so the sign bit is
			specified by this mode.  Check that arg1 is the signed
			max associated with this sign bit.  */
		     && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
		     /* signed_type does not work on pointer types.  */
		     && INTEGRAL_TYPE_P (arg1_type))
	      {
		/* The following case also applies to X < signed_max+1
		   and X >= signed_max+1 because previous transformations.  */
		if (code == LE_EXPR || code == GT_EXPR)
		  {
		    tree st;
		    st = signed_type_for (TREE_TYPE (arg1));
		    return fold_build2_loc (loc,
					    code == LE_EXPR
					    ? GE_EXPR : LT_EXPR,
					    type,
					    fold_convert_loc (loc, st, arg0),
					    build_int_cst (st, 0));
		  }
	      }
	  }
      }

      /* If we are comparing an ABS_EXPR with a constant, we can
	 convert all the cases into explicit comparisons, but they may
	 well not be faster than doing the ABS and one comparison.
	 But ABS (X) <= C is a range comparison, which becomes a subtraction
	 and a comparison, and is probably faster.  */
      if (code == LE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == ABS_EXPR
	  && ! TREE_SIDE_EFFECTS (arg0)
	  && (0 != (tem = negate_expr (arg1)))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				build2 (GE_EXPR, type,
					TREE_OPERAND (arg0, 0), tem),
				build2 (LE_EXPR, type,
					TREE_OPERAND (arg0, 0), arg1));

      /* Convert ABS_EXPR<x> >= 0 to true.  */
      strict_overflow_p = false;
      if (code == GE_EXPR
	  && (integer_zerop (arg1)
	      || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
		  && real_zerop (arg1)))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
	}

      /* Convert ABS_EXPR<x> < 0 to false.  */
      strict_overflow_p = false;
      if (code == LT_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
	}

      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
	 and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TREE_CODE (arg1) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (arg1, 0)))
	{
	  tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
				TREE_OPERAND (arg1, 1)),
			build_int_cst (TREE_TYPE (arg0), 0));
	  goto fold_binary_exit;
	}
13134 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
13135 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
13136 && CONVERT_EXPR_P (arg1
)
13137 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == LSHIFT_EXPR
13138 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0)))
13140 tem
= build2 (code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
13141 fold_convert_loc (loc
, TREE_TYPE (arg0
),
13142 build2 (RSHIFT_EXPR
,
13143 TREE_TYPE (arg0
), arg0
,
13144 TREE_OPERAND (TREE_OPERAND (arg1
, 0),
13146 build_int_cst (TREE_TYPE (arg0
), 0));
13147 goto fold_binary_exit
;
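
      /* Illustration (added commentary, not part of the original source):
	 for unsigned X, X < (1u << Y) becomes (X >> Y) == 0 and
	 X >= (1u << Y) becomes (X >> Y) != 0, replacing a comparison
	 against a computed bound with a shift and a compare against
	 zero.  */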
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
	{
	  t1 = fold_relational_const (code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
	}

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand_loc (loc, type, t1, arg1);
	}

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand_loc (loc, type, t1, arg0);
	}

      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
	  && !flag_trapping_math
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (0, type);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
	tree targ0 = strip_float_extensions (arg0);
	tree targ1 = strip_float_extensions (arg1);
	tree newtype = TREE_TYPE (targ0);

	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	  newtype = TREE_TYPE (targ1);

	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, newtype, targ0),
				  fold_convert_loc (loc, newtype, targ1));
      }

      return NULL_TREE;
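
      /* Illustration (added commentary, not part of the original source):
	 with float operands f1 and f2, the extension-stripping fold above
	 turns

	     (double) f1 < (double) f2    into    f1 < f2

	 since widening both operands to a common wider format cannot
	 change the result of the comparison.  */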
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
	 nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
				 : fold_convert_loc (loc, type, arg1);
      return pedantic_non_lvalue_loc (loc, tem);
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg0) == REAL_CST
	   && TREE_CODE (arg1) == REAL_CST)
	  || (TREE_CODE (arg0) == INTEGER_CST
	      && TREE_CODE (arg1) == INTEGER_CST))
	return build_complex (type, arg0, arg1);
      return NULL_TREE;

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();

    default:
      return NULL_TREE;
    } /* switch (code) */
 fold_binary_exit:
  protected_set_expr_location (tem, loc);
  return tem;
}
/* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
   a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
   of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;

    case GOTO_EXPR:
      *walk_subtrees = 0;

      /* ... fall through ...  */

    default:
      return NULL_TREE;
    }
}

/* Return whether the sub-tree ST contains a label which is accessible from
   outside the sub-tree.  */

static bool
contains_label_p (tree st)
{
  return
   (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
}
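
/* Illustration (added commentary, not part of the original source): with
   the GNU statement-expression extension, an expression such as

       c ? x : ({ err: cleanup (); y; })

   must not have its unused arm discarded when C is a constant, because
   the label "err" may be the target of a goto from outside the
   expression.  contains_label_p is what protects that arm in the
   COND_EXPR folding below.  */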
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary_loc (location_t loc, enum tree_code code, tree type,
		  tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 3);

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any cases,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  if (op2)
    {
      arg2 = op2;
      STRIP_NOPS (arg2);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx,
				    field, value)
	    if (field == arg1)
	      return value;
	}
      return NULL_TREE;

    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
	 so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
	  tem = integer_zerop (arg0) ? op2 : op1;
	  /* Only optimize constant conditions when the selected branch
	     has the same type as the COND_EXPR.  This avoids optimizing
	     away "c ? x : throw", where the throw has a void type.
	     Avoid throwing away that operand which contains label.  */
	  if ((!TREE_SIDE_EFFECTS (unused_op)
	       || !contains_label_p (unused_op))
	      && (! VOID_TYPE_P (TREE_TYPE (tem))
		  || VOID_TYPE_P (type)))
	    return pedantic_non_lvalue_loc (loc, tem);
	  return NULL_TREE;
	}

      if (operand_equal_p (arg1, op2, 0))
	return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
      /* If we have A op B ? A : C, we may be able to convert this to a
	 simpler expression, depending on the operation and the values
	 of B and C.  Signed zeros prevent all of these transformations,
	 for reasons given above each one.

	 Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     arg1, TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
	{
	  tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
	  if (tem)
	    return tem;
	}

      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     op2,
					     TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
	{
	  tem = fold_truth_not_expr (loc, arg0);
	  if (tem && COMPARISON_CLASS_P (tem))
	    {
	      tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
	      if (tem)
		return tem;
	    }
	}
      /* If the second operand is simpler than the third, swap them
	 since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
	  && tree_swap_operands_p (op1, op2, false))
	{
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = fold_truth_not_expr (loc, arg0);
	  if (tem)
	    return fold_build3_loc (loc, code, type, tem, op2, op1);
	}

      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (op1)
	  && integer_zerop (op2)
	  /* If we try to convert OP0 to our type, the
	     call to fold will try to move the conversion inside
	     a COND, which will recurse.  In that case, the COND_EXPR
	     is probably the best choice, so leave it alone.  */
	  && type == TREE_TYPE (arg0))
	return pedantic_non_lvalue_loc (loc, arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
	 over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
	  && integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0)))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
					  invert_truthvalue_loc (loc,
								 arg0)));
      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
	{
	  /* sign_bit_p only checks ARG1 bits within A's precision.
	     If <sign bit of A> has wider type than A, bits outside
	     of A's precision in <sign bit of A> need to be checked.
	     If they are all 0, this optimization needs to be done
	     in unsigned A's type, if they are all 1 in signed A's type,
	     otherwise this can't be done.  */
	  if (TYPE_PRECISION (TREE_TYPE (tem))
	      < TYPE_PRECISION (TREE_TYPE (arg1))
	      && TYPE_PRECISION (TREE_TYPE (tem))
		 < TYPE_PRECISION (type))
	    {
	      unsigned HOST_WIDE_INT mask_lo;
	      HOST_WIDE_INT mask_hi;
	      int inner_width, outer_width;
	      tree tem_type;

	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
	      if (outer_width > TYPE_PRECISION (type))
		outer_width = TYPE_PRECISION (type);

	      if (outer_width > HOST_BITS_PER_WIDE_INT)
		{
		  mask_hi = ((unsigned HOST_WIDE_INT) -1
			     >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
		  mask_lo = -1;
		}
	      else
		{
		  mask_hi = 0;
		  mask_lo = ((unsigned HOST_WIDE_INT) -1
			     >> (HOST_BITS_PER_WIDE_INT - outer_width));
		}
	      if (inner_width > HOST_BITS_PER_WIDE_INT)
		{
		  mask_hi &= ~((unsigned HOST_WIDE_INT) -1
			       >> (HOST_BITS_PER_WIDE_INT - inner_width));
		  mask_lo = 0;
		}
	      else
		mask_lo &= ~((unsigned HOST_WIDE_INT) -1
			     >> (HOST_BITS_PER_WIDE_INT - inner_width));

	      if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
		  && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
		{
		  tem_type = signed_type_for (TREE_TYPE (tem));
		  tem = fold_convert_loc (loc, tem_type, tem);
		}
	      else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
		       && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
		{
		  tem_type = unsigned_type_for (TREE_TYPE (tem));
		  tem = fold_convert_loc (loc, tem_type, tem);
		}
	      else
		tem = NULL;
	    }

	  if (tem)
	    return
	      fold_convert_loc (loc, type,
				fold_build2_loc (loc, BIT_AND_EXPR,
						 TREE_TYPE (tem), tem,
						 fold_convert_loc (loc,
								   TREE_TYPE (tem),
								   arg0)));
	}
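
      /* Illustration (added commentary, not part of the original source):
	 for a signed 32-bit A, the fold above turns

	     A < 0 ? 0x80000000 : 0    into    A & 0x80000000

	 because the value selected by the true arm is exactly the sign
	 bit of A.  */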
      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
	 already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && integer_pow2p (arg1))
	{
	  tree tem = TREE_OPERAND (arg0, 0);
	  STRIP_NOPS (tem);
	  if (TREE_CODE (tem) == RSHIFT_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
	      && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
		 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
	    return fold_build2_loc (loc, BIT_AND_EXPR, type,
				    TREE_OPERAND (tem, 0), arg1);
	}
      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
	 is probably obsolete because the first operand should be a
	 truth value (that's why we have the two cases above), but let's
	 leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
	  && TREE_CODE (arg0) == NE_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_pow2p (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      arg1, OEP_ONLY_CONST))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  TREE_OPERAND (arg0, 0)));
      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				fold_convert_loc (loc, type, arg0),
				arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	{
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_truth_not_expr (loc, arg0);
	  if (tem)
	    return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
				    fold_convert_loc (loc, type, tem),
				    arg1);
	}

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	{
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_truth_not_expr (loc, arg0);
	  if (tem)
	    return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				    fold_convert_loc (loc, type, tem),
				    op2);
	}

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (integer_onep (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
				fold_convert_loc (loc, type, arg0),
				op2);

      return NULL_TREE;
    case CALL_EXPR:
      /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
	 of fold_ternary on them.  */
      gcc_unreachable ();

    case BIT_FIELD_REF:
      if ((TREE_CODE (arg0) == VECTOR_CST
	   || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
	  && type == TREE_TYPE (TREE_TYPE (arg0)))
	{
	  unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
	  unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);

	  if (width != 0
	      && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
	      && (idx % width) == 0
	      && (idx = idx / width)
		 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
	    {
	      tree elements = NULL_TREE;

	      if (TREE_CODE (arg0) == VECTOR_CST)
		elements = TREE_VECTOR_CST_ELTS (arg0);
	      else
		{
		  unsigned HOST_WIDE_INT idx;
		  tree value;

		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0),
					      idx, value)
		    elements = tree_cons (NULL_TREE, value, elements);
		}
	      while (idx-- > 0 && elements)
		elements = TREE_CHAIN (elements);
	      if (elements)
		return TREE_VALUE (elements);
	      else
		return build_zero_cst (type);
	    }
	}

      /* A bit-field-ref that referenced the full argument can be stripped.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	  && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
	  && integer_zerop (op2))
	return fold_convert_loc (loc, type, arg0);

      return NULL_TREE;

    case FMA_EXPR:
      /* For integers we can decompose the FMA if possible.  */
      if (TREE_CODE (arg0) == INTEGER_CST
	  && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, PLUS_EXPR, type,
				const_binop (MULT_EXPR, arg0, arg1), arg2);
      if (integer_zerop (arg2))
	return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);

      return fold_fma (loc, type, arg0, arg1, arg2);

    default:
      return NULL_TREE;
    } /* switch (code) */
}
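
/* Illustration (added commentary, not part of the original source): two of
   the fold_ternary_loc cases above, written in source form.  With truth
   values a and b,

       a ? b : 0    folds to    a && b

   and an FMA_EXPR with a zero addend, FMA (x, y, 0), folds to x * y.  */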
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;
  location_t loc = EXPR_LOCATION (expr);

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  /* CALL_EXPR-like objects with variable numbers of operands are
     treated specially.  */
  if (kind == tcc_vl_exp)
    {
      if (code == CALL_EXPR)
	{
	  tem = fold_call_expr (loc, expr, false);
	  return tem ? tem : expr;
	}
      return expr;
    }

  if (IS_EXPR_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  op0 = TREE_OPERAND (t, 0);
	  tem = fold_unary_loc (loc, code, type, op0);
	  return tem ? tem : expr;
	case 2:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  tem = fold_binary_loc (loc, code, type, op0, op1);
	  return tem ? tem : expr;
	case 3:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  op2 = TREE_OPERAND (t, 2);
	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
	  return tem ? tem : expr;
	default:
	  break;
	}
    }

  switch (code)
    {
    case ARRAY_REF:
      {
	tree op0 = TREE_OPERAND (t, 0);
	tree op1 = TREE_OPERAND (t, 1);

	if (TREE_CODE (op1) == INTEGER_CST
	    && TREE_CODE (op0) == CONSTRUCTOR
	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
	  {
	    VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
	    unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
	    unsigned HOST_WIDE_INT begin = 0;

	    /* Find a matching index by means of a binary search.  */
	    while (begin != end)
	      {
		unsigned HOST_WIDE_INT middle = (begin + end) / 2;
		tree index = VEC_index (constructor_elt, elts, middle)->index;

		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_lt (index, op1))
		  begin = middle + 1;
		else if (TREE_CODE (index) == INTEGER_CST
			 && tree_int_cst_lt (op1, index))
		  end = middle;
		else if (TREE_CODE (index) == RANGE_EXPR
			 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
		  begin = middle + 1;
		else if (TREE_CODE (index) == RANGE_EXPR
			 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
		  end = middle;
		else
		  return VEC_index (constructor_elt, elts, middle)->value;
	      }
	  }

	return t;
      }

    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
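
#if 0
/* Illustration only (added; not part of the original source): a
   hypothetical caller of the entry point above.  build_int_cst and build2
   are the usual tree constructors; folding the 2 + 3 PLUS_EXPR yields an
   INTEGER_CST.  */
static tree
fold_example (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  tree sum = build2 (PLUS_EXPR, integer_type_node, two, three);
  return fold (sum);	/* An INTEGER_CST with value 5.  */
}
#endif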
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
static void fold_check_failed (const_tree, const_tree);
void print_fold_checksum (const_tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}
void
print_fold_checksum (const_tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_delete (ht);
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}

static void
fold_check_failed (const_tree expr ATTRIBUTE_UNUSED,
		   const_tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
static void
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
{
  void **slot;
  enum tree_code code;
  union tree_node buf;
  int i, len;

 recursive_label:

  gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
	       <= sizeof (struct tree_function_decl))
	      && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
  if (expr == NULL)
    return;
  slot = (void **) htab_find_slot (ht, expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = CONST_CAST_TREE (expr);
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
      expr = (tree) &buf;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr)
	       || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)
	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
	       || TYPE_NEXT_VARIANT (expr)))
    {
      /* Allow these fields to be modified.  */
      tree tmp;
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = tmp = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
      TYPE_POINTER_TO (tmp) = NULL;
      TYPE_REFERENCE_TO (tmp) = NULL;
      TYPE_NEXT_VARIANT (tmp) = NULL;
      if (TYPE_CACHED_VALUES_P (tmp))
	{
	  TYPE_CACHED_VALUES_P (tmp) = 0;
	  TYPE_CACHED_VALUES (tmp) = NULL;
	}
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST
      && code != SSA_NAME)
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  expr = TREE_CHAIN (expr);
	  goto recursive_label;
	  break;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
    case tcc_vl_exp:
      len = TREE_OPERAND_LENGTH (expr);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
	{
	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
	}
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
	fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);

      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
	{
	  fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
	  fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
	}
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
/* Helper function for outputting the checksum of a tree T.  When
   debugging with gdb, you can "define mynext" to be "next" followed
   by "call debug_fold_checksum (op0)", then just trace down till the
   outputs differ.  */

DEBUG_FUNCTION void
debug_fold_checksum (const_tree t)
{
  int i;
  unsigned char checksum[16];
  struct md5_ctx ctx;
  htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);

  md5_init_ctx (&ctx);
  fold_checksum_tree (t, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_empty (ht);

  for (i = 0; i < 16; i++)
    fprintf (stderr, "%d ", checksum[i]);

  fprintf (stderr, "\n");
}

#endif
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  LOC is the location of the resulting expression.
   Return a folded expression if successful.  Otherwise, return a tree
   expression with code CODE of type TYPE with an operand OP0.  */

tree
fold_build1_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);
#endif

  tem = fold_unary_loc (loc, code, type, op0);
  if (!tem)
    {
      tem = build1_stat (code, type, op0 PASS_MEM_STAT);
      SET_EXPR_LOCATION (tem, loc);
    }

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting
   expression.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with operands
   OP0 and OP1.  */

tree
fold_build2_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0, tree op1
		      MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_after_op0[16],
		checksum_after_op1[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);
#endif

  tem = fold_binary_loc (loc, code, type, op0, op1);
  if (!tem)
    {
      tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
      SET_EXPR_LOCATION (tem, loc);
    }

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_delete (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}
/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
		      tree op0, tree op1, tree op2 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_before_op2[16],
		checksum_after_op0[16],
		checksum_after_op1[16],
		checksum_after_op2[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  htab_empty (ht);
#endif

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
  if (!tem)
    {
      tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
      SET_EXPR_LOCATION (tem, loc);
    }

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_empty (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op2);
  htab_delete (ht);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
   arguments in ARGARRAY, and a null static chain.
   Return a folded expression if successful.  Otherwise, return a CALL_EXPR
   of type TYPE from the given operands as constructed by build_call_array.  */

tree
fold_build_call_array_loc (location_t loc, tree type, tree fn,
			   int nargs, tree *argarray)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_fn[16],
		checksum_before_arglist[16],
		checksum_after_fn[16],
		checksum_after_arglist[16];
  struct md5_ctx ctx;
  htab_t ht;
  int i;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_fn);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_arglist);
  htab_empty (ht);
#endif

  tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_fn);
  htab_empty (ht);

  if (memcmp (checksum_before_fn, checksum_after_fn, 16))
    fold_check_failed (fn, tem);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_arglist);
  htab_delete (ht);

  if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
    fold_check_failed (NULL_TREE, tem);
#endif
  return tem;
}
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;

tree
fold_build1_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1_loc (loc, code, type, op);

  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2_loc (loc, code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

tree
fold_build3_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op0, tree op1, tree op2)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build3_loc (loc, code, type, op0, op1, op2);

  END_FOLD_INIT;
  return result;
}

tree
fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
				       int nargs, tree *argarray)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
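
#if 0
/* Illustration only (added; not part of the original source): a
   hypothetical caller of the initializer variants above.  Because
   START_FOLD_INIT clears flag_trapping_math, flag_trapv, etc. for the
   duration of the call, operations that would otherwise have to be
   preserved for their run-time traps can still be folded in a static
   initializer.  */
static tree
fold_init_example (location_t loc, tree a, tree b)
{
  return fold_build2_initializer_loc (loc, PLUS_EXPR, integer_type_node,
				      a, b);
}
#endif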
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

int
multiple_of_p (tree type, const_tree top, const_tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
	return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
	      > TREE_INT_CST_LOW (op1)
	      && TREE_INT_CST_HIGH (op1) == 0
	      && 0 != (t1 = fold_convert (type,
					  const_binop (LSHIFT_EXPR,
						       size_one_node,
						       op1)))
	      && !TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    CASE_CONVERT:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case COND_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
	  || integer_zerop (bottom)
	  || (TYPE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
					     top, bottom, 0));

    default:
      return 0;
    }
}
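
/* Illustration (added commentary, not part of the original source): for a
   non-constant tree I, a call such as

       multiple_of_p (sizetype,
		      build2 (MULT_EXPR, sizetype, I, size_int (8)),
		      size_int (4))

   returns 1: the MULT_EXPR case succeeds because its constant operand 8
   is itself a multiple of 4.  */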
/* Return true if CODE or TYPE is known to be non-negative.  */

static bool
tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
{
  if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
      && truth_value_p (code))
    /* Truth values evaluate to 0 or 1, which is nonnegative unless we
       have a signed:1 type (where the value is -1 and 0).  */
    return true;

  return false;
}

/* Return true if (CODE OP0) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
	 ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!INTEGRAL_TYPE_P (type))
	return true;
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  *strict_overflow_p = true;
	  return true;
	}
      break;

    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
					    strict_overflow_p);

    CASE_CONVERT:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (op0,
						    strict_overflow_p);
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      {
		if (TYPE_UNSIGNED (inner_type))
		  return true;
		return tree_expr_nonnegative_warnv_p (op0,
						      strict_overflow_p);
	      }
	  }
	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (op0,
						    strict_overflow_p);
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TYPE_UNSIGNED (inner_type);
	  }
      }
      break;

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				 tree op1, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type))
	return (tree_expr_nonnegative_warnv_p (op0,
					       strict_overflow_p)
		&& tree_expr_nonnegative_warnv_p (op1,
						  strict_overflow_p));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == NOP_EXPR
	  && TREE_CODE (op1) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    {
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (type);
	    }
	}
      break;
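
      /* Illustration (added commentary, not part of the original source):
	 with 32-bit int and unsigned char x, y, the sum

	     (int) x + (int) y

	 needs at most 9 bits (255 + 255 = 510), and 9 < 32, so it is
	 known non-negative.  */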
    case MULT_EXPR:
      if (FLOAT_TYPE_P (type))
	{
	  /* x * x for floating point x is always non-negative.  */
	  if (operand_equal_p (op0, op1, 0))
	    return true;
	  return (tree_expr_nonnegative_warnv_p (op0,
						 strict_overflow_p)
		  && tree_expr_nonnegative_warnv_p (op1,
						    strict_overflow_p));
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
	  && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
	{
	  tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
			? TREE_TYPE (TREE_OPERAND (op0, 0))
			: TREE_TYPE (op0);
	  tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
			? TREE_TYPE (TREE_OPERAND (op1, 0))
			: TREE_TYPE (op1);

	  bool unsigned0 = TYPE_UNSIGNED (inner0);
	  bool unsigned1 = TYPE_UNSIGNED (inner1);

	  if (TREE_CODE (op0) == INTEGER_CST)
	    unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;

	  if (TREE_CODE (op1) == INTEGER_CST)
	    unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;

	  if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
	      && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
	    {
	      unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
		? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
		: TYPE_PRECISION (inner0);

	      unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
		? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
		: TYPE_PRECISION (inner1);

	      return precision0 + precision1 < TYPE_PRECISION (type);
	    }
	}
      break;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
					     strict_overflow_p)
	      || tree_expr_nonnegative_warnv_p (op1,
						strict_overflow_p));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (op1,
						strict_overflow_p));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
					    strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
						strict_overflow_p));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_call_nonnegative_warnv_p (tree type, tree fndecl,
			       tree arg0, tree arg1, bool *strict_overflow_p)
{
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
	CASE_FLT_FN (BUILT_IN_ACOS):
	CASE_FLT_FN (BUILT_IN_ACOSH):
	CASE_FLT_FN (BUILT_IN_CABS):
	CASE_FLT_FN (BUILT_IN_COSH):
	CASE_FLT_FN (BUILT_IN_ERFC):
	CASE_FLT_FN (BUILT_IN_EXP):
	CASE_FLT_FN (BUILT_IN_EXP10):
	CASE_FLT_FN (BUILT_IN_EXP2):
	CASE_FLT_FN (BUILT_IN_FABS):
	CASE_FLT_FN (BUILT_IN_FDIM):
	CASE_FLT_FN (BUILT_IN_HYPOT):
	CASE_FLT_FN (BUILT_IN_POW10):
	CASE_INT_FN (BUILT_IN_FFS):
	CASE_INT_FN (BUILT_IN_PARITY):
	CASE_INT_FN (BUILT_IN_POPCOUNT):
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
	/* Always true.  */
	return true;

	CASE_FLT_FN (BUILT_IN_SQRT):
	/* sqrt(-0.0) is -0.0.  */
	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	  return true;
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_ASINH):
	CASE_FLT_FN (BUILT_IN_ATAN):
	CASE_FLT_FN (BUILT_IN_ATANH):
	CASE_FLT_FN (BUILT_IN_CBRT):
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_ERF):
	CASE_FLT_FN (BUILT_IN_EXPM1):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_FMOD):
	CASE_FLT_FN (BUILT_IN_FREXP):
	CASE_FLT_FN (BUILT_IN_LCEIL):
	CASE_FLT_FN (BUILT_IN_LDEXP):
	CASE_FLT_FN (BUILT_IN_LFLOOR):
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	CASE_FLT_FN (BUILT_IN_LLRINT):
	CASE_FLT_FN (BUILT_IN_LLROUND):
	CASE_FLT_FN (BUILT_IN_LRINT):
	CASE_FLT_FN (BUILT_IN_LROUND):
	CASE_FLT_FN (BUILT_IN_MODF):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_SCALB):
	CASE_FLT_FN (BUILT_IN_SCALBLN):
	CASE_FLT_FN (BUILT_IN_SCALBN):
	CASE_FLT_FN (BUILT_IN_SIGNBIT):
	CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
	CASE_FLT_FN (BUILT_IN_SINH):
	CASE_FLT_FN (BUILT_IN_TANH):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	/* True if the 1st argument is nonnegative.  */
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_FMAX):
	/* True if the 1st OR 2nd arguments are nonnegative.  */
	return (tree_expr_nonnegative_warnv_p (arg0,
					       strict_overflow_p)
		|| (tree_expr_nonnegative_warnv_p (arg1,
						   strict_overflow_p)));

	CASE_FLT_FN (BUILT_IN_FMIN):
	/* True if the 1st AND 2nd arguments are nonnegative.  */
	return (tree_expr_nonnegative_warnv_p (arg0,
					       strict_overflow_p)
		&& (tree_expr_nonnegative_warnv_p (arg1,
						   strict_overflow_p)));

	CASE_FLT_FN (BUILT_IN_COPYSIGN):
	/* True if the 2nd argument is nonnegative.  */
	return tree_expr_nonnegative_warnv_p (arg1,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_POWI):
	/* True if the 1st argument is nonnegative or the second
	   argument is an even integer.  */
	if (TREE_CODE (arg1) == INTEGER_CST
	    && (TREE_INT_CST_LOW (arg1) & 1) == 0)
	  return true;
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_POW):
	/* True if the 1st argument is nonnegative or the second
	   argument is an even integer valued real.  */
	if (TREE_CODE (arg1) == REAL_CST)
	  {
	    REAL_VALUE_TYPE c;
	    HOST_WIDE_INT n;

	    c = TREE_REAL_CST (arg1);
	    n = real_to_integer (&c);
	    if ((n & 1) == 0)
	      {
		REAL_VALUE_TYPE cint;
		real_from_integer (&cint, VOIDmode, n,
				   n < 0 ? -1 : 0, 0);
		if (real_identical (&c, &cint))
		  return true;
	      }
	  }
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

      default:
	break;
      }
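
  /* Illustration (added commentary, not part of the original source):
     pow (x, 2.0) is known non-negative for any x, because 2.0 is an even
     integer valued real; pow (x, 3.0) is non-negative only if x is.  */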
  return tree_simple_nonnegative_warnv_p (CALL_EXPR,
					  type);
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

static bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
	tree temp = TARGET_EXPR_SLOT (t);
	t = TARGET_EXPR_INITIAL (t);

	/* If the initializer is non-void, then it's a normal expression
	   that will be assigned to the slot.  */
	if (!VOID_TYPE_P (t))
	  return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

	/* Otherwise, the initializer sets the slot in some way.  One common
	   way is an assignment statement at the end of the initializer.  */
	while (1)
	  {
	    if (TREE_CODE (t) == BIND_EXPR)
	      t = expr_last (BIND_EXPR_BODY (t));
	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
		     || TREE_CODE (t) == TRY_CATCH_EXPR)
	      t = expr_last (TREE_OPERAND (t, 0));
	    else if (TREE_CODE (t) == STATEMENT_LIST)
	      t = expr_last (t);
	    else
	      break;
	  }
	if (TREE_CODE (t) == MODIFY_EXPR
	    && TREE_OPERAND (t, 0) == temp)
	  return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						strict_overflow_p);

	return false;
      }

    case CALL_EXPR:
      {
	tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
	tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;

	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
					      get_callee_fndecl (t),
					      arg0,
					      arg1,
					      strict_overflow_p);
      }
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					    strict_overflow_p);
    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
					    strict_overflow_p);
    case SAVE_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
					    strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code;
  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);
    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
    }
}

/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-negative"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
/* Return true when (CODE OP0) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
			    bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    CASE_CONVERT:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
		&& tree_expr_nonzero_warnv_p (op0,
					      strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    default:
      break;
    }

  return false;
}
/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonzero_warnv_p (enum tree_code code,
			     tree type,
			     tree op0,
			     tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* With the presence of negative values it is hard
	     to say something.  */
	  sub_strict_overflow_p = false;
	  if (!tree_expr_nonnegative_warnv_p (op0,
					      &sub_strict_overflow_p)
	      || !tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	    return false;
	  /* One of operands must be positive and the other non-negative.  */
	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
	     overflows, on a twos-complement machine the sum of two
	     nonnegative numbers can never be zero.  */
	  return (tree_expr_nonzero_warnv_p (op0,
					     strict_overflow_p)
		  || tree_expr_nonzero_warnv_p (op1,
						strict_overflow_p));
	}
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  if (tree_expr_nonzero_warnv_p (op0,
					 strict_overflow_p)
	      && tree_expr_nonzero_warnv_p (op1,
					    strict_overflow_p))
	    {
	      *strict_overflow_p = true;
	      return true;
	    }
	}
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (op1,
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;

	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p))
	    return true;

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_warnv_p (op0,
						strict_overflow_p);
	}
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
					  &sub_strict_overflow_p)
	       && tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p)
	      || tree_expr_nonzero_warnv_p (op0,
					    strict_overflow_p));

    default:
      break;
    }

  return false;
}
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
	tree base = TREE_OPERAND (t, 0);
	if (!DECL_P (base))
	  base = get_base_address (base);

	if (!base)
	  return false;

	/* Weak declarations may link to NULL.  Other things may also be NULL
	   so protect with -fdelete-null-pointer-checks; but not variables
	   allocated on the stack.  */
	if (DECL_P (base)
	    && (flag_delete_null_pointer_checks
		|| (DECL_CONTEXT (base)
		    && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
		    && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
	  return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);

	/* Constants are never weak.  */
	if (CONSTANT_CLASS_P (base))
	  return true;

	return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    default:
      break;
    }

  return false;
}
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					strict_overflow_p);

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }
  return false;
}
/* Return true when T is an expression known to be nonzero.  Handle
   warnings about undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-zero"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
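
/* Editorial usage sketch (hypothetical call, not from the original
   sources):

     tree two   = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree sum   = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
					   two, three);

   Here SUM would be an INTEGER_CST with value 5; had the operands not
   folded to a constant, SUM would be NULL_TREE instead.  */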
/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop_loc (loc, index,
				     fold_convert_loc (loc, sizetype,
						       low_bound));

	  string = exp1;
	}

      if (string
	  && TYPE_MODE (TREE_TYPE (exp))
	     == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return build_int_cst_type (TREE_TYPE (exp),
				   (TREE_STRING_POINTER (string)
				    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
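
/* Editorial note (illustration, not part of the original sources): for
   the C expression "abc"[1], EXP is an ARRAY_REF of a STRING_CST with
   constant index 1; the guards above verify that the element type has a
   single-byte integer mode and the index is within TREE_STRING_LENGTH,
   and the function then returns build_int_cst_type of the character 'b'
   read directly out of TREE_STRING_POINTER.  */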
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	double_int val = tree_to_double_int (arg0);
	int overflow = neg_double (val.low, val.high, &val.low, &val.high);

	t = force_fit_type_double (type, val, 1,
				   (overflow | TREE_OVERFLOW (arg0))
				   && !TYPE_UNSIGNED (type));
	break;
      }

    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
	FIXED_VALUE_TYPE f;
	bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
					    &(TREE_FIXED_CST (arg0)), NULL,
					    TYPE_SATURATING (type));
	t = build_fixed (type, f);
	/* Propagate overflow flags.  */
	if (overflow_p | TREE_OVERFLOW (arg0))
	  TREE_OVERFLOW (t) = 1;
	break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
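
/* Editorial note (illustration, not part of the original sources): in
   the INTEGER_CST case the only delicate input is the most negative
   value, e.g. -2147483648 for a 32-bit int, whose negation wraps back to
   itself; neg_double reports that wrap, and force_fit_type_double then
   marks the result with TREE_OVERFLOW for signed types.  */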
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	double_int val = tree_to_double_int (arg0);

	/* If the value is unsigned or non-negative, then the absolute value
	   is the same as the ordinary value.  */
	if (TYPE_UNSIGNED (type)
	    || !double_int_negative_p (val))
	  t = arg0;

	/* If the value is negative, then the absolute value is
	   its negation.  */
	else
	  {
	    int overflow;

	    overflow = neg_double (val.low, val.high, &val.low, &val.high);
	    t = force_fit_type_double (type, val, -1,
				       overflow | TREE_OVERFLOW (arg0));
	  }
      }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
	t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (const_tree arg0, tree type)
{
  double_int val;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  val = double_int_not (tree_to_double_int (arg0));
  return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
}
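
/* Editorial note (illustration, not part of the original sources):
   double_int_not is a plain bitwise complement of the two-word value, so
   for a signed type ~0 folds to -1 and ~5 folds to -6; complement can
   never overflow, which is why only TREE_OVERFLOW (arg0) is
   propagated.  */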
/* Given CODE, a relational operator, the target type, TYPE, and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
					  TREE_REALPART (op0),
					  TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
					  TREE_IMAGPART (op0),
					  TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
	return NULL_TREE;
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
	result = INT_CST_LT_UNSIGNED (op0, op1);
      else
	result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
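
/* Editorial walk-through of the canonicalization above (illustration,
   not part of the original sources), for the INTEGER_CST comparison
   2 > 1:

     GT_EXPR (2, 1)
       -> swap operands and swap the code:  LT_EXPR (1, 2)
       -> INT_CST_LT (1, 2) == 1
       -> constant_boolean_node (1, type)

   GE_EXPR would instead be inverted to LT_EXPR and the result XOR-ed
   with 1 at the end.  */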
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to
     wrap it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a RETURN_EXPR, check whether its operand, or the
     right-hand side of the MODIFY_EXPR inside it, has side effects.  If
     either one does not, there is nothing for a cleanup point to protect,
     so return EXPR unwrapped.  Note we don't check the left-hand side of
     the MODIFY_EXPR because it should always be the return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  op0 = build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	  SET_EXPR_LOCATION (op0, loc);
	  return op0;
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op,
				  part_width, index);
	}
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
	{
	  tree op00type;
	  op00 = TREE_OPERAND (op00, 0);
	  op00type = TREE_TYPE (op00);

	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
	  if (TREE_CODE (op00type) == VECTOR_TYPE
	      && type == TREE_TYPE (op00type))
	    {
	      HOST_WIDE_INT offset = tree_low_cst (op01, 0);
	      tree part_width = TYPE_SIZE (type);
	      unsigned HOST_WIDE_INT part_widthi
		= tree_low_cst (part_width, 0) / BITS_PER_UNIT;
	      unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	      tree index = bitsize_int (indexi);

	      if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
		return fold_build3_loc (loc,
					BIT_FIELD_REF, type, op00,
					part_width, index);
	    }
	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree size = TYPE_SIZE_UNIT (type);
	      if (tree_int_cst_equal (size, op01))
		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
	    }
	  /* ((foo *)&fooarray)[1] => fooarray[1] */
	  else if (TREE_CODE (op00type) == ARRAY_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree type_domain = TYPE_DOMAIN (op00type);
	      tree min_val = size_zero_node;
	      if (type_domain && TYPE_MIN_VALUE (type_domain))
		min_val = TYPE_MIN_VALUE (type_domain);
	      op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
				     TYPE_SIZE_UNIT (type));
	      op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
	      op0 = build4 (ARRAY_REF, type, op00, op01,
			    NULL_TREE, NULL_TREE);
	      SET_EXPR_LOCATION (op0, loc);
	      return op0;
	    }
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype)))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      op0 = build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
      SET_EXPR_LOCATION (op0, loc);
      return op0;
    }

  return NULL_TREE;
}
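
/* Editorial note (illustration, not part of the original sources): the
   POINTER_PLUS_EXPR cases above fold source-level accesses such as

     _Complex double z;   ((double *) &z)[1]             -> __imag__ z
     double v[4];         *(double *) ((char *) &v + 16) -> v[2]

   where the constant byte offset is divided by the element size
   (EXACT_DIV_EXPR) to recover the array index.  */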
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  t = build1 (INDIRECT_REF, type, t);
  SET_EXPR_LOCATION (t, loc);
  return t;
}
/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}
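
/* Editorial note (illustration, not part of the original sources): when
   a value is discarded, e.g.  (void) (x + f ());  the tcc_binary case
   peels off the side-effect-free operand, leaving just  f ();  similarly
   (f (), x + 1)  reduces to  f ()  via the COMPOUND_EXPR case, and an
   expression with no side effects at all folds to integer_zero_node.  */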
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  double_int val = tree_to_double_int (value);
	  bool overflow_p;

	  if ((val.low & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  val.low &= ~(divisor - 1);
	  val.low += divisor;
	  if (val.low == 0)
	    {
	      val.high++;
	      if (val.high == 0)
		overflow_p = true;
	    }

	  return force_fit_type_double (TREE_TYPE (value), val,
					-1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), -divisor);
	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
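
/* Editorial sketch of the power-of-two path above as plain C (a minimal
   illustration, not part of the original sources):

     rounded = (value + divisor - 1) & -divisor;

   e.g. rounding 13 up to a multiple of 8 gives (13 + 7) & -8 == 16.
   The INTEGER_CST branch performs the same arithmetic directly on the
   double_int, propagating the carry into the high word and flagging
   overflow if that carry itself wraps around to zero.  */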
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
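
/* Editorial note (illustration, not part of the original sources):
   rounding down to a power of two needs no addition, since it only
   discards low bits:  value & -divisor,  e.g.  13 & -8 == 8.  */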
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
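
/* Editorial usage sketch (hypothetical call, not from the original
   sources): for E1 == &a[3] and E2 == &a[1] both addresses split to the
   common core &a with constant offsets, so

     HOST_WIDE_INT d;
     if (ptr_difference_const (e1, e2, &d))
       ...                -- here d == 2 * sizeof (a[0])

   whereas &a[i] versus &a[1] fails: only one offset is constant.  */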
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
				arg0 ? arg0 : TREE_OPERAND (exp, 0),
				arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
				arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3_loc (loc,
				COND_EXPR, TREE_TYPE (exp),
				TREE_OPERAND (exp, 0),
				arg0 ? arg0 : TREE_OPERAND (exp, 1),
				arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip copysign function call, return the 1st argument.  */
	    arg0 = CALL_EXPR_ARG (exp, 0);
	    arg1 = CALL_EXPR_ARG (exp, 1);
	    return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
		if (arg0)
		  return build_call_expr_loc (loc, get_callee_fndecl (exp),
					      1, arg0);
	      }
	    break;
	  }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
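
/* Editorial note (illustration, not part of the original sources):
   callers use this when only the magnitude of EXP matters, e.g. inside
   fabs:  in  fabs (x * copysign (y, z)),  the copysign call collapses to
   its first argument y, and a NEGATE_EXPR such as -x collapses to x,
   since the enclosing fabs makes the sign of the result irrelevant.  */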