/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "coretypes.h"
#include "diagnostic-core.h"
#include "langhooks.h"
#include "tree-flow.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */

int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
extern tree make_range (tree, int *, tree *, tree *, bool *);
extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
                          tree, tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Similar to protected_set_expr_location, but never modify x in place,
   if location can and needs to be set, unshare it.  */

static tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   most significant bit.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
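
/* A worked example of the macro above (illustrative only, not part of
   the original file): with 8-bit values, a = 0x7f and b = 0x01 give
   sum = 0x80.  The signs of A and B agree, so ~(a ^ b) has the sign
   bit set; A and SUM disagree, so (a ^ sum) has the sign bit set too.
   Their AND is negative, and the macro yields nonzero, correctly
   flagging 127 + 1 as an overflow.  For a = 0x01, b = 0x01,
   sum = 0x02 the sign bit of (a ^ sum) is clear and the macro yields
   zero.  */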
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is according to operand two, that
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
  if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
      && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
    uns = false;

  quo = double_int_divmod (tree_to_double_int (arg1),
                           tree_to_double_int (arg2),
                           uns, code, &rem);

  if (double_int_zero_p (rem))
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
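
/* A minimal usage sketch for the helper above (illustrative only, not
   part of the original file).  */
#if 0
  tree four = build_int_cst (sizetype, 4);
  tree twelve = build_int_cst (sizetype, 12);
  /* Yields the INTEGER_CST 3, since 12 is an exact multiple of 4.  */
  tree q = div_if_zero_remainder (EXACT_DIV_EXPR, twelve, four);
  /* With 13 instead of 12 the remainder is nonzero, so the helper
     returns NULL_TREE and the caller must keep the division tree.  */
#endif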
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}
/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
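
/* A typical calling pattern for the deferral machinery above
   (illustrative sketch only; callers such as the loop iteration
   estimator use this shape so a warning is only issued when the
   folded result is actually used).  */
#if 0
  fold_defer_overflow_warnings ();
  t = fold_binary (PLUS_EXPR, type, a, b);
  /* Issue the deferred warning only if folding produced something.  */
  fold_undefer_overflow_warnings (t != NULL_TREE, stmt, 0);
#endif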
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

static bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
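
/* Worked example (illustrative only): for a signed 8-bit type PREC is
   8 and the rejected bit pattern is 1 << 7 == 0x80, i.e. -128, the one
   value whose negation overflows two's complement.  Any other constant,
   e.g. 0x7f == 127, compares unequal to 0x80 and may be negated
   safely.  */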
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
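
/* Example of the RSHIFT_EXPR case above (illustrative only): for a
   32-bit int, (int) x >> 31 evaluates to 0 or -1, so its negation is
   0 or 1, which is exactly the equally cheap (unsigned) x >> 31.  The
   predicate therefore answers true whenever the shift count equals
   the type precision minus one.  */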
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
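
/* Example of the decomposition above (illustrative only): splitting
   IN = a + 8 with CODE == PLUS_EXPR stores the literal 8 in *LITP,
   leaves *CONP null, and returns the variable part a.  For
   IN = a - 8, still with CODE == PLUS_EXPR, the literal is in a
   subtracted position, so it is stored in *MINUS_LITP instead of
   being negated.  */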
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = double_int_ior (op1, op2);
      break;

    case BIT_XOR_EXPR:
      res = double_int_xor (op1, op2);
      break;

    case BIT_AND_EXPR:
      res = double_int_and (op1, op2);
      break;

    case RSHIFT_EXPR:
      res = double_int_rshift (op1, double_int_to_shwi (op2),
                               TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = double_int_lshift (op1, double_int_to_shwi (op2),
                               TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = double_int_rrotate (op1, double_int_to_shwi (op2),
                                TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = double_int_lrotate (op1, double_int_to_shwi (op2),
                                TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      overflow = add_double (op1.low, op1.high, op2.low, op2.high,
                             &res.low, &res.high);
      break;

    case MINUS_EXPR:
      neg_double (op2.low, op2.high, &res.low, &res.high);
      add_double (op1.low, op1.high, res.low, res.high,
                  &res.low, &res.high);
      overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
      break;

    case MULT_EXPR:
      overflow = mul_double (op1.low, op1.high, op2.low, op2.high,
                             &res.low, &res.high);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (double_int_zero_p (op2))
        return NULL_TREE;
      if (double_int_one_p (op2))
        {
          res = op1;
          break;
        }
      if (double_int_equal_p (op1, op2)
          && ! double_int_zero_p (op1))
        {
          res = double_int_one;
          break;
        }
      overflow = div_and_round_double (code, uns,
                                       op1.low, op1.high, op2.low, op2.high,
                                       &res.low, &res.high,
                                       &tmp.low, &tmp.high);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (double_int_zero_p (op2))
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       op1.low, op1.high, op2.low, op2.high,
                                       &tmp.low, &tmp.high,
                                       &res.low, &res.high);
      break;

    case MIN_EXPR:
      res = double_int_min (op1, op2, uns);
      break;

    case MAX_EXPR:
      res = double_int_max (op1, op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), res.low, res.high);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), res, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
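
/* A minimal usage sketch (illustrative only, not part of the original
   file): folding 5 * 7 at compile time.  */
#if 0
  tree five = build_int_cst (integer_type_node, 5);
  tree seven = build_int_cst (integer_type_node, 7);
  /* t is the INTEGER_CST 35; NULL_TREE would signal a code this
     routine cannot evaluate, e.g. division by a zero constant.  */
  tree t = int_const_binop (MULT_EXPR, five, seven, 0);
#endif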
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, 0);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may dependent upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ...  */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to minimize
                 overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree elements1, elements2, list = NULL_TREE;

      if (TREE_CODE (arg2) != VECTOR_CST)
        return NULL_TREE;

      elements1 = TREE_VECTOR_CST_ELTS (arg1);
      elements2 = TREE_VECTOR_CST_ELTS (arg2);

      for (i = 0; i < count; i++)
        {
          tree elem1, elem2, elem;

          /* The trailing elements can be empty and should be treated as 0.  */
          if (!elements1)
            elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
          else
            {
              elem1 = TREE_VALUE (elements1);
              elements1 = TREE_CHAIN (elements1);
            }

          if (!elements2)
            elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
          else
            {
              elem2 = TREE_VALUE (elements2);
              elements2 = TREE_CHAIN (elements2);
            }

          elem = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and return NULL_TREE.  */
          if (elem == NULL_TREE)
            return NULL_TREE;

          list = tree_cons (NULL_TREE, elem, list);
        }
      return build_vector (type, nreverse (list));
    }

  return NULL_TREE;
}
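
/* Worked example of the wide-range complex division above
   (illustrative only): (3 + 2i) / (4 + 2i).  Since |4| >= |2| the
   FALSE branch runs: ratio = 2/4 = 0.5, div = 4 + 2*0.5 = 5,
   real = (2*0.5 + 3)/5 = 0.8 and imag = (2 - 3*0.5)/5 = 0.1, matching
   the textbook result 0.8 + 0.1i while keeping the intermediate
   magnitudes small.  */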
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
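
/* A minimal usage sketch (illustrative only, not part of the original
   file); size_binop and size_int are the usual wrappers around the
   _loc variants.  */
#if 0
  tree a = size_int (4);
  tree b = size_int (8);
  tree sum = size_binop (PLUS_EXPR, a, b);   /* The INTEGER_CST 12.  */
  /* Adding a zero constant simply hands back the other operand.  */
  tree same = size_binop (PLUS_EXPR, size_zero_node, b);
#endif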
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
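
/* Worked example (illustrative only): with sizetype operands 2 and 5,
   ARG1 > ARG0, so the code subtracts the other way (5 - 2 = 3, which
   cannot overflow in the unsigned type), converts to ssizetype, and
   subtracts from zero, producing the signed constant -3.  */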
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
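
/* Examples of the saturating semantics above (illustrative only):
   converting the REAL_CST 1.0e30 to a 32-bit int yields INT_MAX
   (2147483647) with TREE_OVERFLOW set, -1.0e30 yields INT_MIN, and a
   NaN yields 0, also with the overflow flag set.  */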
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  enum machine_mode mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
    {
      temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
                                      HOST_BITS_PER_DOUBLE_INT,
                                      SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     By checking if the fractional bits are not zero to add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && double_int_negative_p (temp_trunc)
      && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
    temp = double_int_add (temp, double_int_one);

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
                             (double_int_negative_p (temp)
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return fold_build1_loc (loc, NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
          && (TYPE_ADDR_SPACE (TREE_TYPE (type))
              != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
        return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, REALPART_EXPR,
                                                  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == FIXED_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1_loc (loc, NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
          || TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            goto fold_convert_exit;
        }

      switch (TREE_CODE (orig))
        {
        case FIXED_POINT_TYPE:
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case BOOLEAN_TYPE:
        case REAL_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
        case FIXED_POINT_TYPE:
          return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                  fold_convert_loc (loc, TREE_TYPE (type), arg),
                                  fold_convert_loc (loc, TREE_TYPE (type),
                                                    integer_zero_node));

        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 0));
                ipart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 1));
                return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
            ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
            return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      gcc_unreachable ();
    }
 fold_convert_exit:
  return protected_set_expr_location_unshare (tem, loc);
}
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
    case SSA_NAME:

    case COMPONENT_REF:
    case MEM_REF:
    case INDIRECT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
    case OBJ_TYPE_REF:

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case SAVE_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case TARGET_EXPR:
    case COND_EXPR:
    case BIND_EXPR:
      break;

    default:
      /* Assume the worst for front-end tree codes.  */
      if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
        break;
      return false;
    }

  return true;
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue_loc (location_t loc, tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue_loc (location_t loc, tree x)
{
  if (pedantic_lvalues)
    return non_lvalue_loc (loc, x);

  return protected_set_expr_location_unshare (x, loc);
}
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
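
/* Example (illustrative only): inverting LT_EXPR yields GE_EXPR when
   NaNs need not be honored, but UNGE_EXPR when they must be, since
   !(x < y) is "x >= y or unordered" under IEEE semantics.  With both
   HONOR_NANS and flag_trapping_math, ERROR_MARK is returned because
   the unordered forms do not trap where the original comparison
   would.  */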
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case GE_EXPR:
      return COMPCODE_GE;
    case NE_EXPR:
      return COMPCODE_NE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (location_t loc,
                     enum tree_code code, enum tree_code lcode,
                     enum tree_code rcode, tree truth_type,
                     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  int compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
         which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
        compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
        compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
         under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
                   && (lcompcode != COMPCODE_EQ)
                   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
                   && (rcompcode != COMPCODE_EQ)
                   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
                  && (compcode != COMPCODE_EQ)
                  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
         such that the RHS, if evaluated, will never trap.  For
         example, in ORD (x, y) && (x < y), we evaluate the RHS only
         if neither x nor y is NaN.  (This is a mixed blessing: for
         example, the expression above will never trap, hence
         optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
          || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
        rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
         trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
          && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
        return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
        return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    {
      enum tree_code tcode;

      tcode = compcode_to_comparison ((enum comparison_code) compcode);
      return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
    }
}
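
/* A hedged sketch (not part of GCC): combining two comparisons of the
   same operands reduces to bit operations on their compcodes, so
   "a < b || a == b" collapses to "a <= b".  Hypothetical usage,
   assuming A and B are already-built operand trees and TRUTH_TYPE is
   the boolean result type:  */
#if 0
static tree
example_fold_lt_or_eq (location_t loc, tree truth_type, tree a, tree b)
{
  /* Returns the tree "a <= b", or NULL_TREE when e.g. trapping math
     makes the transformation unsafe.  */
  return combine_comparisons (loc, TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
                              truth_type, a, b);
}
#endif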
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.

   If OEP_ALLOW_NULL is set, this routine will not crash on NULL operands,
   and two NULL operands are considered equal.  This flag is usually set
   in the context of frontend when ARG0 and/or ARG1 may be NULL mostly due
   to recursion on partially built expressions (e.g. a CAST_EXPR on a NULL
   tree.)  In this case, we certainly don't want the compiler to crash and
   it's OK to consider two NULL operands equal.  On the other hand, when
   called in the context of code generation and optimization, if NULL
   operands are not expected, silently ignoring them could be dangerous
   and might cause problems downstream that are hard to find/debug.  In that
   case, the flag should probably not be set.  */
int
operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
{
  /* If either is NULL, they must be both NULL to be equal.  We only do this
     check when OEP_ALLOW_NULL is set.  */
  if ((flags & OEP_ALLOW_NULL) && (!arg0 || !arg1))
    return arg0 == arg1;

  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
      || TREE_TYPE (arg0) == error_mark_node
      || TREE_TYPE (arg1) == error_mark_node)
    return 0;

  /* Similar, if either does not have a type (like a released SSA name),
     they aren't equal.  */
  if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
    {
      /* If the caller chooses to allow the comparison of operands without
         types, we will continue the comparison only when both of them don't
         have a type.  */
      if (!(flags & OEP_ALLOW_NO_TYPE) || TREE_TYPE (arg0) || TREE_TYPE (arg1))
        return 0;
    }

  /* Check equality of integer constants before bailing out due to
     precision differences.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    return tree_int_cst_equal (arg0, arg1);

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  As pointers
     strictly don't have a signedness, require either two pointers or
     two non-pointers as well.  */
  if (TREE_TYPE (arg0)
      && (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
          || POINTER_TYPE_P (TREE_TYPE (arg0))
             != POINTER_TYPE_P (TREE_TYPE (arg1))))
    return 0;

  /* We cannot consider pointers to different address space equal.  */
  if (TREE_TYPE (arg0)
      && (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
          && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
              != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1))))))
    return 0;

  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
  if (TREE_TYPE (arg0)
      && (TYPE_PRECISION (TREE_TYPE (arg0))
          != TYPE_PRECISION (TREE_TYPE (arg1))))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
        return operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 1), flags)
               && operand_equal_p (TREE_OPERAND (arg0, 1),
                                   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
         Might as well play it safe and always test this.  */
      || (TREE_TYPE (arg0)
          && (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
              || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
              || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us. In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
          || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
        return tree_int_cst_equal (arg0, arg1);

      case FIXED_CST:
        return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
                                       TREE_FIXED_CST (arg1));

      case REAL_CST:
        if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
                                   TREE_REAL_CST (arg1)))
          return 1;

        if (TREE_TYPE (arg0)
            && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
          {
            /* If we do not distinguish between signed and unsigned zero,
               consider them equal.  */
            if (real_zerop (arg0) && real_zerop (arg1))
              return 1;
          }
        return 0;

      case VECTOR_CST:
        {
          tree v1, v2;

          v1 = TREE_VECTOR_CST_ELTS (arg0);
          v2 = TREE_VECTOR_CST_ELTS (arg1);
          while (v1 && v2)
            {
              if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
                                    flags))
                return 0;
              v1 = TREE_CHAIN (v1);
              v2 = TREE_CHAIN (v2);
            }

          return v1 == v2;
        }

      case COMPLEX_CST:
        return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
                                 flags)
                && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
                                    flags));

      case STRING_CST:
        return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
                && ! memcmp (TREE_STRING_POINTER (arg0),
                             TREE_STRING_POINTER (arg1),
                             TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
        return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
                                0);
      default:
        break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, they both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
                                    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N) \
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
        {
        CASE_CONVERT:
        case FIX_TRUNC_EXPR:
          if (TREE_TYPE (arg0)
              && (TYPE_UNSIGNED (TREE_TYPE (arg0))
                  != TYPE_UNSIGNED (TREE_TYPE (arg1))))
            return 0;
          break;
        default:
          break;
        }

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
        return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 1), flags)
              && operand_equal_p (TREE_OPERAND (arg0, 1),
                                  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
         dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
          || TREE_SIDE_EFFECTS (arg1))
        return 0;

      switch (TREE_CODE (arg0))
        {
        case INDIRECT_REF:
        case REALPART_EXPR:
        case IMAGPART_EXPR:
          return OP_SAME (0);

        case MEM_REF:
          /* Require equal access sizes, and similar pointer types.
             We can have incomplete types for array references of
             variable-sized arrays from the Fortran frontend
             though.  */
          return (TREE_TYPE (arg0)
                  && (TYPE_SIZE (TREE_TYPE (arg0))
                      == TYPE_SIZE (TREE_TYPE (arg1))
                      || (TYPE_SIZE (TREE_TYPE (arg0))
                          && TYPE_SIZE (TREE_TYPE (arg1))
                          && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
                                              TYPE_SIZE (TREE_TYPE (arg1)),
                                              flags)))
                  && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
                      == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
                  && OP_SAME (0) && OP_SAME (1));

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          /* Operands 2 and 3 may be null.
             Compare the array index by value if it is constant first as we
             may have different types but same value here.  */
          return (OP_SAME (0)
                  && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
                                          TREE_OPERAND (arg1, 1))
                      || OP_SAME (1))
                  && OP_SAME_WITH_NULL (2)
                  && OP_SAME_WITH_NULL (3));

        case COMPONENT_REF:
          /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
             may be NULL when we're called to compare MEM_EXPRs.  */
          return OP_SAME_WITH_NULL (0)
                 && OP_SAME (1)
                 && OP_SAME_WITH_NULL (2);

        case BIT_FIELD_REF:
          return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

        default:
          return 0;
        }

    case tcc_expression:
      switch (TREE_CODE (arg0))
        {
        case ADDR_EXPR:
        case TRUTH_NOT_EXPR:
          return OP_SAME (0);

        case TRUTH_ANDIF_EXPR:
        case TRUTH_ORIF_EXPR:
          return OP_SAME (0) && OP_SAME (1);

        case FMA_EXPR:
        case WIDEN_MULT_PLUS_EXPR:
        case WIDEN_MULT_MINUS_EXPR:
          if (!OP_SAME (2))
            return 0;
          /* The multiplication operands are commutative.  */
          /* FALLTHRU */

        case TRUTH_AND_EXPR:
        case TRUTH_OR_EXPR:
        case TRUTH_XOR_EXPR:
          if (OP_SAME (0) && OP_SAME (1))
            return 1;

          /* Otherwise take into account this is a commutative operation.  */
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 1), flags)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 0), flags));

        case COND_EXPR:
        case VEC_COND_EXPR:
        case DOT_PROD_EXPR:
          return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

        default:
          return 0;
        }

    case tcc_vl_exp:
      switch (TREE_CODE (arg0))
        {
        case CALL_EXPR:
          /* If the CALL_EXPRs call different functions, then they
             clearly can not be equal.  */
          if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
                                 flags))
            return 0;

          {
            unsigned int cef = call_expr_flags (arg0);
            if (flags & OEP_PURE_SAME)
              cef &= ECF_CONST | ECF_PURE;
            else
              cef &= ECF_CONST;
            if (!cef)
              return 0;
          }

          /* Now see if all the arguments are the same.  */
          {
            const_call_expr_arg_iterator iter0, iter1;
            const_tree a0, a1;
            for (a0 = first_const_call_expr_arg (arg0, &iter0),
                   a1 = first_const_call_expr_arg (arg1, &iter1);
                 a0 && a1;
                 a0 = next_const_call_expr_arg (&iter0),
                   a1 = next_const_call_expr_arg (&iter1))
              if (! operand_equal_p (a0, a1, flags))
                return 0;

            /* If we get here and both argument lists are exhausted
               then the CALL_EXPRs are equal.  */
            return ! (a0 || a1);
          }
        default:
          return 0;
        }

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
              && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
              && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
              && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
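
/* A hedged usage sketch (not part of GCC): operand_equal_p is the
   workhorse behind most "same tree?" questions in the folders.  For
   instance, recognizing "x + y" as equal to "y + x" relies on the
   commutativity handling above.  A hypothetical illustration:  */
#if 0
static bool
example_commuted_sum_equal (tree x, tree y)
{
  tree t = TREE_TYPE (x);
  tree sum1 = build2 (PLUS_EXPR, t, x, y);
  tree sum2 = build2 (PLUS_EXPR, t, y, x);

  /* True when X and Y are side-effect-free: PLUS_EXPR is commutative,
     so the operands may match in either order.  */
  return operand_equal_p (sum1, sum2, 0) != 0;
}
#endif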
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
         to match the longer operand.  */
      primarg1 = fold_convert (signed_or_unsigned_type_for
                               (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
        return 1;
    }

  return 0;
}
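
/* A hedged example (not part of GCC): the C frontend's shorten_compare
   may rewrite "(int) c < d", with C a char, into a comparison done
   directly on the narrower type.  The routine above answers whether an
   already-shortened ARG0 such as "c" could have come from ARG1 =
   "(int) c" when ARG1 was being compared against OTHER, by re-narrowing
   ARG1 the same way and comparing the results.  */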
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
               || code == COMPOUND_EXPR))
    tclass = tcc_binary;

  else if (tclass == tcc_expression && code == SAVE_EXPR
           && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
         too complex to handle.  */
      if (*cval1 || *cval2)
        return 0;

      tclass = tcc_unary;
      *save_p = 1;
    }

  switch (tclass)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
              && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
        return (twoval_comparison_p (TREE_OPERAND (arg, 0),
                                     cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                        cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 2),
                                        cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
         the second operand, we know *CVAL1 can't be zero.  It must be that
         one side of the comparison is each of the values; test for the
         case where this isn't true by failing if the two operands
         are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
                           TREE_OPERAND (arg, 1), 0))
        return 0;

      if (*cval1 == 0)
        *cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
        ;
      else
        return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
        ;
      else
        return 0;

      return 1;

    default:
      return 0;
    }
}
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (location_t loc, tree arg, tree old0, tree new0,
            tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    tclass = tcc_binary;

  switch (tclass)
    {
    case tcc_unary:
      return fold_build1_loc (loc, code, type,
                              eval_subst (loc, TREE_OPERAND (arg, 0),
                                          old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2_loc (loc, code, type,
                              eval_subst (loc, TREE_OPERAND (arg, 0),
                                          old0, new0, old1, new1),
                              eval_subst (loc, TREE_OPERAND (arg, 1),
                                          old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
        {
        case SAVE_EXPR:
          return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
                             old1, new1);

        case COMPOUND_EXPR:
          return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
                             old1, new1);

        case COND_EXPR:
          return fold_build3_loc (loc, code, type,
                                  eval_subst (loc, TREE_OPERAND (arg, 0),
                                              old0, new0, old1, new1),
                                  eval_subst (loc, TREE_OPERAND (arg, 1),
                                              old0, new0, old1, new1),
                                  eval_subst (loc, TREE_OPERAND (arg, 2),
                                              old0, new0, old1, new1));
        default:
          break;
        }
      /* Fall through - ???  */

    case tcc_comparison:
      {
        tree arg0 = TREE_OPERAND (arg, 0);
        tree arg1 = TREE_OPERAND (arg, 1);

        /* We need to check both for exact equality and tree equality.  The
           former will be true if the operand has a side-effect.  In that
           case, we know the operand occurred exactly once.  */

        if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
          arg0 = new0;
        else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
          arg0 = new1;

        if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
          arg1 = new0;
        else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
          arg1 = new1;

        return fold_build2_loc (loc, code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}
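
/* A hedged worked example (not part of GCC): given
     ARG = (a < b) && (a != b),  OLD0 = a, NEW0 = x, OLD1 = b, NEW1 = y,
   eval_subst rebuilds the tree as (x < y) && (x != y), folding each node
   as it is rebuilt.  The COND_EXPR folder uses this together with
   twoval_comparison_p to test whether a comparison tree collapses to a
   constant once specific values are substituted for its operands.  */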
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement casted to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
                       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
                       fold_ignored_result (omitted), t);

  return non_lvalue_loc (loc, t);
}
/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
                               tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement casted to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
                       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
                       fold_ignored_result (omitted), t);

  return pedantic_non_lvalue_loc (loc, t);
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands_loc (location_t loc, tree type, tree result,
                       tree omitted1, tree omitted2)
{
  tree t = fold_convert_loc (loc, type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
}
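
/* A hedged example (not part of GCC): folding "f () * 0" cannot simply
   drop the call, so the folders use the omit_* helpers to keep the side
   effects while replacing the value.  A hypothetical sketch:  */
#if 0
static tree
example_fold_mult_zero (location_t loc, tree type, tree call_with_effects)
{
  tree zero = build_int_cst (type, 0);

  /* Produces "call_with_effects, 0" (a COMPOUND_EXPR): the call is
     still evaluated, but the value of the whole expression is 0.  */
  return omit_one_operand_loc (loc, type, zero, call_with_effects);
}
#endif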
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

static tree
fold_truth_not_expr (location_t loc, tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  location_t loc1, loc2;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
          && flag_trapping_math
          && code != ORDERED_EXPR && code != UNORDERED_EXPR
          && code != NE_EXPR && code != EQ_EXPR)
        return NULL_TREE;

      code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
      if (code == ERROR_MARK)
        return NULL_TREE;

      return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
                         TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
      if (loc1 == UNKNOWN_LOCATION)
        loc1 = loc;
      if (loc2 == UNKNOWN_LOCATION)
        loc2 = loc;
      return build2_loc (loc, TRUTH_OR_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
      if (loc1 == UNKNOWN_LOCATION)
        loc1 = loc;
      if (loc2 == UNKNOWN_LOCATION)
        loc2 = loc;
      return build2_loc (loc, TRUTH_AND_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
         unless the second operand is a TRUTH_NOT_EXPR in which case our
         result is the XOR of the first operand with the inside of the
         negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
        return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
                           TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
        return build2_loc (loc, TRUTH_XOR_EXPR, type,
                           invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
                           TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
      if (loc1 == UNKNOWN_LOCATION)
        loc1 = loc;
      if (loc2 == UNKNOWN_LOCATION)
        loc2 = loc;
      return build2_loc (loc, TRUTH_ORIF_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
      if (loc1 == UNKNOWN_LOCATION)
        loc1 = loc;
      if (loc2 == UNKNOWN_LOCATION)
        loc2 = loc;
      return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
        tree arg1 = TREE_OPERAND (arg, 1);
        tree arg2 = TREE_OPERAND (arg, 2);

        loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
        loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 2));
        if (loc1 == UNKNOWN_LOCATION)
          loc1 = loc;
        if (loc2 == UNKNOWN_LOCATION)
          loc2 = loc;

        /* A COND_EXPR may have a throw as one operand, which
           then has void type.  Just leave void operands
           as they are.  */
        return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
                           VOID_TYPE_P (TREE_TYPE (arg1))
                           ? arg1 : invert_truthvalue_loc (loc1, arg1),
                           VOID_TYPE_P (TREE_TYPE (arg2))
                           ? arg2 : invert_truthvalue_loc (loc2, arg2));
      }

    case COMPOUND_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
      if (loc1 == UNKNOWN_LOCATION)
        loc1 = loc;
      return build2_loc (loc, COMPOUND_EXPR, type,
                         TREE_OPERAND (arg, 0),
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      if (loc1 == UNKNOWN_LOCATION)
        loc1 = loc;
      return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));

    CASE_CONVERT:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
        return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

      /* ... fall through ...  */

    case FLOAT_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      if (loc1 == UNKNOWN_LOCATION)
        loc1 = loc;
      return build1_loc (loc, TREE_CODE (arg), type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
        return NULL_TREE;
      return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      if (loc1 == UNKNOWN_LOCATION)
        loc1 = loc;
      return build1_loc (loc, CLEANUP_POINT_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    default:
      return NULL_TREE;
    }
}
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
invert_truthvalue_loc (location_t loc, tree arg)
{
  tree tem;

  if (TREE_CODE (arg) == ERROR_MARK)
    return arg;

  tem = fold_truth_not_expr (loc, arg);
  if (!tem)
    tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);

  return tem;
}
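
/* A hedged worked example (not part of GCC): De Morgan in action.  For
   ARG = "a && b", fold_truth_not_expr returns "!a || !b", negating each
   operand at its own location; for an integer comparison such as
   "a < b" it returns "a >= b" directly.  Hypothetical usage:  */
#if 0
static tree
example_negate_condition (location_t loc, tree cond)
{
  /* Never alters COND; falls back to wrapping a TRUTH_NOT_EXPR when no
     simplification applies.  */
  return invert_truthvalue_loc (loc, cond);
}
#endif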
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
        (A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (location_t loc, enum tree_code code, tree type,
                     tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
          && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  common = fold_convert_loc (loc, type, common);
  left = fold_convert_loc (loc, type, left);
  right = fold_convert_loc (loc, type, right);
  return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
                          fold_build2_loc (loc, code, type, left, right));
}
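
/* A hedged worked example (not part of GCC): with CODE = BIT_AND_EXPR,
   ARG0 = (A | B) and ARG1 = (A | C), the common operand A is found in
   position 0 of both, so the result is A | (B & C): one bit operation
   saved, and B & C folds further when both are constants, e.g.
   (x | 3) & (x | 5) becomes x | 1.  */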
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */
static tree
distribute_real_division (location_t loc, enum tree_code code, tree type,
                          tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
                          TREE_OPERAND (arg1, 1), 0))
    return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
                            fold_build2_loc (loc, code, type,
                                             TREE_OPERAND (arg0, 0),
                                             TREE_OPERAND (arg1, 0)),
                            TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
                       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
        real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
        real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2_loc (loc, MULT_EXPR, type,
                              TREE_OPERAND (arg0, 0),
                              build_real (type, r0));
    }

  return NULL_TREE;
}
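
/* A hedged worked example (not part of GCC): for x / 2.0 + x / 4.0 the
   second pattern above computes 1/2 + 1/4 = 0.75 in REAL_VALUE_TYPE
   arithmetic and returns x * 0.75.  The comment above flags this as
   unsafe because the single combined constant can round differently
   than the two original divisions, which is presumably why callers only
   apply it under unsafe-math rules.  */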
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (location_t loc, tree inner, tree type,
                    HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
{
  tree result, bftype;

  if (bitpos == 0)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
           || POINTER_TYPE_P (TREE_TYPE (inner)))
          && host_integerp (size, 0)
          && tree_low_cst (size, 0) == bitsize)
        return fold_convert_loc (loc, type, inner);
    }

  bftype = type;
  if (TYPE_PRECISION (bftype) != bitsize
      || TYPE_UNSIGNED (bftype) == !unsignedp)
    bftype = build_nonstandard_integer_type (bitsize, 0);

  result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
                       size_int (bitsize), bitsize_int (bitpos));

  if (bftype != type)
    result = fold_convert_loc (loc, type, result);

  return result;
}
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (location_t loc, enum tree_code code,
                            tree compare_type, tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
                                &lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
         sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
                                    &runsignedp, &rvolatilep, false);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
          || lunsignedp != runsignedp || offset != 0
          || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
        return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  if (lvolatilep
      && GET_MODE_BITSIZE (lmode) > 0
      && flag_strict_volatile_bitfields > 0)
    nmode = lmode;
  else
    nmode = get_best_mode (lbitsize, lbitpos,
                           const_p ? TYPE_ALIGN (TREE_TYPE (linner))
                           : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
                                  TYPE_ALIGN (TREE_TYPE (rinner))),
                           word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst_type (unsigned_type, -1);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
  mask = const_binop (RSHIFT_EXPR, mask,
                      size_int (nbitsize - lbitsize - lbitpos));

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return fold_build2_loc (loc, code, compare_type,
                            fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                                             make_bit_field_ref (loc, linner,
                                                                 unsigned_type,
                                                                 nbitsize,
                                                                 nbitpos, 1),
                                             mask),
                            fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                                             make_bit_field_ref (loc, rinner,
                                                                 unsigned_type,
                                                                 nbitsize,
                                                                 nbitpos, 1),
                                             mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
                                        fold_convert_loc (loc,
                                                          unsigned_type, rhs),
                                        size_int (lbitsize))))
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR,
                              fold_convert_loc (loc, signed_type, rhs),
                              size_int (lbitsize - 1));
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = const_binop (BIT_AND_EXPR,
                     const_binop (LSHIFT_EXPR,
                                  fold_convert_loc (loc, unsigned_type, rhs),
                                  size_int (lbitpos)),
                     mask);

  lhs = build2_loc (loc, code, compare_type,
                    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
  return lhs;
}
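
/* A hedged worked example (not part of GCC): for
     struct s { unsigned f : 3; } x;   ...   x.f == 5
   the field lives inside some addressable unit B (say a byte).  Instead
   of extracting and shifting the 3-bit field, the constant case above
   compares
     (B & (7 << pos)) == (5 << pos)
   with POS the field's bit position within B: the shift moves to the
   constant side, where it folds away at compile time.  */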
/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
                        HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
                        int *punsignedp, int *pvolatilep,
                        tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (CONVERT_EXPR_P (exp)
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
        return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
                               punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst_type (unsigned_type, -1);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                            fold_convert_loc (loc, unsigned_type, and_mask),
                            mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bit positions.  */

static int
all_ones_mask_p (const_tree mask, int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_cst_type (signed_type_for (type), -1);

  return
    tree_int_cst_equal (mask,
                        const_binop (RSHIFT_EXPR,
                                     const_binop (LSHIFT_EXPR, tmask,
                                                  size_int (precision - size)),
                                     size_int (precision - size)));
}
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, const_tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
                 >> (2 * HOST_BITS_PER_WIDE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
                 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
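
/* A hedged worked example (not part of GCC): for a 32-bit int,
   sign_bit_p recognizes VAL = 0x80000000 (1 << 31), so the callers can
   fold a test like "(x & 0x80000000) != 0" into the equivalent "x < 0".
   The narrower-type recursion at the end catches the same pattern
   hidden behind a widening NOP_EXPR.  */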
/* Subroutine for fold_truthop: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (const_tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
          || TREE_CODE (exp) == SSA_NAME
          || (DECL_P (exp)
              && ! TREE_ADDRESSABLE (exp)
              && ! TREE_THIS_VOLATILE (exp)
              && ! DECL_NONLOCAL (exp)
              /* Don't regard global variables as simple.  They may be
                 allocated in ways unknown to the compiler (shared memory,
                 #pragma weak, etc).  */
              && ! TREE_PUBLIC (exp)
              && ! DECL_EXTERNAL (exp)
              /* Loading a static variable is unduly expensive, but global
                 registers aren't expensive.  */
              && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
        X == 2 || X == 3 || X == 4 || X == 5
   and
        X >= 2 && X <= 5
   are converted to
        (unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
             tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
                         arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
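
/* A hedged example (not part of GCC): an omitted bound compares like an
   infinity of the matching sign, so
     range_binop (LE_EXPR, type, NULL_TREE, 0, c, 1)
   asks whether "-inf <= c" for some upper bound C, which is always
   true.  This is what lets the range machinery treat "+ [-, 10]" and
   "+ [2, -]" uniformly with fully bounded ranges.  */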
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */

tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
            bool *strict_overflow_p)
{
  enum tree_code code;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;
  location_t loc = EXPR_LOCATION (exp);

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
        {
          if (TREE_OPERAND_LENGTH (exp) > 0)
            arg0 = TREE_OPERAND (exp, 0);
          if (TREE_CODE_CLASS (code) == tcc_comparison
              || TREE_CODE_CLASS (code) == tcc_unary
              || TREE_CODE_CLASS (code) == tcc_binary)
            arg0_type = TREE_TYPE (arg0);
          if (TREE_CODE_CLASS (code) == tcc_binary
              || TREE_CODE_CLASS (code) == tcc_comparison
              || (TREE_CODE_CLASS (code) == tcc_expression
                  && TREE_OPERAND_LENGTH (exp) > 1))
            arg1 = TREE_OPERAND (exp, 1);
        }

      switch (code)
        {
        case TRUTH_NOT_EXPR:
          in_p = ! in_p, exp = arg0;
          continue;

        case EQ_EXPR: case NE_EXPR:
        case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
          /* We can only do something if the range is testing for zero
             and if the second operand is an integer constant.  Note that
             saying something is "in" the range we make is done by
             complementing IN_P since it will set in the initial case of
             being not equal to zero; "out" is leaving it alone.  */
          if (low == 0 || high == 0
              || ! integer_zerop (low) || ! integer_zerop (high)
              || TREE_CODE (arg1) != INTEGER_CST)
            break;

          switch (code)
            {
            case NE_EXPR:  /* - [c, c]  */
              low = high = arg1;
              break;
            case EQ_EXPR:  /* + [c, c]  */
              in_p = ! in_p, low = high = arg1;
              break;
            case GT_EXPR:  /* - [-, c] */
              low = 0, high = arg1;
              break;
            case GE_EXPR:  /* + [c, -] */
              in_p = ! in_p, low = arg1, high = 0;
              break;
            case LT_EXPR:  /* - [c, -] */
              low = arg1, high = 0;
              break;
            case LE_EXPR:  /* + [-, c] */
              in_p = ! in_p, low = 0, high = arg1;
              break;
            default:
              gcc_unreachable ();
            }

          /* If this is an unsigned comparison, we also know that EXP is
             greater than or equal to zero.  We base the range tests we make
             on that fact, so we record it here so we can parse existing
             range tests.  We test arg0_type since often the return type
             of, e.g. EQ_EXPR, is boolean.  */
          if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
            {
              if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                  in_p, low, high, 1,
                                  build_int_cst (arg0_type, 0),
                                  NULL_TREE))
                break;

              in_p = n_in_p, low = n_low, high = n_high;

              /* If the high bound is missing, but we have a nonzero low
                 bound, reverse the range so it goes from zero to the low bound
                 minus 1.  */
              if (high == 0 && low && ! integer_zerop (low))
                {
                  in_p = ! in_p;
                  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
                                      integer_one_node, 0);
                  low = build_int_cst (arg0_type, 0);
                }
            }

          exp = arg0;
          continue;

        case NEGATE_EXPR:
          /* (-x) IN [a,b] -> x in [-b, -a]  */
          n_low = range_binop (MINUS_EXPR, exp_type,
                               build_int_cst (exp_type, 0),
                               0, high, 1);
          n_high = range_binop (MINUS_EXPR, exp_type,
                                build_int_cst (exp_type, 0),
                                0, low, 0);
          if (n_high != 0 && TREE_OVERFLOW (n_high))
            break;
          low = n_low, high = n_high;
          exp = arg0;
          continue;

        case BIT_NOT_EXPR:
          /* ~ X -> -X - 1  */
          exp = build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
                            build_int_cst (exp_type, 1));
          continue;

        case PLUS_EXPR: case MINUS_EXPR:
          if (TREE_CODE (arg1) != INTEGER_CST)
            break;

          /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
             move a constant to the other side.  */
          if (!TYPE_UNSIGNED (arg0_type)
              && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
            break;

          /* If EXP is signed, any overflow in the computation is undefined,
             so we don't worry about it so long as our computations on
             the bounds don't overflow.  For unsigned, overflow is defined
             and this is exactly the right thing.  */
          n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                               arg0_type, low, 0, arg1, 0);
          n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                                arg0_type, high, 1, arg1, 0);
          if ((n_low != 0 && TREE_OVERFLOW (n_low))
              || (n_high != 0 && TREE_OVERFLOW (n_high)))
            break;

          if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
            *strict_overflow_p = true;

          /* Check for an unsigned range which has wrapped around the maximum
             value thus making n_high < n_low, and normalize it.  */
          if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
            {
              low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
                                 integer_one_node, 0);
              high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
                                  integer_one_node, 0);

              /* If the range is of the form +/- [ x+1, x ], we won't
                 be able to normalize it.  But then, it represents the
                 whole range or the empty set, so make it
                 +/- [ -, - ].  */
              if (tree_int_cst_equal (n_low, low)
                  && tree_int_cst_equal (n_high, high))
                low = high = 0;
              else
                in_p = ! in_p;
            }
          else
            low = n_low, high = n_high;

          exp = arg0;
          continue;

        CASE_CONVERT: case NON_LVALUE_EXPR:
          if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
            break;

          if (! INTEGRAL_TYPE_P (arg0_type)
              || (low != 0 && ! int_fits_type_p (low, arg0_type))
              || (high != 0 && ! int_fits_type_p (high, arg0_type)))
            break;

          n_low = low, n_high = high;

          if (n_low != 0)
            n_low = fold_convert_loc (loc, arg0_type, n_low);

          if (n_high != 0)
            n_high = fold_convert_loc (loc, arg0_type, n_high);

          /* If we're converting arg0 from an unsigned type, to exp,
             a signed type, we will be doing the comparison as unsigned.
             The tests above have already verified that LOW and HIGH
             are both positive.

             So we have to ensure that we will handle large unsigned
             values the same way that the current signed bounds treat
             negative values.  */

          if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
            {
              tree high_positive;
              tree equiv_type;
              /* For fixed-point modes, we need to pass the saturating flag
                 as the 2nd parameter.  */
              if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
                equiv_type = lang_hooks.types.type_for_mode
                             (TYPE_MODE (arg0_type),
                              TYPE_SATURATING (arg0_type));
              else
                equiv_type = lang_hooks.types.type_for_mode
                             (TYPE_MODE (arg0_type), 1);

              /* A range without an upper bound is, naturally, unbounded.
                 Since convert would have cropped a very large value, use
                 the max value for the destination type.  */
              high_positive
                = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
                  : TYPE_MAX_VALUE (arg0_type);

              if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
                high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
                                                 fold_convert_loc (loc,
                                                                   arg0_type,
                                                                   high_positive),
                                                 build_int_cst (arg0_type, 1));

              /* If the low bound is specified, "and" the range with the
                 range for which the original unsigned value will be
                 positive.  */
              if (low != 0)
                {
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                      1, n_low, n_high, 1,
                                      fold_convert_loc (loc, arg0_type,
                                                        integer_zero_node),
                                      high_positive))
                    break;

                  in_p = (n_in_p == in_p);
                }
              else
                {
                  /* Otherwise, "or" the range with the range of the input
                     that will be interpreted as negative.  */
                  if (! merge_ranges (&n_in_p, &n_low, &n_high,
                                      0, n_low, n_high, 1,
                                      fold_convert_loc (loc, arg0_type,
                                                        integer_zero_node),
                                      high_positive))
                    break;

                  in_p = (in_p != n_in_p);
                }
            }

          exp = arg0;
          low = n_low, high = n_high;
          continue;

        default:
          break;
        }

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
                                                 exp, 0, low, 0))
                      && integer_onep (range_binop (LE_EXPR, integer_type_node,
                                                    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
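
/* A hedged worked example (not part of GCC): for EXP = "x >= 2" with X
   unsigned, the comparison case records "+ [2, -]", and the unsigned
   special case then replaces the missing upper bound, leaving
   "- [0, 1]": "x >= 2" is exactly "x not in [0, 1]".  A hypothetical
   call:  */
#if 0
static tree
example_make_range (tree cond)
{
  int in_p;
  tree low, high;
  bool strict_overflow_p = false;

  /* Returns the expression actually being range-tested (here "x") and
     fills IN_P/LOW/HIGH with the range just described.  */
  return make_range (cond, &in_p, &low, &high, &strict_overflow_p);
}
#endif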
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

tree
build_range_check (location_t loc, tree type, tree exp, int in_p,
		   tree low, tree high)
{
  tree etype = TREE_TYPE (exp), value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (! in_p)
    {
      value = build_range_check (loc, type, exp, 1, low, high);
      if (value != 0)
	return invert_truthvalue_loc (loc, value);

      return 0;
    }

  if (low == 0 && high == 0)
    return build_int_cst (type, 1);

  if (low == 0)
    return fold_build2_loc (loc, LE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, high));

  if (high == 0)
    return fold_build2_loc (loc, GE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2_loc (loc, EQ_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
	{
	  etype = unsigned_type_for (etype);
	  high = fold_convert_loc (loc, etype, high);
	  exp = fold_convert_loc (loc, etype, exp);
	}
      return build_range_check (loc, type, exp, 1, 0, high);
    }
  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
	}
      else
	{
	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
	  lo = (unsigned HOST_WIDE_INT) -1;
	}

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
	{
	  if (TYPE_UNSIGNED (etype))
	    {
	      tree signed_etype = signed_type_for (etype);
	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
		etype
		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
	      else
		etype = signed_etype;
	      exp = fold_convert_loc (loc, etype, exp);
	    }
	  return fold_build2_loc (loc, GT_EXPR, type, exp,
				  build_int_cst (etype, 0));
	}
    }
  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetics for the type of the expression.
     First make sure that arithmetics in this type is valid, then make sure
     that it wraps around.  */
  if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
    etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
					    TYPE_UNSIGNED (etype));

  if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
	 for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
			  integer_one_node, 1);
      minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
				      minv, 1, maxv, 1)))
	etype = utype;
      else
	return 0;
    }

  high = fold_convert_loc (loc, etype, high);
  low = fold_convert_loc (loc, etype, low);
  exp = fold_convert_loc (loc, etype, exp);

  value = const_binop (MINUS_EXPR, high, low);

  if (POINTER_TYPE_P (etype))
    {
      if (value != 0 && !TREE_OVERFLOW (value))
	{
	  low = fold_convert_loc (loc, sizetype, low);
	  low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
	  return build_range_check (loc, type,
				    fold_build2_loc (loc, POINTER_PLUS_EXPR,
						     etype, exp, low),
				    1, build_int_cst (etype, 0), value);
	}
      return 0;
    }

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (loc, type,
			      fold_build2_loc (loc, MINUS_EXPR, etype,
					       exp, low),
			      1, build_int_cst (etype, 0), value);

  return 0;
}
/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}

/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

bool
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the second range to the end of the first.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  low = range_successor (high1);
	  high = high0;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* We are in the weird situation where high0 > high1 but
		 high1 has no successor.  Punt.  */
	      return 0;
	    }
	}
      else if (! subset || highequal)
	{
	  low = low0;
	  high = range_predecessor (low1);
	  in_p = 1;
	  if (high == 0)
	    {
	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
	      return 0;
	    }
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  low = range_successor (high0);
	  high = high1;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* high1 > high0 but high0 has no successor.  Punt.  */
	      return 0;
	    }
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_successor (high0),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    {
	      /* Canonicalize - [min, x] into - [-, x].  */
	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (low0)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (low0))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (low0,
					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
		      low0 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
			&& integer_zerop (low0))
		      low0 = 0;
		    break;
		  default:
		    break;
		  }

	      /* Canonicalize - [x, max] into - [x, -].  */
	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (high1)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (high1))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (high1,
					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
		      high1 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
						       high1, 1,
						       integer_one_node, 1)))
		      high1 = 0;
		    break;
		  default:
		    break;
		  }

	      /* The ranges might be also adjacent between the maximum and
		 minimum values of the given type.  For
		 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
		 return + [x + 1, y - 1].  */
	      if (low0 == 0 && high1 == 0)
		{
		  low = range_successor (high0);
		  high = range_predecessor (low1);
		  if (low == 0 || high == 0)
		    return 0;

		  in_p = 1;
		}
	      else
		return 0;
	    }
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
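
/* Worked example: merging -[-, 47] with -[58, -] (i.e. "X is not <= 47"
   and "X is not >= 58") falls into the final adjacency case above and
   yields the single included range +[48, 57].  */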
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is being used also to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (location_t loc, tree type,
				tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && (FLOAT_TYPE_P (TREE_TYPE (arg01))
	  ? real_zerop (arg01)
	  : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	  /* In the case that A is of the form X-Y, '-A' (arg2) may
	     have already been folded to Y-X, check for that.  */
	  || (TREE_CODE (arg1) == MINUS_EXPR
	      && TREE_CODE (arg2) == MINUS_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0),
				  TREE_OPERAND (arg2, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg1, 1),
				  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	tem = fold_convert_loc (loc, arg1_type, arg1);
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert_loc (loc, signed_type_for
				   (TREE_TYPE (arg1)), arg1);
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert_loc (loc, signed_type_for
				   (TREE_TYPE (arg1)), arg1);
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return negate_expr (fold_convert_loc (loc, type, tem));
      default:
	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      else if (comp_code == EQ_EXPR)
	return build_int_cst (type, 0);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
	  || (strcmp (lang_hooks.name, "GNU C++") != 0
	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
	  || ! maybe_lvalue_p (arg1)
	  || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
	{
	  comp_type = type;
	  comp_op0 = arg1;
	  comp_op1 = arg2;
	}

      switch (comp_code)
	{
	case EQ_EXPR:
	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
	case NE_EXPR:
	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
	case LE_EXPR:
	case LT_EXPR:
	case UNLE_EXPR:
	case UNLT_EXPR:
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
		    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					      fold_convert_loc (loc, type, tem));
	    }
	  break;
	case GE_EXPR:
	case GT_EXPR:
	case UNGE_EXPR:
	case UNGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
		    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
				       comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					      fold_convert_loc (loc, type, tem));
	    }
	  break;
	case UNEQ_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, arg2));
	  break;
	case LTGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, arg1));
	  break;
	default:
	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	  break;
	}
    }
  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
	if (TREE_CODE (arg1) == INTEGER_CST)
	  break;
	/* We can replace A with C1 in this case.  */
	arg1 = fold_convert_loc (loc, type, arg01);
	return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
	/* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
	   MIN_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));
	  }
	break;

      case LE_EXPR:
	/* If C1 is C2 - 1, this is min(A, C2), with the same care
	   as above.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));
	  }
	break;

      case GT_EXPR:
	/* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
	   MAX_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	  }
	break;

      case GE_EXPR:
	/* If C1 is C2 + 1, this is max(A, C2), with the same care as above.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	  }
	break;

      case NE_EXPR:
	break;

      default:
	gcc_unreachable ();
      }

  return NULL_TREE;
}
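
/* Illustrative example: "x < 10 ? x : 9" matches the LT_EXPR case above
   with C1 == C2 + 1 (10 == 9 + 1) and is rewritten as MIN_EXPR <x, 9>.  */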
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
		false) >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
	       || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
  tree tem;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when simplifying range test");

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type,
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue_loc (loc, tem) : tem;
    }

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
	   && lhs != 0 && rhs != 0
	   && (code == TRUTH_ANDIF_EXPR
	       || code == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build2_loc (loc, code == TRUTH_ANDIF_EXPR
			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			   type, op0, op1);

      else if (lang_hooks.decls.global_bindings_p () == 0
	       && ! CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  if (0 != (lhs = build_range_check (loc, type, common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (loc, type, common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (warnmsg,
				       WARN_STRICT_OVERFLOW_COMPARISON);
	      return build2_loc (loc, code == TRUTH_ANDIF_EXPR
				 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
				 type, lhs, rhs);
	    }
	}
    }

  return 0;
}
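
/* Illustrative example: "ch >= '0' && ch <= '9'" produces the ranges
   +[48, -] and +[-, 57]; merge_ranges combines them into +[48, 57], and
   build_range_check then emits a single unsigned comparison for the test.  */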
/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is signed-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1));

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask));
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
}
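
/* Worked example (assuming a 32-bit mode, P == 8, UNSIGNEDP == 0 and no
   MASK): for C == 0x7f the sign bit of the 8-bit field is clear and C is
   returned unchanged; for C == 0x80 the XOR sets the high 24 bits, giving
   0xffffff80, the sign-extension of the 8-bit value.  */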
/* For an expression that has the form
     (A && B) || ~B
   or
     (A || B) && ~B,
   we can drop one of the inner expressions and simplify to
     A || ~B
   or
     A && ~B

   LOC is the location of the resulting expression.  OP is the inner
   logical operation; the left-hand side in the examples above, while CMPOP
   is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
   removing a condition that guards another, as in
     (A != NULL && A->...) || A == NULL
   which we must not transform.  If RHS_ONLY is true, only eliminate the
   right-most operand of the inner logical operation.  */

static tree
merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
				 bool rhs_only)
{
  tree type = TREE_TYPE (cmpop);
  enum tree_code code = TREE_CODE (cmpop);
  enum tree_code truthop_code = TREE_CODE (op);
  tree lhs = TREE_OPERAND (op, 0);
  tree rhs = TREE_OPERAND (op, 1);
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code rhs_code = TREE_CODE (rhs);
  enum tree_code lhs_code = TREE_CODE (lhs);
  enum tree_code inv_code;

  if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
    return NULL_TREE;

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return NULL_TREE;

  if (rhs_code == truthop_code)
    {
      tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
      if (newrhs != NULL_TREE)
	{
	  rhs = newrhs;
	  rhs_code = TREE_CODE (rhs);
	}
    }
  if (lhs_code == truthop_code && !rhs_only)
    {
      tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
      if (newlhs != NULL_TREE)
	{
	  lhs = newlhs;
	  lhs_code = TREE_CODE (lhs);
	}
    }

  inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
  if (inv_code == rhs_code
      && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return lhs;
  if (!rhs_only && inv_code == lhs_code
      && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return rhs;
  if (rhs != orig_rhs || lhs != orig_lhs)
    return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
			    lhs, rhs);
  return NULL_TREE;
}
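
/* Illustrative example: for "(a > 1 && b < 2) || a <= 1" the inner
   comparison "a > 1" is the inversion of CMPOP "a <= 1", so it is
   dropped and the result is "b < 2 || a <= 1".  */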
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with the a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made this by
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truthop (location_t loc, enum tree_code code, tree truth_type,
	      tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  HOST_WIDE_INT first_bit, end_bit;
  int volatilep;
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code orig_code = code;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
		    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
		    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
	  && operand_equal_p (lr_arg, rr_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
	       && operand_equal_p (lr_arg, rl_arg, 0))
	{
	  result = combine_comparisons (loc, code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  Avoid doing this if the RHS is a floating-point
     comparison since those can trap.  */

  if (BRANCH_COST (optimize_function_for_speed_p (cfun),
		   false) >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, NE_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, EQ_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));

      if (LOGICAL_OP_NON_SHORT_CIRCUIT)
	{
	  if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
	    return build2_loc (loc, code, truth_type, lhs, rhs);
	  return NULL_TREE;
	}
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (loc, ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask,
				     &ll_and_mask);
  lr_inner = decode_field_reference (loc, lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask,
				     &lr_and_mask);
  rl_inner = decode_field_reference (loc, rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask,
				     &rl_and_mask);
  rr_inner = decode_field_reference (loc, rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask,
				     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
			 size_int (xll_bitpos));
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
			 size_int (xrl_bitpos));

  if (l_const)
    {
      l_const = fold_convert_loc (loc, lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, ll_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  if (r_const)
    {
      r_const = fold_convert_loc (loc, lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
							 lntype, rl_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, lr_mask),
			     size_int (xlr_bitpos));
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, rr_mask),
			     size_int (xrr_bitpos));

      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
	{
	  lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	{
	  tree type;

	  lhs = make_bit_field_ref (loc, ll_inner, lntype,
				    ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
	  rhs = make_bit_field_ref (loc, lr_inner, rntype,
				    lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)));
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)));

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = fold_convert_loc (loc, rntype, lhs);
		  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = fold_convert_loc (loc, lntype, rhs);
		  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
			   const_binop (BIT_AND_EXPR, result, r_const)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);
	}
      else
	{
	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);

  return build2_loc (loc, wanted_code, truth_type, result,
		     const_binop (BIT_IOR_EXPR, l_const, r_const));
}
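
/* Illustrative example: for "p->a == 2 && p->b == 4" with A and B adjacent
   bit-fields, the code above emits a single load of the word containing
   both fields, one mask, and one comparison against the merged constant.  */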
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
			    tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
	tree tem
	  = optimize_minmax_comparison (loc,
					invert_tree_comparison (code, false),
					type, op0, op1);
	if (tem)
	  return invert_truthvalue_loc (loc, tem);
	return NULL_TREE;
      }

    case GE_EXPR:
      return
	fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
			 optimize_minmax_comparison
			 (loc, EQ_EXPR, type, arg0, comp_const),
			 optimize_minmax_comparison
			 (loc, GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0  ->  X <= 0  */
	return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5  ->  X == 5   */
	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0  ->  X >= 0  */
	return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
	/* MIN (X, 0) == 5  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1  ->  X == -1  */
	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0  ->  X > 0
	   MAX (X, 0) > 5  ->  X > 5  */
	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1  ->  true  */
	return omit_one_operand_loc (loc, type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0  ->  false
	   MIN (X, 0) > 5  ->  false  */
	return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1  ->  X > -1  */
	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

    default:
      return NULL_TREE;
    }
}
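
/* Illustrative example: "MAX (x, 0) >= 3" is handled by the GE_EXPR
   recursion above as "MAX (x, 0) == 3 || MAX (x, 0) > 3", which the
   EQ_EXPR and GT_EXPR cases reduce to "x == 3 || x > 3".  */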
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.

   We set *STRICT_OVERFLOW_P to true if the return values depends on
   signed overflow being undefined.  Otherwise we do not change
   *STRICT_OVERFLOW_P.  */

static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
		bool *strict_overflow_p)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
  depth--;

  return ret;
}
static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
		  bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;
  bool sub_strict_overflow_p;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
	return const_binop (code, fold_convert (ctype, t),
			    fold_convert (ctype, c));
      break;

    CASE_CONVERT: case NON_LVALUE_EXPR:
      /* If op0 is an expression ... */
      if ((COMPARISON_CLASS_P (op0)
	   || UNARY_CLASS_P (op0)
	   || BINARY_CLASS_P (op0)
	   || VL_EXP_CLASS_P (op0)
	   || EXPRESSION_CLASS_P (op0))
	  /* ... and has wrapping overflow, and its type is smaller
	     than ctype, then we cannot pass through as widening.  */
	  && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
	       && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
		     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
	       && (TYPE_PRECISION (ctype)
		   > TYPE_PRECISION (TREE_TYPE (op0))))
	      /* ... or this is a truncation (t is narrower than op0),
		 then we cannot pass through this narrowing.  */
	      || (TYPE_PRECISION (type)
		  < TYPE_PRECISION (TREE_TYPE (op0)))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TYPE_UNSIGNED (ctype)
		      != TYPE_UNSIGNED (TREE_TYPE (op0))))
	      /* ... or has undefined overflow while the converted to
		 type has not, we cannot do the operation in the inner type
		 as that would introduce undefined overflow.  */
	      || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
		  && !TYPE_OVERFLOW_UNDEFINED (type))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && !TREE_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 code == MULT_EXPR
					 ? ctype : NULL_TREE,
					 strict_overflow_p))))
	return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
	 must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
	{
	  tree cstype = (*signed_type_for) (ctype);
	  if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
	      != 0)
	    {
	      t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
	      return fold_convert (ctype, t1);
	    }
	  break;
	}
      /* If the constant is negative, we cannot simplify this.  */
      if (tree_int_cst_sgn (c) == -1)
	break;
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
	  != 0)
	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      sub_strict_overflow_p = false;
      if ((t1 = extract_muldiv (op0, c, code, wide_type,
				&sub_strict_overflow_p)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type,
				   &sub_strict_overflow_p)) != 0)
	{
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  Signed
	 left-shift overflow is implementation-defined rather than
	 undefined in C90, so do not convert signed left shift into
	 multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
	  && TREE_INT_CST_HIGH (op1) == 0
	  && 0 != (t1 = fold_convert (ctype,
				      const_binop (LSHIFT_EXPR,
						   size_one_node, op1)))
	  && !TREE_OVERFLOW (t1))
	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
				       ? MULT_EXPR : FLOOR_DIV_EXPR,
				       ctype,
				       fold_convert (ctype, op0),
				       t1),
			       c, code, wide_type, strict_overflow_p);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      sub_strict_overflow_p = false;
      t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
      t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	{
	  tcode = PLUS_EXPR, op1 = negate_expr (op1);
	  /* If OP1 was not easily negatable, the constant may be OP0.  */
	  if (TREE_CODE (op0) == INTEGER_CST)
	    {
	      tree tem = op0;
	      op0 = op1;
	      op1 = tem;
	      tem = t1;
	      t1 = t2;
	      t2 = tem;
	    }
	}

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c));
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
	    break;
	}
      else
	break;

      /* If we have an unsigned type is not a sizetype, we cannot widen
	 the operation since it will change the result if the original
	 computation overflowed.  */
      if (TYPE_UNSIGNED (ctype)
	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
	  && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
	return fold_build2 (tcode, ctype,
			    fold_build2 (code, ctype,
					 fold_convert (ctype, op0),
					 fold_convert (ctype, c)),
			    op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  /* If the multiplication can overflow we cannot optimize this.
	     ???  Until we can properly mark individual operations as
	     not overflowing we need to treat sizetype special here as
	     stor-layout relies on this opimization to make
	     DECL_FIELD_BIT_OFFSET always a constant.  */
	  && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
	      || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
		  && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
	{
	  *strict_overflow_p = true;
	  return omit_one_operand (type, integer_zero_node, op0);
	}

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type,
				   strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type,
					strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
			    fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code
	  && 0 != (t1 = int_const_binop (MULT_EXPR,
					 fold_convert (ctype, op1),
					 fold_convert (ctype, c), 1))
	  && 0 != (t1 = force_fit_type_double (ctype, tree_to_double_int (t1),
					       (TYPE_UNSIGNED (ctype)
						&& tcode != MULT_EXPR) ? -1 : 1,
					       TREE_OVERFLOW (t1)))
	  && !TREE_OVERFLOW (t1))
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with either an
	 operation or CODE or TCODE.

	 If we have an unsigned type that is not a sizetype, we cannot do
	 this since it will change the result if the original computation
	 overflowed.  */
      if ((TYPE_OVERFLOW_UNDEFINED (ctype)
	   || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
		  && code != MULT_EXPR)))
	{
	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     op1, c)));
	    }
	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     c, op1)));
	    }
	}
      break;

    default:
      break;
    }

  return 0;
}
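
/* Worked example: extracting the division by 4 from (x * 8) + 4 yields
   x * 2 + 1: the PLUS_EXPR case divides both addends, and the MULT_EXPR
   case cancels 8 against 4.  For signed X this relies on signed overflow
   being undefined, so *STRICT_OVERFLOW_P is set.  */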
/* Return a node which has the indicated constant VALUE (either 0 or
   1), and is of the indicated TYPE.  */

tree
constant_boolean_node (int value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else if (type)
    return build_int_cst_type (type, value);
  else
    return build_int_cst (NULL_TREE, value);
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (location_t loc,
				     enum tree_code code,
				     tree type, tree op0, tree op1,
				     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;

  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an expression, then it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  /* This transformation is only worthwhile if we don't have to wrap ARG
     in a SAVE_EXPR and the operation can be simplified on at least one
     of the branches once its pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg)
      && (TREE_SIDE_EFFECTS (arg)
	  || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
    return NULL_TREE;

  arg = fold_convert_loc (loc, arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert_loc (loc, cond_type, true_value);
      if (cond_first_p)
	lhs = fold_build2_loc (loc, code, type, true_value, arg);
      else
	lhs = fold_build2_loc (loc, code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert_loc (loc, cond_type, false_value);
      if (cond_first_p)
	rhs = fold_build2_loc (loc, code, type, false_value, arg);
      else
	rhs = fold_build2_loc (loc, code, type, arg, false_value);
    }

  /* Check that we have simplified at least one of the branches.  */
  if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
    return NULL_TREE;

  return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
}
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

bool
fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
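
/* Editor's note (illustrative example, not from the original source):
   with the default flags (signed zeros honored, round-to-nearest), only

     x - 0.0

   folds to plain X; x + 0.0 does not, since (-0.0) + 0.0 is +0.0.  With
   -fno-signed-zeros both forms fold, and with -frounding-math or
   -fsignaling-nans neither does.  */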
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (location_t loc,
                     enum built_in_function fcode, enum tree_code code,
                     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = CALL_EXPR_ARG (arg0, 0);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
        {
          /* sqrt(x) == y, sqrt(x) < y and sqrt(x) <= y are always
             false, if y is negative.  */
          if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
            return omit_one_operand_loc (loc, type, integer_zero_node, arg);

          /* sqrt(x) > y is always true, if y is negative and we
             don't care about NaNs, i.e. negative values of x.  */
          if (code == NE_EXPR || !HONOR_NANS (mode))
            return omit_one_operand_loc (loc, type, integer_one_node, arg);

          /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
          return fold_build2_loc (loc, GE_EXPR, type, arg,
                                  build_real (TREE_TYPE (arg), dconst0));
        }
      else if (code == GT_EXPR || code == GE_EXPR)
        {
          REAL_VALUE_TYPE c2;

          REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
          real_convert (&c2, mode, &c2);

          if (REAL_VALUE_ISINF (c2))
            {
              /* sqrt(x) > y is x == +Inf, when y is very large.  */
              if (HONOR_INFINITIES (mode))
                return fold_build2_loc (loc, EQ_EXPR, type, arg,
                                        build_real (TREE_TYPE (arg), c2));

              /* sqrt(x) > y is always false, when y is very large
                 and we don't care about infinities.  */
              return omit_one_operand_loc (loc, type, integer_zero_node, arg);
            }

          /* sqrt(x) > c is the same as x > c*c.  */
          return fold_build2_loc (loc, code, type, arg,
                                  build_real (TREE_TYPE (arg), c2));
        }
      else if (code == LT_EXPR || code == LE_EXPR)
        {
          REAL_VALUE_TYPE c2;

          REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
          real_convert (&c2, mode, &c2);

          if (REAL_VALUE_ISINF (c2))
            {
              /* sqrt(x) < y is always true, when y is a very large
                 value and we don't care about NaNs or Infinities.  */
              if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
                return omit_one_operand_loc (loc, type, integer_one_node, arg);

              /* sqrt(x) < y is x != +Inf when y is very large and we
                 don't care about NaNs.  */
              if (! HONOR_NANS (mode))
                return fold_build2_loc (loc, NE_EXPR, type, arg,
                                        build_real (TREE_TYPE (arg), c2));

              /* sqrt(x) < y is x >= 0 when y is very large and we
                 don't care about Infinities.  */
              if (! HONOR_INFINITIES (mode))
                return fold_build2_loc (loc, GE_EXPR, type, arg,
                                        build_real (TREE_TYPE (arg), dconst0));

              /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
              if (lang_hooks.decls.global_bindings_p () != 0
                  || CONTAINS_PLACEHOLDER_P (arg))
                return NULL_TREE;

              arg = save_expr (arg);
              return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                                      fold_build2_loc (loc, GE_EXPR, type, arg,
                                                       build_real (TREE_TYPE (arg),
                                                                   dconst0)),
                                      fold_build2_loc (loc, NE_EXPR, type, arg,
                                                       build_real (TREE_TYPE (arg),
                                                                   c2)));
            }

          /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
          if (! HONOR_NANS (mode))
            return fold_build2_loc (loc, code, type, arg,
                                    build_real (TREE_TYPE (arg), c2));

          /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
          if (lang_hooks.decls.global_bindings_p () == 0
              && ! CONTAINS_PLACEHOLDER_P (arg))
            {
              arg = save_expr (arg);
              return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                                      fold_build2_loc (loc, GE_EXPR, type, arg,
                                                       build_real (TREE_TYPE (arg),
                                                                   dconst0)),
                                      fold_build2_loc (loc, code, type, arg,
                                                       build_real (TREE_TYPE (arg),
                                                                   c2)));
            }
        }
    }

  return NULL_TREE;
}
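
/* Editor's note (illustrative example, not from the original source):
   for a nonnegative constant these rules rewrite, e.g.,

     sqrt (x) > 4.0    as    x > 16.0

   and, for a negative constant,

     sqrt (x) < -1.0   as    constant false.

   The NaN/Inf guards above keep each rewrite exact for whatever special
   values the current flags say must be honored.  */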
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (location_t loc, enum tree_code code, tree type,
                  tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
        return NULL_TREE;
      return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      if (lang_hooks.decls.global_bindings_p () == 0
          && ! CONTAINS_PLACEHOLDER_P (arg0))
        {
          arg0 = save_expr (arg0);
          return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
        }
      break;

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
                              arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
                              arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
        return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
                                arg0, build_real (TREE_TYPE (arg0), max));

      temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
                              arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}
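
/* Editor's note (illustrative example, not from the original source):
   for double these rules turn, e.g.,

     x > __builtin_inf ()    into    constant false (sans sNaNs), and
     x < __builtin_inf ()    into    x <= DBL_MAX.  */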
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (location_t loc,
                  enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  double_int val;
  bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
  bool neg_overflow;
  int overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1, 0);  */
  overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
                                   TREE_INT_CST_HIGH (arg01),
                                   TREE_INT_CST_LOW (arg1),
                                   TREE_INT_CST_HIGH (arg1),
                                   &val.low, &val.high, unsigned_p);
  prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
  neg_overflow = false;

  if (unsigned_p)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1), 0);
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0).  */
      overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
                                       TREE_INT_CST_HIGH (prod),
                                       TREE_INT_CST_LOW (tmp),
                                       TREE_INT_CST_HIGH (tmp),
                                       &val.low, &val.high, unsigned_p);
      hi = force_fit_type_double (TREE_TYPE (arg00), val,
                                  -1, overflow | TREE_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1), 0);
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          neg_overflow = true;
          lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
          hi = prod;
          break;

        case  0:
          lo = fold_negate_const (tmp, TREE_TYPE (arg0));
          hi = tmp;
          break;

        case  1:
          hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
          lo = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1), 0);
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
          lo = prod;
          break;

        case  0:
          hi = fold_negate_const (tmp, TREE_TYPE (arg0));
          lo = tmp;
          break;

        case  1:
          neg_overflow = true;
          lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
          hi = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
        return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand_loc (loc, type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
        return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
        {
          tmp = neg_overflow ? integer_zero_node : integer_one_node;
          return omit_one_operand_loc (loc, type, tmp, arg00);
        }
      return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
        {
          tmp = neg_overflow ? integer_zero_node : integer_one_node;
          return omit_one_operand_loc (loc, type, tmp, arg00);
        }
      return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
        {
          tmp = neg_overflow ? integer_one_node : integer_zero_node;
          return omit_one_operand_loc (loc, type, tmp, arg00);
        }
      return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
        {
          tmp = neg_overflow ? integer_one_node : integer_zero_node;
          return omit_one_operand_loc (loc, type, tmp, arg00);
        }
      return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
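
/* Editor's note (illustrative example, not from the original source):
   for signed int X this turns

     X / 4 == 2

   into the range check 8 <= X && X <= 11, since those are exactly the
   values whose truncating quotient by 4 is 2.  */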
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */

static tree
fold_single_bit_test_into_sign_test (location_t loc,
                                     enum tree_code code, tree arg0, tree arg1,
                                     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
         this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
          /* This is only a win if casting to a signed type is cheap,
             i.e. when arg00's type is not a partial mode.  */
          && TYPE_PRECISION (TREE_TYPE (arg00))
             == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
        {
          tree stype = signed_type_for (TREE_TYPE (arg00));
          return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                                  result_type,
                                  fold_convert_loc (loc, stype, arg00),
                                  build_int_cst (stype, 0));
        }
    }

  return NULL_TREE;
}
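
/* Editor's note (illustrative example, not from the original source):
   for a 32-bit unsigned X this rewrites

     (x & 0x80000000) != 0    as    (int) x < 0

   i.e. a single signed comparison instead of a mask and test.  */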
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (location_t loc, enum tree_code code,
                      tree arg0, tree arg1, tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
         test.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
                                                 result_type);
      if (tem)
        return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
         convert that into ((A >> C2) & 1).  Where C2 = log2(C).
         Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
                      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert_loc (loc, intermediate_type, inner);

      if (bitnum != 0)
        inner = build2 (RSHIFT_EXPR, intermediate_type,
                        inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      if (code == EQ_EXPR)
        inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type,
                                 inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert_loc (loc, result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
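
/* Editor's note (illustrative example, not from the original source):
   when the sign-bit form does not apply, e.g. for

     (x & 8) != 0

   the result is computed as ((x >> 3) & 1), with a BIT_XOR inserted
   for the == 0 flavor.  */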
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (const_tree arg0, const_tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
         && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == FIXED_CST)
    return 0;
  if (TREE_CODE (arg0) == FIXED_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (optimize_function_for_size_p (cfun))
    return 0;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  /* It is preferable to swap two SSA_NAME to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  /* Put SSA_NAMEs last.  */
  if (TREE_CODE (arg1) == SSA_NAME)
    return 0;
  if (TREE_CODE (arg0) == SSA_NAME)
    return 1;

  /* Put variables last.  */
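
/* Editor's note (illustrative example, not from the original source):
   this predicate is why fold canonicalizes commutative operations such
   as

     5 + a    into    a + 5

   putting constants, and otherwise "simpler" operands, second.  */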
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */

static tree
fold_widened_comparison (location_t loc, enum tree_code code,
                         tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (shorter_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, NULL_TREE);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
          || ((TYPE_PRECISION (shorter_type)
               >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
              && (TYPE_UNSIGNED (shorter_type)
                  == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
          || (TREE_CODE (arg1_unw) == INTEGER_CST
              && (TREE_CODE (shorter_type) == INTEGER_TYPE
                  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
              && int_fits_type_p (arg1_unw, shorter_type))))
    return fold_build2_loc (loc, code, type, arg0_unw,
                            fold_convert_loc (loc, shorter_type, arg1_unw));

  if (TREE_CODE (arg1_unw) != INTEGER_CST
      || TREE_CODE (shorter_type) != INTEGER_TYPE
      || !int_fits_type_p (arg1_unw, shorter_type))
    return NULL_TREE;

  /* If we are comparing with the integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
                                                   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
                                                   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      else if (below)
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case GT_EXPR:
    case GE_EXPR:
      if (above)
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      else if (below)
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
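
/* Editor's note (illustrative example, not from the original source):
   given short s, a comparison such as

     (int) s == 70000

   folds to constant false here, because 70000 is outside the range of
   short.  */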
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
                              tree arg0, tree arg1)
{
  tree arg0_inner;
  tree inner_type, outer_type;

  if (!CONVERT_EXPR_P (arg0))
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) != INTEGER_CST
      && !(CONVERT_EXPR_P (arg1)
           && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
       || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
                                  0, TREE_OVERFLOW (arg1));
  else
    arg1 = fold_convert_loc (loc, inner_type, arg1);

  return fold_build2_loc (loc, code, type, arg0_inner, arg1);
}
/* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
   step of the array.  Reconstructs s and delta in the case of s *
   delta being an integer constant (and thus already folded).  ADDR is
   the address.  OP1 is the multiplicative expression.  If the
   function succeeds, the new address expression is returned.
   Otherwise NULL_TREE is returned.  LOC is the location of the
   resulting expression.  */

static tree
try_move_mult_to_index (location_t loc, tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;
  bool mdim = false;

  /* Strip the nops that might be added when converting op1 to sizetype.  */
  STRIP_NOPS (op1);

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
        {
          s = arg0;
          delta = arg1;
        }
      else if (TREE_CODE (arg1) == INTEGER_CST)
        {
          s = arg1;
          delta = arg0;
        }
      else
        return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Simulate we are delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
        {
          tree domain;

          /* Remember if this was a multi-dimensional array.  */
          if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
            mdim = true;

          domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
          if (! domain)
            continue;
          itype = TREE_TYPE (domain);

          step = array_ref_element_size (ref);
          if (TREE_CODE (step) != INTEGER_CST)
            continue;

          if (s)
            {
              if (! tree_int_cst_equal (step, s))
                continue;
            }
          else
            {
              /* Try if delta is a multiple of step.  */
              tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
              if (! tmp)
                continue;
              delta = tmp;
            }

          /* Only fold here if we can verify we do not overflow one
             dimension of a multi-dimensional array.  */
          if (mdim)
            {
              tree tmp;

              if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
                  || !TYPE_MAX_VALUE (domain)
                  || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
                continue;

              tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
                                     fold_convert_loc (loc, itype,
                                                       TREE_OPERAND (ref, 1)),
                                     fold_convert_loc (loc, itype, delta));
              if (!tmp
                  || TREE_CODE (tmp) != INTEGER_CST
                  || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
                continue;
            }

          break;
        }
      else
        mdim = false;

      if (!handled_component_p (ref))
        return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  SET_EXPR_LOCATION (ret, loc);
  pos = ret;

  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
                                           fold_convert_loc (loc, itype,
                                                             TREE_OPERAND (pos, 1)),
                                           fold_convert_loc (loc, itype, delta));

  return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
}
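
/* Editor's note (illustrative example, not from the original source):
   for int a[10] on a target with 4-byte int, this folds

     &a[1] p+ 8    into    &a[3]

   because the byte offset 8 is 2 * sizeof (int), i.e. two array
   steps.  */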
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  if (POINTER_TYPE_P (typea))
    {
      /* Convert the pointer types into integer before taking the difference.  */
      tree ta = fold_convert_loc (loc, ssizetype, a);
      tree ta1 = fold_convert_loc (loc, ssizetype, a1);
      diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
    }
  else
    diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);

  if (!diff || !integer_onep (diff))
    return NULL_TREE;

  return fold_build2_loc (loc, GE_EXPR, type, a, y);
}
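
/* Editor's note (illustrative example, not from the original source):
   combined with the A < X bound, this turns

     a < x && a + 1 > y    into    a < x && a >= y

   sidestepping the corner case where A + 1 would wrap at the type
   maximum.  */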
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
                          tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else if (TREE_CODE (arg0) == INTEGER_CST)
    {
      arg00 = build_one_cst (type);
      arg01 = arg0;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
        return NULL_TREE;
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      arg10 = build_one_cst (type);
      /* As we canonicalize A - 2 to A + -2 get rid of that sign for
         the purpose of this canonicalization.  */
      if (TREE_INT_CST_HIGH (arg1) == -1
          && negate_expr_p (arg1)
          && code == PLUS_EXPR)
        {
          arg11 = negate_expr (arg1);
          code = MINUS_EXPR;
        }
      else
        arg11 = arg1;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
        return NULL_TREE;
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (host_integerp (arg01, 0)
           && host_integerp (arg11, 0))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = TREE_INT_CST_LOW (arg01);
      int11 = TREE_INT_CST_LOW (arg11);

      /* Move min of absolute values to int11.  */
      if ((int01 >= 0 ? int01 : -int01)
          < (int11 >= 0 ? int11 : -int11))
        {
          tmp = int01, int01 = int11, int11 = tmp;
          alt0 = arg00, arg00 = arg10, arg10 = alt0;
          maybe_same = arg01;
          swap = true;
        }
      else
        maybe_same = arg11;

      if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
          /* The remainder should not be a constant, otherwise we
             end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
             increased the number of multiplications necessary.  */
          && TREE_CODE (arg10) != INTEGER_CST)
        {
          alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
                                  build_int_cst (TREE_TYPE (arg00),
                                                 int01 / int11));
          alt1 = arg10;
          same = maybe_same;
          if (swap)
            maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
        }
    }

  if (same)
    return fold_build2_loc (loc, MULT_EXPR, type,
                            fold_build2_loc (loc, code, type,
                                             fold_convert_loc (loc, type, alt0),
                                             fold_convert_loc (loc, type, alt1)),
                            fold_convert_loc (loc, type, same));

  return NULL_TREE;
}
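
/* Editor's note (illustrative example, not from the original source):
   the identical-multiplicand case folds

     a * 4 + a * 12    into    a * 16

   and the common power-of-two case folds

     x * 12 + y * 4    into    (x * 3 + y) * 4.  */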
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (bitpos < HOST_BITS_PER_WIDE_INT)
        value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
      else
        value = (unsigned char) (TREE_INT_CST_HIGH (expr)
                                 >> (bitpos - HOST_BITS_PER_WIDE_INT));

      if (total_bytes > UNITS_PER_WORD)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
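
/* Editor's note (illustrative example, not from the original source):
   encoding the 32-bit INTEGER_CST 0x01020304 for a little-endian
   target fills PTR with the bytes 04 03 02 01; on a big-endian target
   with 01 02 03 04.  */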
/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words, bitpos;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if (total_bytes > len)
    return 0;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (UNITS_PER_WORD < 4)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
    }
  return total_bytes;
}
/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_complex (const_tree expr, unsigned char *ptr, int len)
{
  int rsize, isize;
  tree part;

  part = TREE_REALPART (expr);
  rsize = native_encode_expr (part, ptr, len);
  if (rsize == 0)
    return 0;
  part = TREE_IMAGPART (expr);
  isize = native_encode_expr (part, ptr+rsize, len-rsize);
  if (isize != rsize)
    return 0;
  return rsize + isize;
}
/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_vector (const_tree expr, unsigned char *ptr, int len)
{
  int i, size, offset, count;
  tree itype, elem, elements;

  offset = 0;
  elements = TREE_VECTOR_CST_ELTS (expr);
  count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
  itype = TREE_TYPE (TREE_TYPE (expr));
  size = GET_MODE_SIZE (TYPE_MODE (itype));
  for (i = 0; i < count; i++)
    {
      if (elements)
        {
          elem = TREE_VALUE (elements);
          elements = TREE_CHAIN (elements);
        }
      else
        elem = NULL_TREE;

      if (elem)
        {
          if (native_encode_expr (elem, ptr+offset, len-offset) != size)
            return 0;
        }
      else
        {
          if (offset + size > len)
            return 0;
          memset (ptr+offset, 0, size);
        }
      offset += size;
    }
  return offset;
}
/* Subroutine of native_encode_expr.  Encode the STRING_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_string (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  HOST_WIDE_INT total_bytes;

  if (TREE_CODE (type) != ARRAY_TYPE
      || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
      || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
      || !host_integerp (TYPE_SIZE_UNIT (type), 0))
    return 0;
  total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
  if (total_bytes > len)
    return 0;
  if (TREE_STRING_LENGTH (expr) < total_bytes)
    {
      memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
      memset (ptr + TREE_STRING_LENGTH (expr), 0,
              total_bytes - TREE_STRING_LENGTH (expr));
    }
  else
    memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
  return total_bytes;
}
/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
   REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
   buffer PTR of length LEN bytes.  Return the number of bytes
   placed in the buffer, or zero upon failure.  */

int
native_encode_expr (const_tree expr, unsigned char *ptr, int len)
{
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return native_encode_int (expr, ptr, len);

    case REAL_CST:
      return native_encode_real (expr, ptr, len);

    case COMPLEX_CST:
      return native_encode_complex (expr, ptr, len);

    case VECTOR_CST:
      return native_encode_vector (expr, ptr, len);

    case STRING_CST:
      return native_encode_string (expr, ptr, len);

    default:
      return 0;
    }
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;
  double_int result;

  if (total_bytes > len)
    return NULL_TREE;
  if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
    return NULL_TREE;

  result = double_int_zero;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (total_bytes > UNITS_PER_WORD)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      value = ptr[offset];

      if (bitpos < HOST_BITS_PER_WIDE_INT)
        result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
      else
        result.high |= (unsigned HOST_WIDE_INT) value
                       << (bitpos - HOST_BITS_PER_WIDE_INT);
    }

  return double_int_to_tree (type, result);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  int byte, offset, word, words, bitpos;
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_complex (tree type, const unsigned char *ptr, int len)
{
  tree etype, rpart, ipart;
  int size;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  if (size * 2 > len)
    return NULL_TREE;
  rpart = native_interpret_expr (etype, ptr, size);
  if (!rpart)
    return NULL_TREE;
  ipart = native_interpret_expr (etype, ptr+size, size);
  if (!ipart)
    return NULL_TREE;
  return build_complex (type, rpart, ipart);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_vector (tree type, const unsigned char *ptr, int len)
{
  tree etype, elem, elements;
  int i, size, count;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  count = TYPE_VECTOR_SUBPARTS (type);
  if (size * count > len)
    return NULL_TREE;

  elements = NULL_TREE;
  for (i = count - 1; i >= 0; i--)
    {
      elem = native_interpret_expr (etype, ptr+(i*size), size);
      if (!elem)
        return NULL_TREE;
      elements = tree_cons (NULL_TREE, elem, elements);
    }
  return build_vector (type, elements);
}
/* Subroutine of fold_view_convert_expr.  Interpret the contents of
   the buffer PTR of length LEN as a constant of type TYPE.  For
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
   return NULL_TREE.  */

tree
native_interpret_expr (tree type, const unsigned char *ptr, int len)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      return native_interpret_int (type, ptr, len);

    case REAL_TYPE:
      return native_interpret_real (type, ptr, len);

    case COMPLEX_TYPE:
      return native_interpret_complex (type, ptr, len);

    case VECTOR_TYPE:
      return native_interpret_vector (type, ptr, len);

    default:
      return NULL_TREE;
    }
}
/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */

static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}
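
/* Editor's note (illustrative example, not from the original source):
   this is what folds, e.g.,

     VIEW_CONVERT_EXPR<int>(1.0f)

   to the constant 0x3f800000 at compile time, by encoding the float in
   target byte order and re-interpreting the bytes as an int.  */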
/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
        t = build1_loc (loc, NOP_EXPR, ptrtype, t);
    }
  else if (TREE_CODE (t) == MEM_REF
           && integer_zerop (TREE_OPERAND (t, 1)))
    return TREE_OPERAND (t, 0);
  else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
    {
      t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));

      if (TREE_TYPE (t) != ptrtype)
        t = fold_convert_loc (loc, ptrtype, t);
    }
  else
    t = build1_loc (loc, ADDR_EXPR, ptrtype, t);

  return t;
}
/* Build an expression for the address of T.  */

tree
build_fold_addr_expr_loc (location_t loc, tree t)
{
  tree ptrtype = build_pointer_type (TREE_TYPE (t));

  return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
}
7607 /* Fold a unary expression of code CODE and type TYPE with operand
7608 OP0. Return the folded expression if folding is successful.
7609 Otherwise, return NULL_TREE. */
7612 fold_unary_loc_1 (location_t loc
, enum tree_code code
, tree type
, tree op0
)
7616 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
7618 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
7619 && TREE_CODE_LENGTH (code
) == 1);
7624 if (CONVERT_EXPR_CODE_P (code
)
7625 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
)
7627 /* Don't use STRIP_NOPS, because signedness of argument type
7629 STRIP_SIGN_NOPS (arg0
);
7633 /* Strip any conversions that don't change the mode. This
7634 is safe for every expression, except for a comparison
7635 expression because its signedness is derived from its
7638 Note that this is done as an internal manipulation within
7639 the constant folder, in order to find the simplest
7640 representation of the arguments so that their form can be
7641 studied. In any cases, the appropriate type conversions
7642 should be put back in the tree that will get out of the
7648 if (TREE_CODE_CLASS (code
) == tcc_unary
)
7650 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
7651 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7652 fold_build1_loc (loc
, code
, type
,
7653 fold_convert_loc (loc
, TREE_TYPE (op0
),
7654 TREE_OPERAND (arg0
, 1))));
7655 else if (TREE_CODE (arg0
) == COND_EXPR
)
7657 tree arg01
= TREE_OPERAND (arg0
, 1);
7658 tree arg02
= TREE_OPERAND (arg0
, 2);
7659 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
7660 arg01
= fold_build1_loc (loc
, code
, type
,
7661 fold_convert_loc (loc
,
7662 TREE_TYPE (op0
), arg01
));
7663 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
7664 arg02
= fold_build1_loc (loc
, code
, type
,
7665 fold_convert_loc (loc
,
7666 TREE_TYPE (op0
), arg02
));
7667 tem
= fold_build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7670 /* If this was a conversion, and all we did was to move into
7671 inside the COND_EXPR, bring it back out. But leave it if
7672 it is a conversion from integer to integer and the
7673 result precision is no wider than a word since such a
7674 conversion is cheap and may be optimized away by combine,
7675 while it couldn't if it were outside the COND_EXPR. Then return
7676 so we don't get into an infinite recursion loop taking the
7677 conversion out and then back in. */
7679 if ((CONVERT_EXPR_CODE_P (code
)
7680 || code
== NON_LVALUE_EXPR
)
7681 && TREE_CODE (tem
) == COND_EXPR
7682 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
7683 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
7684 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
7685 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
7686 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
7687 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
7688 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7690 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
7691 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
7692 || flag_syntax_only
))
7693 tem
= build1_loc (loc
, code
, type
,
7695 TREE_TYPE (TREE_OPERAND
7696 (TREE_OPERAND (tem
, 1), 0)),
7697 TREE_OPERAND (tem
, 0),
7698 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
7699 TREE_OPERAND (TREE_OPERAND (tem
, 2),
7703 else if (COMPARISON_CLASS_P (arg0
))
7705 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
7707 arg0
= copy_node (arg0
);
7708 TREE_TYPE (arg0
) = type
;
7711 else if (TREE_CODE (type
) != INTEGER_TYPE
)
7712 return fold_build3_loc (loc
, COND_EXPR
, type
, arg0
,
7713 fold_build1_loc (loc
, code
, type
,
7715 fold_build1_loc (loc
, code
, type
,
7716 integer_zero_node
));
7723 /* Re-association barriers around constants and other re-association
7724 barriers can be removed. */
7725 if (CONSTANT_CLASS_P (op0
)
7726 || TREE_CODE (op0
) == PAREN_EXPR
)
7727 return fold_convert_loc (loc
, type
, op0
);
7732 case FIX_TRUNC_EXPR
:
7733 if (TREE_TYPE (op0
) == type
)
7736 /* If we have (type) (a CMP b) and type is an integral type, return
7737 new expression involving the new type. */
7738 if (COMPARISON_CLASS_P (op0
) && INTEGRAL_TYPE_P (type
))
7739 return fold_build2_loc (loc
, TREE_CODE (op0
), type
, TREE_OPERAND (op0
, 0),
7740 TREE_OPERAND (op0
, 1));
7742 /* Handle cases of two conversions in a row. */
7743 if (CONVERT_EXPR_P (op0
))
7745 tree inside_type
= TREE_TYPE (TREE_OPERAND (op0
, 0));
7746 tree inter_type
= TREE_TYPE (op0
);
7747 int inside_int
= INTEGRAL_TYPE_P (inside_type
);
7748 int inside_ptr
= POINTER_TYPE_P (inside_type
);
7749 int inside_float
= FLOAT_TYPE_P (inside_type
);
7750 int inside_vec
= TREE_CODE (inside_type
) == VECTOR_TYPE
;
7751 unsigned int inside_prec
= TYPE_PRECISION (inside_type
);
7752 int inside_unsignedp
= TYPE_UNSIGNED (inside_type
);
7753 int inter_int
= INTEGRAL_TYPE_P (inter_type
);
7754 int inter_ptr
= POINTER_TYPE_P (inter_type
);
7755 int inter_float
= FLOAT_TYPE_P (inter_type
);
7756 int inter_vec
= TREE_CODE (inter_type
) == VECTOR_TYPE
;
7757 unsigned int inter_prec
= TYPE_PRECISION (inter_type
);
7758 int inter_unsignedp
= TYPE_UNSIGNED (inter_type
);
7759 int final_int
= INTEGRAL_TYPE_P (type
);
7760 int final_ptr
= POINTER_TYPE_P (type
);
7761 int final_float
= FLOAT_TYPE_P (type
);
7762 int final_vec
= TREE_CODE (type
) == VECTOR_TYPE
;
7763 unsigned int final_prec
= TYPE_PRECISION (type
);
7764 int final_unsignedp
= TYPE_UNSIGNED (type
);
7766 /* In addition to the cases of two conversions in a row
7767 handled below, if we are converting something to its own
7768 type via an object of identical or wider precision, neither
7769 conversion is needed. */
7770 if (TYPE_MAIN_VARIANT (inside_type
) == TYPE_MAIN_VARIANT (type
)
7771 && (((inter_int
|| inter_ptr
) && final_int
)
7772 || (inter_float
&& final_float
))
7773 && inter_prec
>= final_prec
)
7774 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7776 /* Likewise, if the intermediate and initial types are either both
7777 float or both integer, we don't need the middle conversion if the
7778 former is wider than the latter and doesn't change the signedness
7779 (for integers). Avoid this if the final type is a pointer since
7780 then we sometimes need the middle conversion. Likewise if the
7781 final type has a precision not equal to the size of its mode. */
7782 if (((inter_int
&& inside_int
)
7783 || (inter_float
&& inside_float
)
7784 || (inter_vec
&& inside_vec
))
7785 && inter_prec
>= inside_prec
7786 && (inter_float
|| inter_vec
7787 || inter_unsignedp
== inside_unsignedp
)
7788 && ! (final_prec
!= GET_MODE_BITSIZE (TYPE_MODE (type
))
7789 && TYPE_MODE (type
) == TYPE_MODE (inter_type
))
7791 && (! final_vec
|| inter_prec
== inside_prec
))
7792 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7794 /* If we have a sign-extension of a zero-extended value, we can
7795 replace that by a single zero-extension. */
7796 if (inside_int
&& inter_int
&& final_int
7797 && inside_prec
< inter_prec
&& inter_prec
< final_prec
7798 && inside_unsignedp
&& !inter_unsignedp
)
7799 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7801 /* Two conversions in a row are not needed unless:
7802 - some conversion is floating-point (overstrict for now), or
7803 - some conversion is a vector (overstrict for now), or
7804 - the intermediate type is narrower than both initial and
7806 - the intermediate type and innermost type differ in signedness,
7807 and the outermost type is wider than the intermediate, or
7808 - the initial type is a pointer type and the precisions of the
7809 intermediate and final types differ, or
7810 - the final type is a pointer type and the precisions of the
7811 initial and intermediate types differ. */
7812 if (! inside_float
&& ! inter_float
&& ! final_float
7813 && ! inside_vec
&& ! inter_vec
&& ! final_vec
7814 && (inter_prec
>= inside_prec
|| inter_prec
>= final_prec
)
7815 && ! (inside_int
&& inter_int
7816 && inter_unsignedp
!= inside_unsignedp
7817 && inter_prec
< final_prec
)
7818 && ((inter_unsignedp
&& inter_prec
> inside_prec
)
7819 == (final_unsignedp
&& final_prec
> inter_prec
))
7820 && ! (inside_ptr
&& inter_prec
!= final_prec
)
7821 && ! (final_ptr
&& inside_prec
!= inter_prec
)
7822 && ! (final_prec
!= GET_MODE_BITSIZE (TYPE_MODE (type
))
7823 && TYPE_MODE (type
) == TYPE_MODE (inter_type
)))
7824 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7827 /* Handle (T *)&A.B.C for A being of type T and B and C
7828 living at offset zero. This occurs frequently in
7829 C++ upcasting and then accessing the base. */
7830 if (TREE_CODE (op0
) == ADDR_EXPR
7831 && POINTER_TYPE_P (type
)
7832 && handled_component_p (TREE_OPERAND (op0
, 0)))
7834 HOST_WIDE_INT bitsize
, bitpos
;
7836 enum machine_mode mode
;
7837 int unsignedp
, volatilep
;
7838 tree base
= TREE_OPERAND (op0
, 0);
7839 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
,
7840 &mode
, &unsignedp
, &volatilep
, false);
7841 /* If the reference was to a (constant) zero offset, we can use
7842 the address of the base if it has the same base type
7843 as the result type and the pointer type is unqualified. */
7844 if (! offset
&& bitpos
== 0
7845 && (TYPE_MAIN_VARIANT (TREE_TYPE (type
))
7846 == TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
7847 && TYPE_QUALS (type
) == TYPE_UNQUALIFIED
)
7848 return fold_convert_loc (loc
, type
,
7849 build_fold_addr_expr_loc (loc
, base
));
7852 if (TREE_CODE (op0
) == MODIFY_EXPR
7853 && TREE_CONSTANT (TREE_OPERAND (op0
, 1))
7854 /* Detect assigning a bitfield. */
7855 && !(TREE_CODE (TREE_OPERAND (op0
, 0)) == COMPONENT_REF
7857 (TREE_OPERAND (TREE_OPERAND (op0
, 0), 1))))
7859 /* Don't leave an assignment inside a conversion
7860 unless assigning a bitfield. */
7861 tem
= fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 1));
7862 /* First do the assignment, then return converted constant. */
7863 tem
= build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (tem
), op0
, tem
);
7864 TREE_NO_WARNING (tem
) = 1;
7865 TREE_USED (tem
) = 1;
7869 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7870 constants (if x has signed type, the sign bit cannot be set
7871 in c). This folds extension into the BIT_AND_EXPR.
7872 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7873 very likely don't have maximal range for their precision and this
7874 transformation effectively doesn't preserve non-maximal ranges. */
7875 if (TREE_CODE (type
) == INTEGER_TYPE
7876 && TREE_CODE (op0
) == BIT_AND_EXPR
7877 && TREE_CODE (TREE_OPERAND (op0
, 1)) == INTEGER_CST
)
7879 tree and_expr
= op0
;
7880 tree and0
= TREE_OPERAND (and_expr
, 0);
7881 tree and1
= TREE_OPERAND (and_expr
, 1);
7884 if (TYPE_UNSIGNED (TREE_TYPE (and_expr
))
7885 || (TYPE_PRECISION (type
)
7886 <= TYPE_PRECISION (TREE_TYPE (and_expr
))))
7888 else if (TYPE_PRECISION (TREE_TYPE (and1
))
7889 <= HOST_BITS_PER_WIDE_INT
7890 && host_integerp (and1
, 1))
7892 unsigned HOST_WIDE_INT cst
;
7894 cst
= tree_low_cst (and1
, 1);
7895 cst
&= (HOST_WIDE_INT
) -1
7896 << (TYPE_PRECISION (TREE_TYPE (and1
)) - 1);
7897 change
= (cst
== 0);
7898 #ifdef LOAD_EXTEND_OP
7900 && !flag_syntax_only
7901 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0
)))
7904 tree uns
= unsigned_type_for (TREE_TYPE (and0
));
7905 and0
= fold_convert_loc (loc
, uns
, and0
);
7906 and1
= fold_convert_loc (loc
, uns
, and1
);
7912 tem
= force_fit_type_double (type
, tree_to_double_int (and1
),
7913 0, TREE_OVERFLOW (and1
));
7914 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
7915 fold_convert_loc (loc
, type
, and0
), tem
);
7919 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7920 when one of the new casts will fold away. Conservatively we assume
7921 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7922 if (POINTER_TYPE_P (type
)
7923 && TREE_CODE (arg0
) == POINTER_PLUS_EXPR
7924 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
7925 || TREE_CODE (TREE_OPERAND (arg0
, 0)) == NOP_EXPR
7926 || TREE_CODE (TREE_OPERAND (arg0
, 1)) == NOP_EXPR
))
7928 tree arg00
= TREE_OPERAND (arg0
, 0);
7929 tree arg01
= TREE_OPERAND (arg0
, 1);
7931 return fold_build2_loc (loc
,
7932 TREE_CODE (arg0
), type
,
7933 fold_convert_loc (loc
, type
, arg00
),
7934 fold_convert_loc (loc
, sizetype
, arg01
));
7937 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7938 of the same precision, and X is an integer type not narrower than
7939 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7940 if (INTEGRAL_TYPE_P (type
)
7941 && TREE_CODE (op0
) == BIT_NOT_EXPR
7942 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7943 && CONVERT_EXPR_P (TREE_OPERAND (op0
, 0))
7944 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (op0
)))
7946 tem
= TREE_OPERAND (TREE_OPERAND (op0
, 0), 0);
7947 if (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7948 && TYPE_PRECISION (type
) <= TYPE_PRECISION (TREE_TYPE (tem
)))
7949 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
7950 fold_convert_loc (loc
, type
, tem
));
7953 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7954 type of X and Y (integer types only). */
7955 if (INTEGRAL_TYPE_P (type
)
7956 && TREE_CODE (op0
) == MULT_EXPR
7957 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7958 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
7960 /* Be careful not to introduce new overflows. */
7962 if (TYPE_OVERFLOW_WRAPS (type
))
7965 mult_type
= unsigned_type_for (type
);
7967 if (TYPE_PRECISION (mult_type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
7969 tem
= fold_build2_loc (loc
, MULT_EXPR
, mult_type
,
7970 fold_convert_loc (loc
, mult_type
,
7971 TREE_OPERAND (op0
, 0)),
7972 fold_convert_loc (loc
, mult_type
,
7973 TREE_OPERAND (op0
, 1)));
7974 return fold_convert_loc (loc
, type
, tem
);
7978 tem
= fold_convert_const (code
, type
, op0
);
7979 return tem
? tem
: NULL_TREE
;
7981 case ADDR_SPACE_CONVERT_EXPR
:
7982 if (integer_zerop (arg0
))
7983 return fold_convert_const (code
, type
, arg0
);
7986 case FIXED_CONVERT_EXPR
:
7987 tem
= fold_convert_const (code
, type
, arg0
);
7988 return tem
? tem
: NULL_TREE
;
    case VIEW_CONVERT_EXPR:
      if (TREE_TYPE (op0) == type)
	return op0;
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				type, TREE_OPERAND (op0, 0));
      if (TREE_CODE (op0) == MEM_REF)
	return fold_build2_loc (loc, MEM_REF, type,
				TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));

      /* For integral conversions with the same precision or pointer
	 conversions use a NOP_EXPR instead.  */
      if ((INTEGRAL_TYPE_P (type)
	   || POINTER_TYPE_P (type))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
	      || POINTER_TYPE_P (TREE_TYPE (op0)))
	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
	return fold_convert_loc (loc, type, op0);

      /* Strip inner integral conversions that do not change the precision.  */
      if (CONVERT_EXPR_P (op0)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
	      || POINTER_TYPE_P (TREE_TYPE (op0)))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
	      || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
	  && (TYPE_PRECISION (TREE_TYPE (op0))
	      == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				type, TREE_OPERAND (op0, 0));

      return fold_view_convert_expr (type, op0);
    case NEGATE_EXPR:
      tem = fold_negate_expr (loc, arg0);
      if (tem)
	return fold_convert_loc (loc, type, tem);
      return NULL_TREE;
    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
	       && TREE_CODE (type) == REAL_TYPE)
	{
	  tree targ0 = strip_float_extensions (arg0);
	  if (targ0 != arg0)
	    return fold_convert_loc (loc, type,
				     fold_build1_loc (loc, ABS_EXPR,
						      TREE_TYPE (targ0),
						      targ0));
	}
      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
      else if (TREE_CODE (arg0) == ABS_EXPR)
	return arg0;
      else if (tree_expr_nonnegative_p (arg0))
	return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = fold_strip_sign_ops (arg0);
	  if (tem)
	    return fold_build1_loc (loc, ABS_EXPR, type,
				    fold_convert_loc (loc, type, tem));
	}
      return NULL_TREE;
    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
	  tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
				  negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
	  tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
	  return build_complex (type, rpart, negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      return NULL_TREE;
    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				build_int_cst (type, 1));
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
	       && ((TREE_CODE (arg0) == MINUS_EXPR
		    && integer_onep (TREE_OPERAND (arg0, 1)))
		   || (TREE_CODE (arg0) == PLUS_EXPR
		       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
	return fold_build1_loc (loc, NEGATE_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
					 fold_convert_loc (loc, type,
							   TREE_OPERAND (arg0, 0)))))
	return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
					 fold_convert_loc (loc, type,
							   TREE_OPERAND (arg0, 1)))))
	return fold_build2_loc (loc, BIT_XOR_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)), tem);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
	  int count = TYPE_VECTOR_SUBPARTS (type), i;

	  for (i = 0; i < count; i++)
	    {
	      if (elements)
		{
		  elem = TREE_VALUE (elements);
		  elem = fold_unary_loc (loc, BIT_NOT_EXPR,
					 TREE_TYPE (type), elem);
		  if (elem == NULL_TREE)
		    break;
		  elements = TREE_CHAIN (elements);
		}
	      else
		elem = build_int_cst (TREE_TYPE (type), -1);
	      list = tree_cons (NULL_TREE, elem, list);
	    }
	  if (i == count)
	    return build_vector (type, nreverse (list));
	}

      return NULL_TREE;
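      /* The ~(-A) -> A - 1 and ~(A - 1) -> -A folds above are the usual
	 two's complement identities: from -A == ~A + 1 it follows that
	 ~(-A) == A - 1; e.g. for A == 5, ~(-5) == 4 and ~(5 - 1) == -5.  */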
    case TRUTH_NOT_EXPR:
      /* The argument to invert_truthvalue must have Boolean type.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
	arg0 = fold_convert_loc (loc, boolean_type_node, arg0);

      /* Note that the operand of this must be an int
	 and its values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (loc, arg0);
      if (!tem)
	return NULL_TREE;
      return fold_convert_loc (loc, type, tem);
    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg0, 1));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_REALPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
				 fold_build1_loc (loc, REALPART_EXPR, itype,
						  TREE_OPERAND (arg0, 0)),
				 fold_build1_loc (loc, REALPART_EXPR, itype,
						  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, REALPART_EXPR, itype,
				 TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_COS);
		if (fn)
		  return build_call_expr_loc (loc, fn, 1,
					      CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;
    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return build_zero_cst (type);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
				     TREE_OPERAND (arg0, 0));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
				 fold_build1_loc (loc, IMAGPART_EXPR, itype,
						  TREE_OPERAND (arg0, 0)),
				 fold_build1_loc (loc, IMAGPART_EXPR, itype,
						  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, IMAGPART_EXPR, itype,
				 TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, negate_expr (tem));
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_SIN);
		if (fn)
		  return build_call_expr_loc (loc, fn, 1,
					      CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;
    case INDIRECT_REF:
      /* Fold *&X to X if X is an lvalue.  */
      if (TREE_CODE (op0) == ADDR_EXPR)
	{
	  tree op00 = TREE_OPERAND (op0, 0);
	  if ((TREE_CODE (op00) == VAR_DECL
	       || TREE_CODE (op00) == PARM_DECL
	       || TREE_CODE (op00) == RESULT_DECL)
	      && !TREE_READONLY (op00))
	    return op00;
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Given an expression tree EXP, set the EXPR_FOLDED flag, and if it is
   a nop, recursively set the EXPR_FOLDED flag of its operand.  */

static void
set_expr_folded_flag (tree exp)
{
  EXPR_FOLDED (exp) = 1;

  /* If EXP is a nop (i.e. NON_LVALUE_EXPRs and NOP_EXPRs), we need to
     recursively set the EXPR_FOLDED flag of its operand because the
     expression will be stripped later.  */
  while ((CONVERT_EXPR_P (exp)
	  || TREE_CODE (exp) == NON_LVALUE_EXPR)
	 && TREE_OPERAND (exp, 0) != error_mark_node)
    {
      exp = TREE_OPERAND (exp, 0);
      EXPR_FOLDED (exp) = 1;
    }
}
/* Fold a unary expression of code CODE and type TYPE with operand
   OP0.  Return the folded expression if folding is successful.
   Otherwise, return NULL_TREE.
   This is a wrapper around the fold_unary_loc_1 function (which does the
   actual folding).  Set the EXPR_FOLDED flag of the folded expression
   if folding is successful.  */

tree
fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary_loc_1 (loc, code, type, op0);
  if (tem)
    set_expr_folded_flag (tem);
  return tem;
}
/* If the operation was a conversion do _not_ mark a resulting constant
   with TREE_OVERFLOW if the original constant was not.  These conversions
   have implementation defined behavior and retaining the TREE_OVERFLOW
   flag here would confuse later passes such as VRP.  */

tree
fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
				tree type, tree op0)
{
  tree res = fold_unary_loc (loc, code, type, op0);
  if (res
      && TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (op0) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (code))
    TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
  return res;
}
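/* For illustration: folding (int) 0x80000000u produces an INTEGER_CST
   whose value does not fit in int, so the conversion would normally be
   marked with TREE_OVERFLOW.  The conversion is merely
   implementation-defined, not an overflow, so the wrapper above copies
   the (clear) overflow bit from the operand instead, keeping passes
   like VRP from treating the result as an overflowed constant.  */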
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_minmax (location_t loc, enum tree_code code, tree type,
	     tree op0, tree op1)
{
  enum tree_code compl_code;

  if (code == MIN_EXPR)
    compl_code = MAX_EXPR;
  else if (code == MAX_EXPR)
    compl_code = MIN_EXPR;
  else
    gcc_unreachable ();

  /* MIN (MAX (a, b), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));

  /* MIN (MAX (b, a), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));

  /* MIN (a, MAX (a, b)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));

  /* MIN (a, MAX (b, a)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));

  return NULL_TREE;
}
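/* For illustration: MIN (MAX (a, b), b) folds to b because
   MAX (a, b) >= b always holds; e.g. with a == 7 and b == 3,
   MIN (MAX (7, 3), 3) == MIN (7, 3) == 3.  The remaining three cases
   are the same identity up to commutation of the inner operands.  */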
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code,
				 tree type, tree arg0, tree arg1,
				 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;
  bool swap = false;

  /* Match A +- CST code arg1 and CST code arg1.  We can change the
     first form only if overflow is undefined.  */
  if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
	 /* In principle pointers also have undefined overflow behavior,
	    but that causes problems elsewhere.  */
	 && !POINTER_TYPE_P (TREE_TYPE (arg0))
	 && (code0 == MINUS_EXPR
	     || code0 == PLUS_EXPR)
	 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	|| code0 == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  if (code0 == INTEGER_CST)
    cst0 = arg0;
  else
    cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  if (code0 == INTEGER_CST)
    {
      /* CST <= arg1  ->  CST-1 < arg1.  */
      if (code == LE_EXPR && sgn0 == 1)
	code = LT_EXPR;
      /* -CST < arg1  ->  -CST-1 <= arg1.  */
      else if (code == LT_EXPR && sgn0 == -1)
	code = LE_EXPR;
      /* CST > arg1  ->  CST-1 >= arg1.  */
      else if (code == GT_EXPR && sgn0 == 1)
	code = GE_EXPR;
      /* -CST >= arg1  ->  -CST-1 > arg1.  */
      else if (code == GE_EXPR && sgn0 == -1)
	code = GT_EXPR;
      else
	return NULL_TREE;
      /* arg1 code' CST' might be more canonical.  */
      swap = true;
    }
  else
    {
      /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
      if (code == LT_EXPR
	  && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = LE_EXPR;
      /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
      else if (code == GT_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = GE_EXPR;
      /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
      else if (code == LE_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = LT_EXPR;
      /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
      else if (code == GE_EXPR
	       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = GT_EXPR;
      else
	return NULL_TREE;
      *strict_overflow_p = true;
    }

  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its types range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
      && ((sgn0 == 1
	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
	  || (sgn0 == -1
	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
    /* We cannot swap the comparison here as that would cause us to
       endlessly recurse.  */
    return NULL_TREE;

  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
		       cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
  if (code0 != INTEGER_CST)
    t = fold_build2_loc (loc, code0, TREE_TYPE (arg0),
			 TREE_OPERAND (arg0, 0), t);

  /* If swapping might yield a more canonical form, do so.  */
  if (swap)
    return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
  else
    return fold_build2_loc (loc, code, type, t, arg1);
}
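/* For illustration: for signed X with undefined overflow, "x - 3 < y"
   is canonicalized to "x - 2 <= y" (the A - CST < arg1 case), and the
   constant-only form "5 <= y" becomes "4 < y", which is then swapped to
   put the constant last.  Each step strictly reduces the magnitude of
   the constant, so the process terminates.  */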
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (location_t loc, enum tree_code code,
			       tree type, tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when reducing constant in comparison");

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
				       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
				       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}
/* Return whether BASE + OFFSET + BITPOS may wrap around the address
   space.  This is used to avoid issuing overflow warnings for
   expressions like &p->x which cannot wrap.  */

static bool
pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
{
  unsigned HOST_WIDE_INT offset_low, total_low;
  HOST_WIDE_INT size, offset_high, total_high;

  if (!POINTER_TYPE_P (TREE_TYPE (base)))
    return true;

  if (bitpos < 0)
    return true;

  if (offset == NULL_TREE)
    {
      offset_low = 0;
      offset_high = 0;
    }
  else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
    return true;
  else
    {
      offset_low = TREE_INT_CST_LOW (offset);
      offset_high = TREE_INT_CST_HIGH (offset);
    }

  if (add_double_with_sign (offset_low, offset_high,
			    bitpos / BITS_PER_UNIT, 0,
			    &total_low, &total_high,
			    true))
    return true;

  if (total_high != 0)
    return true;

  size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
  if (size <= 0)
    return true;

  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
     array.  */
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      HOST_WIDE_INT base_size;

      base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
      if (base_size > 0 && size < base_size)
	size = base_size;
    }

  return total_low > (unsigned HOST_WIDE_INT) size;
}
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should go through fold_binary rather than calling this
   function directly.  Fold a comparison with tree code CODE and type
   TYPE with operands OP0 and OP1.  Return the folded comparison or
   NULL_TREE.  */

static tree
fold_comparison (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  tem = fold_relational_const (code, type, arg0, arg1);
  if (tem != NULL_TREE)
    return tem;

  /* If one arg is a real or integer constant, put it last.  */
  if (tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);

  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
      && (TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;
      tree variable = TREE_OPERAND (arg0, 0);
      tree lhs;
      int lhs_add;
      lhs_add = TREE_CODE (arg0) != PLUS_EXPR;

      lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
			     TREE_TYPE (arg1), const2, const1);

      /* If the constant operation overflowed this can be
	 simplified as a comparison against INT_MAX/INT_MIN.  */
      if (TREE_CODE (lhs) == INTEGER_CST
	  && TREE_OVERFLOW (lhs))
	{
	  int const1_sgn = tree_int_cst_sgn (const1);
	  enum tree_code code2 = code;

	  /* Get the sign of the constant on the lhs if the
	     operation were VARIABLE + CONST1.  */
	  if (TREE_CODE (arg0) == MINUS_EXPR)
	    const1_sgn = -const1_sgn;

	  /* The sign of the constant determines if we overflowed
	     INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
	     Canonicalize to the INT_MIN overflow by swapping the comparison
	     if necessary.  */
	  if (const1_sgn == -1)
	    code2 = swap_tree_comparison (code);

	  /* We now can look at the canonicalized case
	       VARIABLE + 1  CODE2  INT_MIN
	     and decide on the result.  */
	  if (code2 == LT_EXPR
	      || code2 == LE_EXPR
	      || code2 == EQ_EXPR)
	    return omit_one_operand_loc (loc, type, boolean_false_node,
					 variable);
	  else if (code2 == NE_EXPR
		   || code2 == GE_EXPR
		   || code2 == GT_EXPR)
	    return omit_one_operand_loc (loc, type, boolean_true_node,
					 variable);
	}

      if (TREE_CODE (lhs) == TREE_CODE (arg1)
	  && (TREE_CODE (lhs) != INTEGER_CST
	      || !TREE_OVERFLOW (lhs)))
	{
	  if (code != EQ_EXPR && code != NE_EXPR)
	    fold_overflow_warning ("assuming signed overflow does not occur "
				   "when changing X +- C1 cmp C2 to "
				   "X cmp C2 -+ C1",
				   WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type, variable, lhs);
	}
    }

  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
	  || TREE_CODE (arg1) == ADDR_EXPR
	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      enum machine_mode mode;
      int volatilep, unsignedp;
      bool indirect_base0 = false, indirect_base1 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
	 get_inner_reference, but put it back by stripping INDIRECT_REF
	 off the base object if possible.  indirect_baseN will be true
	 if baseN is not an address but refers to the object itself.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
				       &bitsize, &bitpos0, &offset0, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base0) == INDIRECT_REF)
	    base0 = TREE_OPERAND (base0, 0);
	  else
	    indirect_base0 = true;
	}
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  base0 = TREE_OPERAND (arg0, 0);
	  STRIP_SIGN_NOPS (base0);
	  if (TREE_CODE (base0) == ADDR_EXPR)
	    {
	      base0 = TREE_OPERAND (base0, 0);
	      indirect_base0 = true;
	    }
	  offset0 = TREE_OPERAND (arg0, 1);
	}

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
	{
	  base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
				       &bitsize, &bitpos1, &offset1, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base1) == INDIRECT_REF)
	    base1 = TREE_OPERAND (base1, 0);
	  else
	    indirect_base1 = true;
	}
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	{
	  base1 = TREE_OPERAND (arg1, 0);
	  STRIP_SIGN_NOPS (base1);
	  if (TREE_CODE (base1) == ADDR_EXPR)
	    {
	      base1 = TREE_OPERAND (base1, 0);
	      indirect_base1 = true;
	    }
	  offset1 = TREE_OPERAND (arg1, 1);
	}

      /* A local variable can never be pointed to by
	 the default SSA name of an incoming parameter.  */
      if ((TREE_CODE (arg0) == ADDR_EXPR
	   && indirect_base0
	   && TREE_CODE (base0) == VAR_DECL
	   && auto_var_in_fn_p (base0, current_function_decl)
	   && !indirect_base1
	   && TREE_CODE (base1) == SSA_NAME
	   && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
	   && SSA_NAME_IS_DEFAULT_DEF (base1))
	  || (TREE_CODE (arg1) == ADDR_EXPR
	      && indirect_base1
	      && TREE_CODE (base1) == VAR_DECL
	      && auto_var_in_fn_p (base1, current_function_decl)
	      && !indirect_base0
	      && TREE_CODE (base0) == SSA_NAME
	      && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
	      && SSA_NAME_IS_DEFAULT_DEF (base0)))
	{
	  if (code == NE_EXPR)
	    return constant_boolean_node (1, type);
	  else if (code == EQ_EXPR)
	    return constant_boolean_node (0, type);
	}
      /* If we have equivalent bases we might be able to simplify.  */
      else if (indirect_base0 == indirect_base1
	       && operand_equal_p (base0, base1, 0))
	{
	  /* We can fold this expression to a constant if the non-constant
	     offset parts are equal.  */
	  if ((offset0 == offset1
	       || (offset0 && offset1
		   && operand_equal_p (offset0, offset1, 0)))
	      && (code == EQ_EXPR
		  || code == NE_EXPR
		  || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      if (code != EQ_EXPR
		  && code != NE_EXPR
		  && bitpos0 != bitpos1
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_CONDITIONAL);

	      switch (code)
		{
		case EQ_EXPR:
		  return constant_boolean_node (bitpos0 == bitpos1, type);
		case NE_EXPR:
		  return constant_boolean_node (bitpos0 != bitpos1, type);
		case LT_EXPR:
		  return constant_boolean_node (bitpos0 < bitpos1, type);
		case LE_EXPR:
		  return constant_boolean_node (bitpos0 <= bitpos1, type);
		case GE_EXPR:
		  return constant_boolean_node (bitpos0 >= bitpos1, type);
		case GT_EXPR:
		  return constant_boolean_node (bitpos0 > bitpos1, type);
		default:;
		}
	    }
	  /* We can simplify the comparison to a comparison of the variable
	     offset parts if the constant offset parts are equal.
	     Be careful to use signed size type here because otherwise we
	     mess with array offsets in the wrong way.  This is possible
	     because pointer arithmetic is restricted to remain within an
	     object and overflow on pointer differences is undefined as of
	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
	  else if (bitpos0 == bitpos1
		   && ((code == EQ_EXPR || code == NE_EXPR)
		       || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      /* By converting to signed size type we cover middle-end pointer
		 arithmetic which operates on unsigned pointer types of size
		 type size and ARRAY_REF offsets which are properly sign or
		 zero extended from their type in case it is narrower than
		 size type.  */
	      if (offset0 == NULL_TREE)
		offset0 = build_int_cst (ssizetype, 0);
	      else
		offset0 = fold_convert_loc (loc, ssizetype, offset0);
	      if (offset1 == NULL_TREE)
		offset1 = build_int_cst (ssizetype, 0);
	      else
		offset1 = fold_convert_loc (loc, ssizetype, offset1);

	      if (code != EQ_EXPR
		  && code != NE_EXPR
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_COMPARISON);

	      return fold_build2_loc (loc, code, type, offset0, offset1);
	    }
	}
      /* For non-equal bases we can simplify if they are addresses
	 of local binding decls or constants.  */
      else if (indirect_base0 && indirect_base1
	       /* We know that !operand_equal_p (base0, base1, 0)
		  because the if condition was false.  But make
		  sure two decls are not the same.  */
	       && base0 != base1
	       && TREE_CODE (arg0) == ADDR_EXPR
	       && TREE_CODE (arg1) == ADDR_EXPR
	       && (((TREE_CODE (base0) == VAR_DECL
		     || TREE_CODE (base0) == PARM_DECL)
		    && (targetm.binds_local_p (base0)
			|| CONSTANT_CLASS_P (base1)))
		   || CONSTANT_CLASS_P (base0))
	       && (((TREE_CODE (base1) == VAR_DECL
		     || TREE_CODE (base1) == PARM_DECL)
		    && (targetm.binds_local_p (base1)
			|| CONSTANT_CLASS_P (base0)))
		   || CONSTANT_CLASS_P (base1)))
	{
	  if (code == EQ_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_false_node,
					  arg0, arg1);
	  else if (code == NE_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_true_node,
					  arg0, arg1);
	}
      /* For equal offsets we can simplify to a comparison of the
	 base addresses.  */
      else if (bitpos0 == bitpos1
	       && (indirect_base0
		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
	       && (indirect_base1
		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
	       && ((offset0 == offset1)
		   || (offset0 && offset1
		       && operand_equal_p (offset0, offset1, 0))))
	{
	  if (indirect_base0)
	    base0 = build_fold_addr_expr_loc (loc, base0);
	  if (indirect_base1)
	    base1 = build_fold_addr_expr_loc (loc, base1);
	  return fold_build2_loc (loc, code, type, base0, base1);
	}
    }

  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
				      "occur when combining constants around "
				      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
	 of lower absolute value than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const2, const1, 0);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  variable1,
				  fold_build2_loc (loc,
						   TREE_CODE (arg1),
						   TREE_TYPE (arg1),
						   variable2, cst));
	}

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const1, const2, 0);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  fold_build2_loc (loc, TREE_CODE (arg0),
						   TREE_TYPE (arg0),
						   variable1, cst),
				  variable2);
	}
    }

  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
     signed arithmetic case.  That form is created by the compiler
     often enough for folding it to be of value.  One example is in
     computing loop trip counts after Operator Strength Reduction.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && TREE_CODE (arg0) == MULT_EXPR
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && integer_zerop (arg1))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;			/* zero */
      tree variable1 = TREE_OPERAND (arg0, 0);
      enum tree_code cmp_code = code;

      /* Handle unfolded multiplication by zero.  */
      if (integer_zerop (const1))
	return fold_build2_loc (loc, cmp_code, type, const1, const2);

      fold_overflow_warning (("assuming signed overflow does not occur when "
			      "eliminating multiplication in comparison "
			      "with zero"),
			     WARN_STRICT_OVERFLOW_COMPARISON);

      /* If const1 is negative we swap the sense of the comparison.  */
      if (tree_int_cst_sgn (const1) < 0)
	cmp_code = swap_tree_comparison (cmp_code);

      return fold_build2_loc (loc, cmp_code, type, variable1, const2);
    }

  tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
  if (tem)
    return tem;

  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
    {
      tree targ0 = strip_float_extensions (arg0);
      tree targ1 = strip_float_extensions (arg1);
      tree newtype = TREE_TYPE (targ0);

      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	newtype = TREE_TYPE (targ1);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, newtype, targ0),
				fold_convert_loc (loc, newtype, targ1));

      /* (-a) CMP (-b) -> b CMP a  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
				TREE_OPERAND (arg0, 0));

      if (TREE_CODE (arg1) == REAL_CST)
	{
	  REAL_VALUE_TYPE cst;
	  cst = TREE_REAL_CST (arg1);

	  /* (-a) CMP CST -> a swap(CMP) (-CST)  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR)
	    return fold_build2_loc (loc, swap_tree_comparison (code), type,
				    TREE_OPERAND (arg0, 0),
				    build_real (TREE_TYPE (arg1),
						real_value_negate (&cst)));

	  /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
	  /* a CMP (-0) -> a CMP 0  */
	  if (REAL_VALUE_MINUS_ZERO (cst))
	    return fold_build2_loc (loc, code, type, arg0,
				    build_real (TREE_TYPE (arg1), dconst0));

	  /* x != NaN is always true, other ops are always false.  */
	  if (REAL_VALUE_ISNAN (cst)
	      && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
	      return omit_one_operand_loc (loc, type, tem, arg0);
	    }

	  /* Fold comparisons against infinity.  */
	  if (REAL_VALUE_ISINF (cst)
	      && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = fold_inf_compare (loc, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}

      /* If this is a comparison of a real constant with a PLUS_EXPR
	 or a MINUS_EXPR of a real constant, we can convert it into a
	 comparison with a revised real constant as long as no overflow
	 occurs when unsafe_math_optimizations are enabled.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == REAL_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      arg1, TREE_OPERAND (arg0, 1)))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Likewise, we can simplify a comparison of a real constant with
	 a MINUS_EXPR whose first operand is also a real constant, i.e.
	 (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
	 floating-point types only if -fassociative-math is set.  */
      if (flag_associative_math
	  && TREE_CODE (arg1) == REAL_CST
	  && TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
	  && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
				      arg1))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, swap_tree_comparison (code), type,
				TREE_OPERAND (arg0, 1), tem);

      /* Fold comparisons against built-in math functions.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && flag_unsafe_math_optimizations
	  && ! flag_errno_math)
	{
	  enum built_in_function fcode = builtin_mathfn_code (arg0);

	  if (fcode != END_BUILTINS)
	    {
	      tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}
    }

  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
      && CONVERT_EXPR_P (arg0))
    {
      /* If we are widening one operand of an integer comparison,
	 see if the other operand is similarly being widened.  Perhaps we
	 can do the comparison in the narrower type.  */
      tem = fold_widened_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;

      /* Or if we are changing signedness.  */
      tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;
    }

  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
	  || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (loc, code, type, op0, op1);
      if (tem)
	return tem;
    }

  /* Simplify comparison of something with itself.  (For IEEE
     floating-point, we can only do some of these simplifications.)  */
  if (operand_equal_p (arg0, arg1, 0))
    {
      switch (code)
	{
	case EQ_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  break;

	case GE_EXPR:
	case LE_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);

	case NE_EXPR:
	  /* For NE, we can only do this simplification if integer
	     or we don't honor IEEE floating point NaNs.  */
	  if (FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    break;
	  /* ... fall through ...  */
	case GT_EXPR:
	case LT_EXPR:
	  return constant_boolean_node (0, type);
	default:
	  gcc_unreachable ();
	}
    }

  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	  /* Don't handle degenerate cases here; they should already
	     have been handled anyway.  */
	  && cval1 != 0 && cval2 != 0
	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	{
	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	  /* We can't just pass T to eval_subst in case cval1 or cval2
	     was the same as ARG1.  */

	  tree high_result
		= fold_build2_loc (loc, code, type,
				   eval_subst (loc, arg0, cval1, maxval,
					       cval2, minval),
				   arg1);
	  tree equal_result
		= fold_build2_loc (loc, code, type,
				   eval_subst (loc, arg0, cval1, maxval,
					       cval2, maxval),
				   arg1);
	  tree low_result
		= fold_build2_loc (loc, code, type,
				   eval_subst (loc, arg0, cval1, minval,
					       cval2, maxval),
				   arg1);

	  /* All three of these results should be 0 or 1.  Confirm they are.
	     Then use those values to select the proper code to use.  */

	  if (TREE_CODE (high_result) == INTEGER_CST
	      && TREE_CODE (equal_result) == INTEGER_CST
	      && TREE_CODE (low_result) == INTEGER_CST)
	    {
	      /* Make a 3-bit mask with the high-order bit being the
		 value for `>', the next for '=', and the low for '<'.  */
	      switch ((integer_onep (high_result) * 4)
		      + (integer_onep (equal_result) * 2)
		      + integer_onep (low_result))
		{
		case 0:
		  /* Always false.  */
		  return omit_one_operand_loc (loc, type,
					       integer_zero_node, arg0);
		case 1:
		  code = LT_EXPR;
		  break;
		case 2:
		  code = EQ_EXPR;
		  break;
		case 3:
		  code = LE_EXPR;
		  break;
		case 4:
		  code = GT_EXPR;
		  break;
		case 5:
		  code = NE_EXPR;
		  break;
		case 6:
		  code = GE_EXPR;
		  break;
		case 7:
		  /* Always true.  */
		  return omit_one_operand_loc (loc, type,
					       integer_one_node, arg0);
		}

	      if (save_p)
		{
		  tem = save_expr (build2 (code, type, cval1, cval2));
		  SET_EXPR_LOCATION (tem, loc);
		  return tem;
		}
	      return fold_build2_loc (loc, code, type, cval1, cval2);
	    }
	}
    }

  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (loc, code, type, arg0, arg1);
      if (tem != NULL_TREE)
	return tem;
    }

  /* Fold ~X op ~Y as Y op X.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == BIT_NOT_EXPR)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, code, type,
			      fold_convert_loc (loc, cmp_type,
						TREE_OPERAND (arg1, 0)),
			      TREE_OPERAND (arg0, 0));
    }

  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == INTEGER_CST)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, swap_tree_comparison (code), type,
			      TREE_OPERAND (arg0, 0),
			      fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
					       fold_convert_loc (loc, cmp_type,
								 arg1)));
    }

  return NULL_TREE;
}
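/* For illustration of the X +- C1 CMP C2 transformation above: for
   signed x, "x + 10 < 20" folds to "x < 10".  If instead the revised
   constant overflows, as in "x - 1 < INT_MAX", the comparison is
   decided outright from the canonicalized form against INT_MIN.  */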
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (location_t loc, tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
      ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2_loc (loc, PLUS_EXPR, itype,
			 fold_build2_loc (loc, MULT_EXPR, itype,
					  rpart, rpart),
			 fold_build2_loc (loc, MULT_EXPR, itype,
					  ipart, ipart));
  return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
			  build_zero_cst (itype));
}
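/* As a worked equation: for z = a + b*i,
   z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b, with a zero
   imaginary part.  The save_exprs above keep the real and imaginary
   parts from being evaluated twice when Z has side effects.  */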
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
   power-of-two M and (arbitrary) N such that M divides (P-N).  This
   condition guarantees that P and N have the same least significant
   log2(M) bits.  N is not otherwise constrained.  In particular, N is
   not normalized to 0 <= N < M as is common.  In general, the precise
   value of P is unknown.  M is chosen as large as possible such that
   constant N can be determined.

   Returns M and sets *RESIDUE to N.

   If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
   account.  This is not always possible due to PR 35705.  */

static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
				 bool allow_func_align)
{
  enum tree_code code;

  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      if (handled_component_p (expr))
	{
	  HOST_WIDE_INT bitsize, bitpos;
	  tree offset;
	  enum machine_mode mode;
	  int unsignedp, volatilep;

	  expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
				      &mode, &unsignedp, &volatilep, false);
	  *residue = bitpos / BITS_PER_UNIT;
	  if (offset)
	    {
	      if (TREE_CODE (offset) == INTEGER_CST)
		*residue += TREE_INT_CST_LOW (offset);
	      else
		/* We don't handle more complicated offset expressions.  */
		return 1;
	    }
	}

      if (DECL_P (expr)
	  && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
	return DECL_ALIGN_UNIT (expr);
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue,
						 allow_func_align);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
	{
	  *residue += TREE_INT_CST_LOW (op1);
	  return modulus;
	}
      else if (inner_code == MULT_EXPR)
	{
	  op1 = TREE_OPERAND (op1, 1);
	  if (TREE_CODE (op1) == INTEGER_CST)
	    {
	      unsigned HOST_WIDE_INT align;

	      /* Compute the greatest power-of-2 divisor of op1.  */
	      align = TREE_INT_CST_LOW (op1);
	      align &= -align;

	      /* If align is non-zero and less than *modulus, replace
		 *modulus with align.  If align is 0, then either op1 is 0
		 or the greatest power-of-2 divisor of op1 doesn't fit in an
		 unsigned HOST_WIDE_INT.  In either case, no additional
		 constraint is imposed.  */
	      if (align)
		modulus = MIN (modulus, align);

	      return modulus;
	    }
	}
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
}
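/* For illustration (hypothetical declaration): given
   "char buf[64] __attribute__ ((aligned (16)));", the expression
   &buf[0] + 4 yields modulus 16 and residue 4, i.e. the pointer value P
   satisfies P == 4 (mod 16), which lets a caller fold a test such as
   "((__UINTPTR_TYPE__) p & 15) == 4" to a constant.  */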
9394 /* Fold a binary expression of code CODE and type TYPE with operands
9395 OP0 and OP1. LOC is the location of the resulting expression.
9396 Return the folded expression if folding is successful. Otherwise,
9397 return NULL_TREE. */
9400 fold_binary_loc_1 (location_t loc
,
9401 enum tree_code code
, tree type
, tree op0
, tree op1
)
9403 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
9404 tree arg0
, arg1
, tem
;
9405 tree t1
= NULL_TREE
;
9406 bool strict_overflow_p
;
9408 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
9409 && TREE_CODE_LENGTH (code
) == 2
9411 && op1
!= NULL_TREE
);
9416 /* Strip any conversions that don't change the mode. This is
9417 safe for every expression, except for a comparison expression
9418 because its signedness is derived from its operands. So, in
9419 the latter case, only strip conversions that don't change the
9420 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9423 Note that this is done as an internal manipulation within the
9424 constant folder, in order to find the simplest representation
9425 of the arguments so that their form can be studied. In any
9426 cases, the appropriate type conversions should be put back in
9427 the tree that will get out of the constant folder. */
9429 if (kind
== tcc_comparison
|| code
== MIN_EXPR
|| code
== MAX_EXPR
)
9431 STRIP_SIGN_NOPS (arg0
);
9432 STRIP_SIGN_NOPS (arg1
);
9440 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9441 constant but we can't do arithmetic on them. */
9442 if ((TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
9443 || (TREE_CODE (arg0
) == REAL_CST
&& TREE_CODE (arg1
) == REAL_CST
)
9444 || (TREE_CODE (arg0
) == FIXED_CST
&& TREE_CODE (arg1
) == FIXED_CST
)
9445 || (TREE_CODE (arg0
) == FIXED_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
9446 || (TREE_CODE (arg0
) == COMPLEX_CST
&& TREE_CODE (arg1
) == COMPLEX_CST
)
9447 || (TREE_CODE (arg0
) == VECTOR_CST
&& TREE_CODE (arg1
) == VECTOR_CST
))
9449 if (kind
== tcc_binary
)
9451 /* Make sure type and arg0 have the same saturating flag. */
9452 gcc_assert (TYPE_SATURATING (type
)
9453 == TYPE_SATURATING (TREE_TYPE (arg0
)));
9454 tem
= const_binop (code
, arg0
, arg1
);
9456 else if (kind
== tcc_comparison
)
9457 tem
= fold_relational_const (code
, type
, arg0
, arg1
);
9461 if (tem
!= NULL_TREE
)
9463 if (TREE_TYPE (tem
) != type
)
9464 tem
= fold_convert_loc (loc
, type
, tem
);
9469 /* If this is a commutative operation, and ARG0 is a constant, move it
9470 to ARG1 to reduce the number of tests below. */
9471 if (commutative_tree_code (code
)
9472 && tree_swap_operands_p (arg0
, arg1
, true))
9473 return fold_build2_loc (loc
, code
, type
, op1
, op0
);
9475 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9477 First check for cases where an arithmetic operation is applied to a
9478 compound, conditional, or comparison operation. Push the arithmetic
9479 operation inside the compound or conditional to see if any folding
9480 can then be done. Convert comparison to conditional for this purpose.
9481 The also optimizes non-constant cases that used to be done in
9484 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9485 one of the operands is a comparison and the other is a comparison, a
9486 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9487 code below would make the expression more complex. Change it to a
9488 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9489 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9491 if ((code
== BIT_AND_EXPR
|| code
== BIT_IOR_EXPR
9492 || code
== EQ_EXPR
|| code
== NE_EXPR
)
9493 && ((truth_value_p (TREE_CODE (arg0
))
9494 && (truth_value_p (TREE_CODE (arg1
))
9495 || (TREE_CODE (arg1
) == BIT_AND_EXPR
9496 && integer_onep (TREE_OPERAND (arg1
, 1)))))
9497 || (truth_value_p (TREE_CODE (arg1
))
9498 && (truth_value_p (TREE_CODE (arg0
))
9499 || (TREE_CODE (arg0
) == BIT_AND_EXPR
9500 && integer_onep (TREE_OPERAND (arg0
, 1)))))))
9502 tem
= fold_build2_loc (loc
, code
== BIT_AND_EXPR
? TRUTH_AND_EXPR
9503 : code
== BIT_IOR_EXPR
? TRUTH_OR_EXPR
9506 fold_convert_loc (loc
, boolean_type_node
, arg0
),
9507 fold_convert_loc (loc
, boolean_type_node
, arg1
));
9509 if (code
== EQ_EXPR
)
9510 tem
= invert_truthvalue_loc (loc
, tem
);
9512 return fold_convert_loc (loc
, type
, tem
);
9515 if (TREE_CODE_CLASS (code
) == tcc_binary
9516 || TREE_CODE_CLASS (code
) == tcc_comparison
)
9518 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
9520 tem
= fold_build2_loc (loc
, code
, type
,
9521 fold_convert_loc (loc
, TREE_TYPE (op0
),
9522 TREE_OPERAND (arg0
, 1)), op1
);
9523 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
9526 if (TREE_CODE (arg1
) == COMPOUND_EXPR
9527 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
9529 tem
= fold_build2_loc (loc
, code
, type
, op0
,
9530 fold_convert_loc (loc
, TREE_TYPE (op1
),
9531 TREE_OPERAND (arg1
, 1)));
9532 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg1
, 0),
9536 if (TREE_CODE (arg0
) == COND_EXPR
|| COMPARISON_CLASS_P (arg0
))
9538 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
9540 /*cond_first_p=*/1);
9541 if (tem
!= NULL_TREE
)
9545 if (TREE_CODE (arg1
) == COND_EXPR
|| COMPARISON_CLASS_P (arg1
))
9547 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
9549 /*cond_first_p=*/0);
9550 if (tem
!= NULL_TREE
)
9558 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9559 if (TREE_CODE (arg0
) == ADDR_EXPR
9560 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == MEM_REF
)
9562 tree iref
= TREE_OPERAND (arg0
, 0);
9563 return fold_build2 (MEM_REF
, type
,
9564 TREE_OPERAND (iref
, 0),
9565 int_const_binop (PLUS_EXPR
, arg1
,
9566 TREE_OPERAND (iref
, 1), 0));
9569 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9570 if (TREE_CODE (arg0
) == ADDR_EXPR
9571 && handled_component_p (TREE_OPERAND (arg0
, 0)))
9574 HOST_WIDE_INT coffset
;
9575 base
= get_addr_base_and_unit_offset (TREE_OPERAND (arg0
, 0),
9579 return fold_build2 (MEM_REF
, type
,
9580 build_fold_addr_expr (base
),
9581 int_const_binop (PLUS_EXPR
, arg1
,
9582 size_int (coffset
), 0));
9587 case POINTER_PLUS_EXPR
:
9588 /* 0 +p index -> (type)index */
9589 if (integer_zerop (arg0
))
9590 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
9592 /* PTR +p 0 -> PTR */
9593 if (integer_zerop (arg1
))
9594 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
9596 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9597 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
9598 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
9599 return fold_convert_loc (loc
, type
,
9600 fold_build2_loc (loc
, PLUS_EXPR
, sizetype
,
9601 fold_convert_loc (loc
, sizetype
,
9603 fold_convert_loc (loc
, sizetype
,
9606 /* index +p PTR -> PTR +p index */
9607 if (POINTER_TYPE_P (TREE_TYPE (arg1
))
9608 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
9609 return fold_build2_loc (loc
, POINTER_PLUS_EXPR
, type
,
9610 fold_convert_loc (loc
, type
, arg1
),
9611 fold_convert_loc (loc
, sizetype
, arg0
));
9613 /* (PTR +p B) +p A -> PTR +p (B + A) */
9614 if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
9617 tree arg01
= fold_convert_loc (loc
, sizetype
, TREE_OPERAND (arg0
, 1));
9618 tree arg00
= TREE_OPERAND (arg0
, 0);
9619 inner
= fold_build2_loc (loc
, PLUS_EXPR
, sizetype
,
9620 arg01
, fold_convert_loc (loc
, sizetype
, arg1
));
9621 return fold_convert_loc (loc
, type
,
9622 fold_build2_loc (loc
, POINTER_PLUS_EXPR
,
9627 /* PTR_CST +p CST -> CST1 */
9628 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
9629 return fold_build2_loc (loc
, PLUS_EXPR
, type
, arg0
,
9630 fold_convert_loc (loc
, type
, arg1
));
9632 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
9633 of the array. Loop optimizer sometimes produce this type of
9635 if (TREE_CODE (arg0
) == ADDR_EXPR
)
9637 tem
= try_move_mult_to_index (loc
, arg0
,
9638 fold_convert_loc (loc
, sizetype
, arg1
));
9640 return fold_convert_loc (loc
, type
, tem
);
9646 /* A + (-B) -> A - B */
9647 if (TREE_CODE (arg1
) == NEGATE_EXPR
)
9648 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
9649 fold_convert_loc (loc
, type
, arg0
),
9650 fold_convert_loc (loc
, type
,
9651 TREE_OPERAND (arg1
, 0)));
9652 /* (-A) + B -> B - A */
9653 if (TREE_CODE (arg0
) == NEGATE_EXPR
9654 && reorder_operands_p (TREE_OPERAND (arg0
, 0), arg1
))
9655 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
9656 fold_convert_loc (loc
, type
, arg1
),
9657 fold_convert_loc (loc
, type
,
9658 TREE_OPERAND (arg0
, 0)));
9660 if (INTEGRAL_TYPE_P (type
))
9662 /* Convert ~A + 1 to -A. */
9663 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9664 && integer_onep (arg1
))
9665 return fold_build1_loc (loc
, NEGATE_EXPR
, type
,
9666 fold_convert_loc (loc
, type
,
9667 TREE_OPERAND (arg0
, 0)));
9670 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9671 && !TYPE_OVERFLOW_TRAPS (type
))
9673 tree tem
= TREE_OPERAND (arg0
, 0);
9676 if (operand_equal_p (tem
, arg1
, 0))
9678 t1
= build_int_cst_type (type
, -1);
9679 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
9684 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
9685 && !TYPE_OVERFLOW_TRAPS (type
))
9687 tree tem
= TREE_OPERAND (arg1
, 0);
9690 if (operand_equal_p (arg0
, tem
, 0))
9692 t1
= build_int_cst_type (type
, -1);
9693 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
9697 /* X + (X / CST) * -CST is X % CST. */
9698 if (TREE_CODE (arg1
) == MULT_EXPR
9699 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == TRUNC_DIV_EXPR
9700 && operand_equal_p (arg0
,
9701 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0), 0))
9703 tree cst0
= TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1);
9704 tree cst1
= TREE_OPERAND (arg1
, 1);
9705 tree sum
= fold_binary_loc (loc
, PLUS_EXPR
, TREE_TYPE (cst1
),
9707 if (sum
&& integer_zerop (sum
))
9708 return fold_convert_loc (loc
, type
,
9709 fold_build2_loc (loc
, TRUNC_MOD_EXPR
,
9710 TREE_TYPE (arg0
), arg0
,
9715 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9716 same or one. Make sure type is not saturating.
9717 fold_plusminus_mult_expr will re-associate. */
9718 if ((TREE_CODE (arg0
) == MULT_EXPR
9719 || TREE_CODE (arg1
) == MULT_EXPR
)
9720 && !TYPE_SATURATING (type
)
9721 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
9723 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
9728 if (! FLOAT_TYPE_P (type
))
9730 if (integer_zerop (arg1
))
9731 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
9733 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9734 with a constant, and the two constants have no bits in common,
9735 we should treat this as a BIT_IOR_EXPR since this may produce more
9737 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9738 && TREE_CODE (arg1
) == BIT_AND_EXPR
9739 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9740 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
9741 && integer_zerop (const_binop (BIT_AND_EXPR
,
9742 TREE_OPERAND (arg0
, 1),
9743 TREE_OPERAND (arg1
, 1))))
9745 code
= BIT_IOR_EXPR
;
9749 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9750 (plus (plus (mult) (mult)) (foo)) so that we can
9751 take advantage of the factoring cases below. */
9752 if (((TREE_CODE (arg0
) == PLUS_EXPR
9753 || TREE_CODE (arg0
) == MINUS_EXPR
)
9754 && TREE_CODE (arg1
) == MULT_EXPR
)
9755 || ((TREE_CODE (arg1
) == PLUS_EXPR
9756 || TREE_CODE (arg1
) == MINUS_EXPR
)
9757 && TREE_CODE (arg0
) == MULT_EXPR
))
9759 tree parg0
, parg1
, parg
, marg
;
9760 enum tree_code pcode
;
9762 if (TREE_CODE (arg1
) == MULT_EXPR
)
9763 parg
= arg0
, marg
= arg1
;
9765 parg
= arg1
, marg
= arg0
;
9766 pcode
= TREE_CODE (parg
);
9767 parg0
= TREE_OPERAND (parg
, 0);
9768 parg1
= TREE_OPERAND (parg
, 1);
9772 if (TREE_CODE (parg0
) == MULT_EXPR
9773 && TREE_CODE (parg1
) != MULT_EXPR
)
9774 return fold_build2_loc (loc
, pcode
, type
,
9775 fold_build2_loc (loc
, PLUS_EXPR
, type
,
9776 fold_convert_loc (loc
, type
,
9778 fold_convert_loc (loc
, type
,
9780 fold_convert_loc (loc
, type
, parg1
));
9781 if (TREE_CODE (parg0
) != MULT_EXPR
9782 && TREE_CODE (parg1
) == MULT_EXPR
)
9784 fold_build2_loc (loc
, PLUS_EXPR
, type
,
9785 fold_convert_loc (loc
, type
, parg0
),
9786 fold_build2_loc (loc
, pcode
, type
,
9787 fold_convert_loc (loc
, type
, marg
),
9788 fold_convert_loc (loc
, type
,
	  /* See if ARG1 is zero and X + ARG1 reduces to X.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* Likewise if the operands are reversed.  */
	  if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

	  /* Convert X + -C into X - C.  */
	  if (TREE_CODE (arg1) == REAL_CST
	      && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
	    {
	      tem = fold_negate_const (arg1, type);
	      if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
		return fold_build2_loc (loc, MINUS_EXPR, type,
					fold_convert_loc (loc, type, arg0),
					fold_convert_loc (loc, type, tem));
	    }
	  /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
	     to __complex__ ( x, y ).  This is not the same for SNaNs or
	     if signed zeros are involved.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	      tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
	      tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
	      bool arg0rz = false, arg0iz = false;
	      if ((arg0r && (arg0rz = real_zerop (arg0r)))
		  || (arg0i && (arg0iz = real_zerop (arg0i))))
		{
		  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
		  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
		  if (arg0rz && arg1i && real_zerop (arg1i))
		    {
		      tree rp = arg1r ? arg1r
			: build1 (REALPART_EXPR, rtype, arg1);
		      tree ip = arg0i ? arg0i
			: build1 (IMAGPART_EXPR, rtype, arg0);
		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		    }
		  else if (arg0iz && arg1r && real_zerop (arg1r))
		    {
		      tree rp = arg0r ? arg0r
			: build1 (REALPART_EXPR, rtype, arg0);
		      tree ip = arg1i ? arg1i
			: build1 (IMAGPART_EXPR, rtype, arg1);
		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		    }
		}
	    }
	  if (flag_unsafe_math_optimizations
	      && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
	      && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
	      && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
	    return tem;

	  /* Convert x+x into x*2.0.  */
	  if (operand_equal_p (arg0, arg1, 0)
	      && SCALAR_FLOAT_TYPE_P (type))
	    return fold_build2_loc (loc, MULT_EXPR, type, arg0,
				    build_real (type, dconst2));
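	  /* "x + x" is turned into "x * 2.0" only for scalar floats; 2.0
	     is exactly representable in binary floating point, so the
	     product matches the sum for every input, including NaNs,
	     infinities and signed zeros.  */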
	  /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
	     We associate floats only if the user has specified
	     -fassociative-math.  */
	  if (flag_associative_math
	      && TREE_CODE (arg1) == PLUS_EXPR
	      && TREE_CODE (arg0) != MULT_EXPR)
	    {
	      tree tree10 = TREE_OPERAND (arg1, 0);
	      tree tree11 = TREE_OPERAND (arg1, 1);
	      if (TREE_CODE (tree11) == MULT_EXPR
		  && TREE_CODE (tree10) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
		  return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
		}
	    }
	  /* Convert (b*c + d*e) + a into b*c + (d*e +a).
	     We associate floats only if the user has specified
	     -fassociative-math.  */
	  if (flag_associative_math
	      && TREE_CODE (arg0) == PLUS_EXPR
	      && TREE_CODE (arg1) != MULT_EXPR)
	    {
	      tree tree00 = TREE_OPERAND (arg0, 0);
	      tree tree01 = TREE_OPERAND (arg0, 1);
	      if (TREE_CODE (tree01) == MULT_EXPR
		  && TREE_CODE (tree00) == MULT_EXPR)
		{
		  tree tree0;
		  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
		  return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
		}
	    }
	}
    bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
	 is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
	 is a rotate of A by B bits.  */
      {
	enum tree_code code0, code1;
	tree rtype;
	code0 = TREE_CODE (arg0);
	code1 = TREE_CODE (arg1);
	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
	    && operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 0), 0)
	    && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
		TYPE_UNSIGNED (rtype))
	    /* Only create rotates in complete modes.  Other cases are not
	       expanded properly.  */
	    && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
	  {
	    tree tree01, tree11;
	    enum tree_code code01, code11;

	    tree01 = TREE_OPERAND (arg0, 1);
	    tree11 = TREE_OPERAND (arg1, 1);
	    STRIP_NOPS (tree01);
	    STRIP_NOPS (tree11);
	    code01 = TREE_CODE (tree01);
	    code11 = TREE_CODE (tree11);
	    if (code01 == INTEGER_CST
		&& code11 == INTEGER_CST
		&& TREE_INT_CST_HIGH (tree01) == 0
		&& TREE_INT_CST_HIGH (tree11) == 0
		&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
		    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
	      {
		tem = build2_loc (loc, LROTATE_EXPR,
				  TREE_TYPE (TREE_OPERAND (arg0, 0)),
				  TREE_OPERAND (arg0, 0),
				  code0 == LSHIFT_EXPR ? tree01 : tree11);
		return fold_convert_loc (loc, type, tem);
	      }
	    else if (code11 == MINUS_EXPR)
	      {
		tree tree110, tree111;
		tree110 = TREE_OPERAND (tree11, 0);
		tree111 = TREE_OPERAND (tree11, 1);
		STRIP_NOPS (tree110);
		STRIP_NOPS (tree111);
		if (TREE_CODE (tree110) == INTEGER_CST
		    && 0 == compare_tree_int (tree110,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree01, tree111, 0))
		  return
		    fold_convert_loc (loc, type,
				      build2 ((code0 == LSHIFT_EXPR
					       ? LROTATE_EXPR
					       : RROTATE_EXPR),
					      TREE_TYPE (TREE_OPERAND (arg0, 0)),
					      TREE_OPERAND (arg0, 0), tree01));
	      }
	    else if (code01 == MINUS_EXPR)
	      {
		tree tree010, tree011;
		tree010 = TREE_OPERAND (tree01, 0);
		tree011 = TREE_OPERAND (tree01, 1);
		STRIP_NOPS (tree010);
		STRIP_NOPS (tree011);
		if (TREE_CODE (tree010) == INTEGER_CST
		    && 0 == compare_tree_int (tree010,
					      TYPE_PRECISION
					      (TREE_TYPE (TREE_OPERAND
							  (arg0, 0))))
		    && operand_equal_p (tree11, tree011, 0))
		  return fold_convert_loc
		    (loc, type,
		     build2 ((code0 != LSHIFT_EXPR
			      ? LROTATE_EXPR
			      : RROTATE_EXPR),
			     TREE_TYPE (TREE_OPERAND (arg0, 0)),
			     TREE_OPERAND (arg0, 0), tree11));
	      }
	  }
      }
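      /* Rotate example: for a 32-bit unsigned A, "(A << 3) + (A >> 29)"
	 (or the same shifts combined with | or ^, which jump here via
	 bit_rotate) becomes a single rotate-left by 3, because the two
	 shift counts sum to the precision and the shifted-in bits are
	 guaranteed zero.  */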
    associate:
      /* In most languages, can't associate operations on floats through
	 parentheses.  Rather than remember where the parentheses were, we
	 don't associate floats at all, unless the user has specified
	 -fassociative-math.
	 And, we need to make sure type is not saturating.  */

      if ((! FLOAT_TYPE_P (type) || flag_associative_math)
	  && !TYPE_SATURATING (type))
	{
	  tree var0, con0, lit0, minus_lit0;
	  tree var1, con1, lit1, minus_lit1;
	  bool ok = true;

	  /* Split both trees into variables, constants, and literals.  Then
	     associate each group together, the constants with literals,
	     then the result with variables.  This increases the chances of
	     literals being recombined later and of generating relocatable
	     expressions for the sum of a constant and literal.  */
	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
			     code == MINUS_EXPR);

	  /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
	  if (code == MINUS_EXPR)
	    code = PLUS_EXPR;

	  /* With undefined overflow we can only associate constants with one
	     variable, and constants whose association doesn't overflow.  */
	  if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
	      || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
	    {
	      if (var0 && var1)
		{
		  tree tmp0 = var0;
		  tree tmp1 = var1;
		  if (TREE_CODE (tmp0) == NEGATE_EXPR)
		    tmp0 = TREE_OPERAND (tmp0, 0);
		  if (TREE_CODE (tmp1) == NEGATE_EXPR)
		    tmp1 = TREE_OPERAND (tmp1, 0);
		  /* The only case we can still associate with two variables
		     is if they are the same, modulo negation.  */
		  if (!operand_equal_p (tmp0, tmp1, 0))
		    ok = false;
		}

	      if (ok && lit0 && lit1)
		{
		  tree tmp0 = fold_convert (type, lit0);
		  tree tmp1 = fold_convert (type, lit1);

		  if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
		      && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
		    ok = false;
		}
	    }

	  /* Only do something if we found more than two objects.  Otherwise,
	     nothing has changed and we risk infinite recursion.  */
	  if (ok
	      && (2 < ((var0 != 0) + (var1 != 0)
		       + (con0 != 0) + (con1 != 0)
		       + (lit0 != 0) + (lit1 != 0)
		       + (minus_lit0 != 0) + (minus_lit1 != 0))))
	    {
	      var0 = associate_trees (loc, var0, var1, code, type);
	      con0 = associate_trees (loc, con0, con1, code, type);
	      lit0 = associate_trees (loc, lit0, lit1, code, type);
	      minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
					    code, type);

	      /* Preserve the MINUS_EXPR if the negative part of the literal is
		 greater than the positive part.  Otherwise, the multiplicative
		 folding code (i.e extract_muldiv) may be fooled in case
		 unsigned constants are subtracted, like in the following
		 example: ((X*2 + 4) - 8U)/2.  */
	      if (minus_lit0 && lit0)
		{
		  if (TREE_CODE (lit0) == INTEGER_CST
		      && TREE_CODE (minus_lit0) == INTEGER_CST
		      && tree_int_cst_lt (lit0, minus_lit0))
		    {
		      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
						    MINUS_EXPR, type);
		      lit0 = 0;
		    }
		  else
		    {
		      lit0 = associate_trees (loc, lit0, minus_lit0,
					      MINUS_EXPR, type);
		      minus_lit0 = 0;
		    }
		}
	      if (minus_lit0)
		{
		  if (con0 == 0)
		    return
		      fold_convert_loc (loc, type,
					associate_trees (loc, var0, minus_lit0,
							 MINUS_EXPR, type));
		  else
		    {
		      con0 = associate_trees (loc, con0, minus_lit0,
					      MINUS_EXPR, type);
		      return
			fold_convert_loc (loc, type,
					  associate_trees (loc, var0, con0,
							   PLUS_EXPR, type));
		    }
		}

	      con0 = associate_trees (loc, con0, lit0, code, type);
	      return
		fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
							      code, type));
	    }
	}
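      /* Association example: "(x + 3) + (y + 5)" splits into variable,
	 constant and literal groups and recombines as "(x + y) + 8".
	 The object count above must exceed two, otherwise we would
	 rebuild the identical tree and recurse forever.  */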
      return NULL_TREE;

    case MINUS_EXPR:
      /* Pointer simplifications for subtraction, simple reassociations.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
	{
	  /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
	  if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
	      && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	    {
	      tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	      tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	      tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	      tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	      return fold_build2_loc (loc, PLUS_EXPR, type,
				      fold_build2_loc (loc, MINUS_EXPR, type,
						       arg00, arg10),
				      fold_build2_loc (loc, MINUS_EXPR, type,
						       arg01, arg11));
	    }
	  /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1
	     simplifies.  */
	  else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	    {
	      tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	      tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	      tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
					  fold_convert_loc (loc, type, arg1));
	      if (tmp)
		return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
	    }
	}
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, PLUS_EXPR, type, op0,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && (FLOAT_TYPE_P (type)
	      || INTEGRAL_TYPE_P (type))
	  && negate_expr_p (arg1)
	  && reorder_operands_p (arg0, arg1))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type,
						  negate_expr (arg1)),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));
      /* Convert -A - 1 to ~A.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (arg0) == NEGATE_EXPR
	  && integer_onep (arg1)
	  && !TYPE_OVERFLOW_TRAPS (type))
	return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));

      /* Convert -1 - A to ~A.  */
      if (INTEGRAL_TYPE_P (type)
	  && integer_all_onesp (arg0))
	return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);

      /* X - (X / CST) * CST is X % CST.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
	  && operand_equal_p (arg0,
			      TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
			      TREE_OPERAND (arg1, 1), 0))
	return
	  fold_convert_loc (loc, type,
			    fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
					     arg0, TREE_OPERAND (arg1, 1)));
      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg0))
	    return negate_expr (fold_convert_loc (loc, type, arg1));
	  if (integer_zerop (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* Fold A - (A & B) into ~B & A.  */
	  if (!TREE_SIDE_EFFECTS (arg0)
	      && TREE_CODE (arg1) == BIT_AND_EXPR)
	    {
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
		{
		  tree arg10 = fold_convert_loc (loc, type,
						 TREE_OPERAND (arg1, 0));
		  return fold_build2_loc (loc, BIT_AND_EXPR, type,
					  fold_build1_loc (loc, BIT_NOT_EXPR,
							   type, arg10),
					  fold_convert_loc (loc, type, arg0));
		}
	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
		{
		  tree arg11 = fold_convert_loc (loc,
						 type, TREE_OPERAND (arg1, 1));
		  return fold_build2_loc (loc, BIT_AND_EXPR, type,
					  fold_build1_loc (loc, BIT_NOT_EXPR,
							   type, arg11),
					  fold_convert_loc (loc, type, arg0));
		}
	    }

	  /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
	     any power of 2 minus 1.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0))
	    {
	      tree mask0 = TREE_OPERAND (arg0, 1);
	      tree mask1 = TREE_OPERAND (arg1, 1);
	      tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);

	      if (operand_equal_p (tem, mask1, 0))
		{
		  tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
					 TREE_OPERAND (arg0, 0), mask1);
		  return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
		}
	    }
	}
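      /* E.g. with A = 0b1011 and B = 0b0011: "A - (A & B)" is 11 - 3 = 8,
	 exactly "~B & A" = 0b1000; and "(A & ~B) - (A & B)" is
	 8 - 3 = 5 = "(A ^ B) - B", valid because B is a power of 2
	 minus 1 so no borrow crosses the mask boundary.  */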
      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
	 ARG0 is zero and X + ARG0 reduces to X, since that would mean
	 (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	return negate_expr (fold_convert_loc (loc, type, arg1));

      /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
	 __complex__ ( x, -y ).  This is not the same for SNaNs or if
	 signed zeros are involved.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	{
	  tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	  tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
	  tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
	  bool arg0rz = false, arg0iz = false;
	  if ((arg0r && (arg0rz = real_zerop (arg0r)))
	      || (arg0i && (arg0iz = real_zerop (arg0i))))
	    {
	      tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
	      tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
	      if (arg0rz && arg1i && real_zerop (arg1i))
		{
		  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
					     arg1r ? arg1r
					     : build1 (REALPART_EXPR, rtype, arg1));
		  tree ip = arg0i ? arg0i
		    : build1 (IMAGPART_EXPR, rtype, arg0);
		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		}
	      else if (arg0iz && arg1r && real_zerop (arg1r))
		{
		  tree rp = arg0r ? arg0r
		    : build1 (REALPART_EXPR, rtype, arg0);
		  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
					     arg1i ? arg1i
					     : build1 (IMAGPART_EXPR, rtype, arg1));
		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		}
	    }
	}
      /* Fold &x - &x.  This can happen from &x.foo - &x.
	 This is unsafe for certain floats even in non-IEEE formats.
	 In IEEE, it is unsafe because it does wrong for NaNs.
	 Also note that operand_equal_p is always false if an operand
	 is volatile.  */

      if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
	  && operand_equal_p (arg0, arg1, 0))
	return build_zero_cst (type);

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (negate_expr_p (arg1)
	  && ((FLOAT_TYPE_P (type)
	       /* Avoid this transformation if B is a positive REAL_CST.  */
	       && (TREE_CODE (arg1) != REAL_CST
		   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
	      || INTEGRAL_TYPE_P (type)))
	return fold_build2_loc (loc, PLUS_EXPR, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  negate_expr (arg1)));
      /* Try folding difference of addresses.  */
      {
	HOST_WIDE_INT diff;

	if ((TREE_CODE (arg0) == ADDR_EXPR
	     || TREE_CODE (arg1) == ADDR_EXPR)
	    && ptr_difference_const (arg0, arg1, &diff))
	  return build_int_cst_type (type, diff);
      }

      /* Fold &a[i] - &a[j] to i-j.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
	{
	  tree aref0 = TREE_OPERAND (arg0, 0);
	  tree aref1 = TREE_OPERAND (arg1, 0);
	  if (operand_equal_p (TREE_OPERAND (aref0, 0),
			       TREE_OPERAND (aref1, 0), 0))
	    {
	      tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
	      tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
	      tree esz = array_ref_element_size (aref0);
	      tree diff = build2 (MINUS_EXPR, type, op0, op1);
	      return fold_build2_loc (loc, MULT_EXPR, type, diff,
				      fold_convert_loc (loc, type, esz));
	    }
	}
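      /* E.g. for "int a[10]", "&a[7] - &a[2]" folds here to the byte
	 difference (7 - 2) * sizeof (int); the front end's later division
	 by the element size then produces the language-level result 5.  */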
      if (FLOAT_TYPE_P (type)
	  && flag_unsafe_math_optimizations
	  && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
	  && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
	  && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
	return tem;

      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
	 same or one.  Make sure type is not saturating.
	 fold_plusminus_mult_expr will re-associate.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
	   || TREE_CODE (arg1) == MULT_EXPR)
	  && !TYPE_SATURATING (type)
	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
	{
	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
	  if (tem)
	    return tem;
	}

      goto associate;

    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2_loc (loc, MULT_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				fold_convert_loc (loc, type,
						  negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2_loc (loc, MULT_EXPR, type,
				fold_convert_loc (loc, type,
						  negate_expr (arg0)),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));
      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return omit_one_operand_loc (loc, type, arg1, arg0);
	  if (integer_onep (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
	  /* Transform x * -1 into -x.  Make sure to do the negation
	     on the original operand with conversions not stripped
	     because we can only strip non-sign-changing conversions.  */
	  if (integer_all_onesp (arg1))
	    return fold_convert_loc (loc, type, negate_expr (op0));
	  /* Transform x * -C into -x * C if x is easily negatable.  */
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && tree_int_cst_sgn (arg1) == -1
	      && negate_expr_p (arg0)
	      && (tem = negate_expr (arg1)) != arg1
	      && !TREE_OVERFLOW (tem))
	    return fold_build2_loc (loc, MULT_EXPR, type,
				    fold_convert_loc (loc, type,
						      negate_expr (arg0)),
				    tem);

	  /* (a * (1 << b)) is (a << b)  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg1, 0)))
	    return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
				    TREE_OPERAND (arg1, 1));
	  if (TREE_CODE (arg0) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg0, 0)))
	    return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
				    TREE_OPERAND (arg0, 1));

	  /* (A + A) * C -> A * 2 * C  */
	  if (TREE_CODE (arg0) == PLUS_EXPR
	      && TREE_CODE (arg1) == INTEGER_CST
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg0, 1), 0))
	    return fold_build2_loc (loc, MULT_EXPR, type,
				    omit_one_operand_loc (loc, type,
							  TREE_OPERAND (arg0, 0),
							  TREE_OPERAND (arg0, 1)),
				    fold_build2_loc (loc, MULT_EXPR, type,
						     build_int_cst (type, 2),
						     arg1));
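	  /* E.g. "a * (1 << b)" becomes "a << b", and "x * -1" becomes
	     "-x"; for the x * -C case the negated constant must not
	     overflow, which the !TREE_OVERFLOW (tem) check enforces.  */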
	  strict_overflow_p = false;
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					     &strict_overflow_p)))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying "
					"multiplication"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_convert_loc (loc, type, tem);
	    }

	  /* Optimize z * conj(z) for integer complex numbers.  */
	  if (TREE_CODE (arg0) == CONJ_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return fold_mult_zconjz (loc, type, arg1);
	  if (TREE_CODE (arg1) == CONJ_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return fold_mult_zconjz (loc, type, arg0);
	}
      else
	{
	  /* Maybe fold x * 0 to 0.  The expressions aren't the same
	     when x is NaN, since x * 0 is also NaN.  Nor are they the
	     same in modes with signed zeros, since multiplying a
	     negative value by 0 gives -0, not +0.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_zerop (arg1))
	    return omit_one_operand_loc (loc, type, arg1, arg0);
	  /* In IEEE floating point, x*1 is not equivalent to x for snans.
	     Likewise for complex arithmetic with signed zeros.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
		  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	      && real_onep (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* Transform x * -1.0 into -x.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
		  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	      && real_minus_onep (arg1))
	    return fold_convert_loc (loc, type, negate_expr (arg0));

	  /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
	     the result for floating point types due to rounding so it is
	     applied only if -fassociative-math was specified.  */
	  if (flag_associative_math
	      && TREE_CODE (arg0) == RDIV_EXPR
	      && TREE_CODE (arg1) == REAL_CST
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
	    {
	      tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
				      arg1);
	      if (tem)
		return fold_build2_loc (loc, RDIV_EXPR, type, tem,
					TREE_OPERAND (arg0, 1));
	    }
	  /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
	  if (operand_equal_p (arg0, arg1, 0))
	    {
	      tree tem = fold_strip_sign_ops (arg0);
	      if (tem != NULL_TREE)
		{
		  tem = fold_convert_loc (loc, type, tem);
		  return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
		}
	    }
	  /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
	     This is not the same for NaNs or if signed zeros are
	     involved.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && TREE_CODE (arg1) == COMPLEX_CST
	      && real_zerop (TREE_REALPART (arg1)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	      if (real_onep (TREE_IMAGPART (arg1)))
		return
		  fold_build2_loc (loc, COMPLEX_EXPR, type,
				   negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
								 rtype, arg0)),
				   fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
	      else if (real_minus_onep (TREE_IMAGPART (arg1)))
		return
		  fold_build2_loc (loc, COMPLEX_EXPR, type,
				   fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
				   negate_expr (fold_build1_loc (loc, REALPART_EXPR,
								 rtype, arg0)));
	    }
	  /* Optimize z * conj(z) for floating point complex numbers.
	     Guarded by flag_unsafe_math_optimizations as non-finite
	     imaginary components don't produce scalar results.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == CONJ_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return fold_mult_zconjz (loc, type, arg1);
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == CONJ_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return fold_mult_zconjz (loc, type, arg0);
	  if (flag_unsafe_math_optimizations)
	    {
	      enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	      enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	      /* Optimizations of root(...)*root(...).  */
	      if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
		{
		  tree rootfn, arg;
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);

		  /* Optimize sqrt(x)*sqrt(x) as x.  */
		  if (BUILTIN_SQRT_P (fcode0)
		      && operand_equal_p (arg00, arg10, 0)
		      && ! HONOR_SNANS (TYPE_MODE (type)))
		    return arg00;

		  /* Optimize root(x)*root(y) as root(x*y).  */
		  rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
		  return build_call_expr_loc (loc, rootfn, 1, arg);
		}

	      /* Optimize expN(x)*expN(y) as expN(x+y).  */
	      if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
		{
		  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
					      CALL_EXPR_ARG (arg0, 0),
					      CALL_EXPR_ARG (arg1, 0));
		  return build_call_expr_loc (loc, expfn, 1, arg);
		}

	      /* Optimizations of pow(...)*pow(...).  */
	      if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
		  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
		  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
		{
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg01 = CALL_EXPR_ARG (arg0, 1);
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
		  tree arg11 = CALL_EXPR_ARG (arg1, 1);

		  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
		  if (operand_equal_p (arg01, arg11, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      tree arg = fold_build2_loc (loc, MULT_EXPR, type,
						  arg00, arg10);
		      return build_call_expr_loc (loc, powfn, 2, arg, arg01);
		    }

		  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
		  if (operand_equal_p (arg00, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
						  arg01, arg11);
		      return build_call_expr_loc (loc, powfn, 2, arg00, arg);
		    }
		}
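	      /* E.g. "pow (x, 2.0) * pow (x, 3.0)" becomes "pow (x, 5.0)"
		 and "pow (x, y) * pow (z, y)" becomes "pow (x * z, y)".
		 These are guarded by flag_unsafe_math_optimizations since
		 they can change rounding and intermediate overflow.  */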
	      /* Optimize tan(x)*cos(x) as sin(x).  */
	      if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
		   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
		   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
		   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
		   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
		   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
		  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				      CALL_EXPR_ARG (arg1, 0), 0))
		{
		  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

		  if (sinfn != NULL_TREE)
		    return build_call_expr_loc (loc, sinfn, 1,
						CALL_EXPR_ARG (arg0, 0));
		}

	      /* Optimize x*pow(x,c) as pow(x,c+1).  */
	      if (fcode1 == BUILT_IN_POW
		  || fcode1 == BUILT_IN_POWF
		  || fcode1 == BUILT_IN_POWL)
		{
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
		  tree arg11 = CALL_EXPR_ARG (arg1, 1);
		  if (TREE_CODE (arg11) == REAL_CST
		      && !TREE_OVERFLOW (arg11)
		      && operand_equal_p (arg0, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
		      REAL_VALUE_TYPE c;
		      tree arg;

		      c = TREE_REAL_CST (arg11);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
		    }
		}

	      /* Optimize pow(x,c)*x as pow(x,c+1).  */
	      if (fcode0 == BUILT_IN_POW
		  || fcode0 == BUILT_IN_POWF
		  || fcode0 == BUILT_IN_POWL)
		{
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg01 = CALL_EXPR_ARG (arg0, 1);
		  if (TREE_CODE (arg01) == REAL_CST
		      && !TREE_OVERFLOW (arg01)
		      && operand_equal_p (arg1, arg00, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      REAL_VALUE_TYPE c;
		      tree arg;

		      c = TREE_REAL_CST (arg01);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      return build_call_expr_loc (loc, powfn, 2, arg1, arg);
		    }
		}

	      /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
	      if (optimize_function_for_speed_p (cfun)
		  && operand_equal_p (arg0, arg1, 0))
		{
		  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

		  if (powfn)
		    {
		      tree arg = build_real (type, dconst2);
		      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
		    }
		}
	    }
	}
      goto associate;
    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = build_zero_cst (type);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg1);
	}

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = build_zero_cst (type);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg0);
	}
      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  double_int c1, c2, c3, msk;
	  int width = TYPE_PRECISION (type), w;
	  c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
	  c2 = tree_to_double_int (arg1);

	  /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
	  if (double_int_equal_p (double_int_and (c1, c2), c1))
	    return omit_one_operand_loc (loc, type, arg1,
					 TREE_OPERAND (arg0, 0));

	  msk = double_int_mask (width);

	  /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
	  if (double_int_zero_p (double_int_and_not (msk,
						     double_int_ior (c1, c2))))
	    return fold_build2_loc (loc, BIT_IOR_EXPR, type,
				    TREE_OPERAND (arg0, 0), arg1);

	  /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
	     unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
	     mode which allows further optimizations.  */
	  c1 = double_int_and (c1, msk);
	  c2 = double_int_and (c2, msk);
	  c3 = double_int_and_not (c1, c2);
	  for (w = BITS_PER_UNIT;
	       w <= width && w <= HOST_BITS_PER_WIDE_INT;
	       w <<= 1)
	    {
	      unsigned HOST_WIDE_INT mask
		= (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
	      if (((c1.low | c2.low) & mask) == mask
		  && (c1.low & ~mask) == 0 && c1.high == 0)
		{
		  c3 = uhwi_to_double_int (mask);
		  break;
		}
	    }
	  if (!double_int_equal_p (c3, c1))
	    return fold_build2_loc (loc, BIT_IOR_EXPR, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, type,
						     TREE_OPERAND (arg0, 0),
						     double_int_to_tree (type,
									 c3)),
				    arg1);
	}
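      /* E.g. "(x & 0x0f) | 0xff" reduces to "0xff" via the (C1&C2) == C1
	 test, while "(x & 0x3f) | 0x0f" has its inner mask minimized to
	 "(x & 0x30) | 0x0f", since the 0x0f bits are supplied by the OR
	 anyway.  */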
      /* (X & Y) | Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X & Y) | X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X | (X & Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X | (Y & X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));

      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;
      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

	 This results in more efficient code for machines without a NAND
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NAND instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	return
	  fold_build1_loc (loc, BIT_NOT_EXPR, type,
			   build2 (BIT_AND_EXPR, type,
				   fold_convert_loc (loc, type,
						     TREE_OPERAND (arg0, 0)),
				   fold_convert_loc (loc, type,
						     TREE_OPERAND (arg1, 0))));

      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_all_onesp (arg1))
	return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = build_zero_cst (type);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg1);
	}

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = build_zero_cst (type);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg0);
	}
      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
	 with a constant, and the two constants have no bits in common,
	 we should treat this as a BIT_IOR_EXPR since this may produce more
	 simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (BIT_AND_EXPR,
					 TREE_OPERAND (arg0, 1),
					 TREE_OPERAND (arg1, 1))))
	{
	  code = BIT_IOR_EXPR;
	  goto bit_ior;
	}
      /* (X | Y) ^ X -> Y & ~X  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  tree t2 = TREE_OPERAND (arg0, 1);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
				arg1);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				fold_convert_loc (loc, type, t2),
				fold_convert_loc (loc, type, t1));
	  return t1;
	}

      /* (Y | X) ^ X -> Y & ~X  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tree t2 = TREE_OPERAND (arg0, 0);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
				arg1);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				fold_convert_loc (loc, type, t2),
				fold_convert_loc (loc, type, t1));
	  return t1;
	}

      /* X ^ (X | Y) -> Y & ~X  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
	{
	  tree t2 = TREE_OPERAND (arg1, 1);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
				arg0);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				fold_convert_loc (loc, type, t2),
				fold_convert_loc (loc, type, t1));
	  return t1;
	}

      /* X ^ (Y | X) -> Y & ~X  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
	{
	  tree t2 = TREE_OPERAND (arg1, 0);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
				arg0);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				fold_convert_loc (loc, type, t2),
				fold_convert_loc (loc, type, t1));
	  return t1;
	}
      /* Convert ~X ^ ~Y to X ^ Y.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));

      /* Convert ~X ^ C to X ^ ~C.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));

      /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_onep (arg1))
	return fold_build2_loc (loc, EQ_EXPR, type, arg0,
				build_int_cst (TREE_TYPE (arg0), 0));

      /* Fold (X & Y) ^ Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}
      /* Fold (X & Y) ^ X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}
      /* Fold X ^ (X & Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_convert_loc (loc, type, arg0),
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
	}
      /* Fold X ^ (Y & X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg0));
	}
      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
      if (integer_all_onesp (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X & X is always zero.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X & ~X is always zero.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree tmp1 = fold_convert_loc (loc, type, arg1);
	  tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	  tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
	  tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
	  return
	    fold_convert_loc (loc, type,
			      fold_build2_loc (loc, BIT_IOR_EXPR,
					       type, tmp2, tmp3));
	}
10976 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
10977 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
10978 return omit_one_operand_loc (loc
, type
, arg1
, TREE_OPERAND (arg0
, 0));
10979 /* (X | Y) & X is (Y, X). */
10980 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
10981 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
10982 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
10983 return omit_one_operand_loc (loc
, type
, arg1
, TREE_OPERAND (arg0
, 1));
10984 /* X & (X | Y) is (Y, X). */
10985 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
10986 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0)
10987 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 1)))
10988 return omit_one_operand_loc (loc
, type
, arg0
, TREE_OPERAND (arg1
, 1));
10989 /* X & (Y | X) is (Y, X). */
10990 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
10991 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
10992 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
10993 return omit_one_operand_loc (loc
, type
, arg0
, TREE_OPERAND (arg1
, 0));
      /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_onep (arg1))
	{
	  tem = TREE_OPERAND (arg0, 0);
	  return fold_build2_loc (loc, EQ_EXPR, type,
				  fold_build2_loc (loc, BIT_AND_EXPR,
						   TREE_TYPE (tem), tem,
						   build_int_cst (TREE_TYPE (tem), 1)),
				  build_int_cst (TREE_TYPE (tem), 0));
	}
      /* Fold ~X & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && integer_onep (arg1))
	{
	  tem = TREE_OPERAND (arg0, 0);
	  return fold_build2_loc (loc, EQ_EXPR, type,
				  fold_build2_loc (loc, BIT_AND_EXPR,
						   TREE_TYPE (tem), tem,
						   build_int_cst (TREE_TYPE (tem), 1)),
				  build_int_cst (TREE_TYPE (tem), 0));
	}
      /* Fold (X ^ Y) & Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}
      /* Fold (X ^ Y) & X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg1));
	}
      /* Fold X & (X ^ Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_convert_loc (loc, type, arg0),
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
	}
      /* Fold X & (Y ^ X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				  fold_convert_loc (loc, type, arg0));
	}
      /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
	 ((A & N) + B) & M -> (A + B) & M
	 Similarly if (N & M) == 0,
	 ((A | N) + B) & M -> (A + B) & M
	 and for - instead of + (or unary - instead of +)
	 and/or ^ instead of |.
	 If B is constant and (B & M) == 0, fold into A & M.  */
      if (host_integerp (arg1, 1))
	{
	  unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
	  if (~cst1 && (cst1 & (cst1 + 1)) == 0
	      && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	      && (TREE_CODE (arg0) == PLUS_EXPR
		  || TREE_CODE (arg0) == MINUS_EXPR
		  || TREE_CODE (arg0) == NEGATE_EXPR)
	      && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
		  || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
	    {
	      tree pmop[2];
	      int which = 0;
	      unsigned HOST_WIDE_INT cst0;

	      /* Now we know that arg0 is (C + D) or (C - D) or
		 -C and arg1 (M) is == (1LL << cst) - 1.
		 Store C into PMOP[0] and D into PMOP[1].  */
	      pmop[0] = TREE_OPERAND (arg0, 0);
	      pmop[1] = NULL;
	      if (TREE_CODE (arg0) != NEGATE_EXPR)
		{
		  pmop[1] = TREE_OPERAND (arg0, 1);
		  which = 1;
		}

	      if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
		  || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
		      & cst1) != cst1)
		which = -1;

	      for (; which >= 0; which--)
		switch (TREE_CODE (pmop[which]))
		  {
		  case BIT_AND_EXPR:
		  case BIT_IOR_EXPR:
		  case BIT_XOR_EXPR:
		    if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
			!= INTEGER_CST)
		      break;
		    /* tree_low_cst not used, because we don't care about
		       the upper bits.  */
		    cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
		    cst0 &= cst1;
		    if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
		      {
			if (cst0 != cst1)
			  break;
		      }
		    else if (cst0 != 0)
		      break;
		    /* If C or D is of the form (A & N) where
		       (N & M) == M, or of the form (A | N) or
		       (A ^ N) where (N & M) == 0, replace it with A.  */
		    pmop[which] = TREE_OPERAND (pmop[which], 0);
		    break;
		  case INTEGER_CST:
		    /* If C or D is a N where (N & M) == 0, it can be
		       omitted (assumed 0).  */
		    if ((TREE_CODE (arg0) == PLUS_EXPR
			 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
			&& (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
		      pmop[which] = NULL;
		    break;
		  default:
		    break;
		  }

	      /* Only build anything new if we optimized one or both arguments
		 above.  */
	      if (pmop[0] != TREE_OPERAND (arg0, 0)
		  || (TREE_CODE (arg0) != NEGATE_EXPR
		      && pmop[1] != TREE_OPERAND (arg0, 1)))
		{
		  tree utype = TREE_TYPE (arg0);
		  if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
		    {
		      /* Perform the operations in a type that has defined
			 overflow behavior.  */
		      utype = unsigned_type_for (TREE_TYPE (arg0));
		      if (pmop[0] != NULL)
			pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
		      if (pmop[1] != NULL)
			pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
		    }

		  if (TREE_CODE (arg0) == NEGATE_EXPR)
		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
		  else if (TREE_CODE (arg0) == PLUS_EXPR)
		    {
		      if (pmop[0] != NULL && pmop[1] != NULL)
			tem = fold_build2_loc (loc, PLUS_EXPR, utype,
					       pmop[0], pmop[1]);
		      else if (pmop[0] != NULL)
			tem = pmop[0];
		      else if (pmop[1] != NULL)
			tem = pmop[1];
		      else
			return build_int_cst (type, 0);
		    }
		  else if (pmop[0] == NULL)
		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
		  else
		    tem = fold_build2_loc (loc, MINUS_EXPR, utype,
					   pmop[0], pmop[1]);
		  /* TEM is now the new binary +, - or unary - replacement.  */
		  tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
					 fold_convert_loc (loc, utype, arg1));
		  return fold_convert_loc (loc, type, tem);
		}
	    }
	}
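      /* E.g. with M = 0xf and N = 0xff, "((a & 0xff) + b) & 0xf" becomes
	 "(a + b) & 0xf": the low four bits of a sum depend only on the
	 low four bits of the operands, so the wider inner mask is
	 redundant.  */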
      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  unsigned int prec
	    = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

	  if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
	      && (~TREE_INT_CST_LOW (arg1)
		  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
	    return
	      fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	}
      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

	 This results in more efficient code for machines without a NOR
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NOR instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				  build2 (BIT_IOR_EXPR, type,
					  fold_convert_loc (loc, type,
							    TREE_OPERAND (arg0, 0)),
					  fold_convert_loc (loc, type,
							    TREE_OPERAND (arg1, 0))));
	}
      /* If arg0 is derived from the address of an object or function, we may
	 be able to fold this expression using the object or function's
	 alignment.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
	{
	  unsigned HOST_WIDE_INT modulus, residue;
	  unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);

	  modulus = get_pointer_modulus_and_residue (arg0, &residue,
						     integer_onep (arg1));

	  /* This works because modulus is a power of 2.  If this weren't the
	     case, we'd have to replace it by its greatest power-of-2
	     divisor: modulus & -modulus.  */
	  if (low < modulus)
	    return build_int_cst (type, residue & low);
	}
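      /* E.g. if arg0 is the address of an object known to be 8-byte
	 aligned, masking it with 7 folds to the constant residue 0
	 without examining the pointer value at run time.  */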
      /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
	      (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
	 if the new mask might be further optimized.  */
      if ((TREE_CODE (arg0) == LSHIFT_EXPR
	   || TREE_CODE (arg0) == RSHIFT_EXPR)
	  && host_integerp (TREE_OPERAND (arg0, 1), 1)
	  && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
	     < TYPE_PRECISION (TREE_TYPE (arg0))
	  && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
	  && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
	{
	  unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
	  unsigned HOST_WIDE_INT mask
	    = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
	  unsigned HOST_WIDE_INT newmask, zerobits = 0;
	  tree shift_type = TREE_TYPE (arg0);

	  if (TREE_CODE (arg0) == LSHIFT_EXPR)
	    zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
	  else if (TREE_CODE (arg0) == RSHIFT_EXPR
		   && TYPE_PRECISION (TREE_TYPE (arg0))
		      == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
	    {
	      unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
	      tree arg00 = TREE_OPERAND (arg0, 0);
	      /* See if more bits can be proven as zero because of
		 zero extension.  */
	      if (TREE_CODE (arg00) == NOP_EXPR
		  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
		{
		  tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
		  if (TYPE_PRECISION (inner_type)
		      == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
		      && TYPE_PRECISION (inner_type) < prec)
		    {
		      prec = TYPE_PRECISION (inner_type);
		      /* See if we can shorten the right shift.  */
		      if (shiftc < prec)
			shift_type = inner_type;
		    }
		}
	      zerobits = ~(unsigned HOST_WIDE_INT) 0;
	      zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
	      zerobits <<= prec - shiftc;
	      /* For arithmetic shift if sign bit could be set, zerobits
		 can contain actually sign bits, so no transformation is
		 possible, unless MASK masks them all away.  In that
		 case the shift needs to be converted into logical shift.  */
	      if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
		  && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
		{
		  if ((mask & zerobits) == 0)
		    shift_type = unsigned_type_for (TREE_TYPE (arg0));
		  else
		    zerobits = 0;
		}
	    }

	  /* ((X << 16) & 0xff00) is (X, 0).  */
	  if ((mask & zerobits) == mask)
	    return omit_one_operand_loc (loc, type,
					 build_int_cst (type, 0), arg0);

	  newmask = mask | zerobits;
	  if (newmask != mask && (newmask & (newmask + 1)) == 0)
	    {
	      unsigned int prec;

	      /* Only do the transformation if NEWMASK is some integer
		 mode's mask.  */
	      for (prec = BITS_PER_UNIT;
		   prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
		if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
		  break;
	      if (prec < HOST_BITS_PER_WIDE_INT
		  || newmask == ~(unsigned HOST_WIDE_INT) 0)
		{
		  tree newmaskt;

		  if (shift_type != TREE_TYPE (arg0))
		    {
		      tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
					     fold_convert_loc (loc, shift_type,
							       TREE_OPERAND (arg0, 0)),
					     TREE_OPERAND (arg0, 1));
		      tem = fold_convert_loc (loc, type, tem);
		    }
		  else
		    tem = op0;
		  newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
		  if (!tree_int_cst_equal (newmaskt, arg1))
		    return fold_build2_loc (loc, BIT_AND_EXPR, type,
					    tem, newmaskt);
		}
	    }
	}

      goto associate;
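      /* Shift-mask example: for a 32-bit unsigned x, "(x << 16) & 0xff00"
	 is always zero (every mask bit lands on a shifted-in zero), while
	 for "(x >> 24) & 0xff" the mask widens to all-ones and the AND
	 folds away, since the shift already cleared the high bits.  */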
    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
	 of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
	  && real_zerop (arg1))
	return NULL_TREE;

      /* Optimize A / A to 1.0 if we don't care about
	 NaNs or Infinities.  Skip the transformation
	 for non-real operands.  */
      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
	  && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
	  && operand_equal_p (arg0, arg1, 0))
	{
	  tree r = build_real (TREE_TYPE (arg0), dconst1);

	  return omit_two_operands_loc (loc, type, r, arg0, arg1);
	}

      /* The complex version of the above A / A optimization.  */
      if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
	  && operand_equal_p (arg0, arg1, 0))
	{
	  tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
	  if (! HONOR_NANS (TYPE_MODE (elem_type))
	      && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
	    {
	      tree r = build_real (elem_type, dconst1);
	      /* omit_two_operands will call fold_convert for us.  */
	      return omit_two_operands_loc (loc, type, r, arg0, arg1);
	    }
	}
      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				TREE_OPERAND (arg0, 0),
				negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				negate_expr (arg0),
				TREE_OPERAND (arg1, 0));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_minus_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type,
						      negate_expr (arg0)));
      /* If ARG1 is a constant, we can convert this to a multiply by the
	 reciprocal.  This does not have the same rounding properties,
	 so only do this if -freciprocal-math.  We can actually
	 always safely do it if ARG1 is a power of two, but it's hard to
	 tell if it is or not in a portable manner.  */
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  if (flag_reciprocal_math
	      && 0 != (tem = const_binop (code, build_real (type, dconst1),
					  arg1)))
	    return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
	  /* Find the reciprocal if optimizing and the result is exact.  */
	  if (optimize)
	    {
	      REAL_VALUE_TYPE r;
	      r = TREE_REAL_CST (arg1);
	      if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
		{
		  tem = build_real (type, r);
		  return fold_build2_loc (loc, MULT_EXPR, type,
					  fold_convert_loc (loc, type, arg0),
					  tem);
		}
	    }
	}
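      /* E.g. with -freciprocal-math "x / 5.0" becomes "x * 0.2"; when the
	 inverse is exact, as for "x / 2.0" -> "x * 0.5", the multiply is
	 emitted whenever we are optimizing, since no rounding can
	 change.  */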
      /* Convert A/B/C to A/(B*C).  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg0) == RDIV_EXPR)
	return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, MULT_EXPR, type,
						 TREE_OPERAND (arg0, 1), arg1));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg1) == RDIV_EXPR)
	return fold_build2_loc (loc, MULT_EXPR, type,
				fold_build2_loc (loc, RDIV_EXPR, type, arg0,
						 TREE_OPERAND (arg1, 0)),
				TREE_OPERAND (arg1, 1));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (arg0) == REAL_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
	{
	  tree tem = const_binop (RDIV_EXPR, arg0,
				  TREE_OPERAND (arg1, 1));
	  if (tem)
	    return fold_build2_loc (loc, RDIV_EXPR, type, tem,
				    TREE_OPERAND (arg1, 0));
	}
      if (flag_unsafe_math_optimizations)
	{
	  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	  enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	  /* Optimize sin(x)/cos(x) as tan(x).  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				  CALL_EXPR_ARG (arg1, 0), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		return build_call_expr_loc (loc, tanfn, 1,
					    CALL_EXPR_ARG (arg0, 0));
	    }
11453 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11454 if (((fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_SIN
)
11455 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_SINF
)
11456 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_SINL
))
11457 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
11458 CALL_EXPR_ARG (arg1
, 0), 0))
11460 tree tanfn
= mathfn_built_in (type
, BUILT_IN_TAN
);
11462 if (tanfn
!= NULL_TREE
)
11464 tree tmp
= build_call_expr_loc (loc
, tanfn
, 1,
11465 CALL_EXPR_ARG (arg0
, 0));
11466 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11467 build_real (type
, dconst1
), tmp
);
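	  /* Illustrative example (added commentary): given a common
	     operand tree x, sin (x) / cos (x) folds to tan (x) and
	     cos (x) / sin (x) folds to 1.0 / tan (x); both rewrites are
	     guarded by -funsafe-math-optimizations since rounding may
	     differ.  */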
	  /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
	     NaNs or Infinities.  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg1, 0);

	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		  && operand_equal_p (arg00, arg01, 0))
		{
		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		  if (cosfn != NULL_TREE)
		    return build_call_expr_loc (loc, cosfn, 1, arg00);
		}
	    }
	  /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
	     NaNs or Infinities.  */
	  if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg1, 0);

	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		  && operand_equal_p (arg00, arg01, 0))
		{
		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		  if (cosfn != NULL_TREE)
		    {
		      tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
		      return fold_build2_loc (loc, RDIV_EXPR, type,
					      build_real (type, dconst1),
					      tmp);
		    }
		}
	    }
	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
	  if (fcode0 == BUILT_IN_POW
	      || fcode0 == BUILT_IN_POWF
	      || fcode0 == BUILT_IN_POWL)
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      if (TREE_CODE (arg01) == REAL_CST
		  && !TREE_OVERFLOW (arg01)
		  && operand_equal_p (arg1, arg00, 0))
		{
		  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  REAL_VALUE_TYPE c;
		  tree arg;

		  c = TREE_REAL_CST (arg01);
		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
		  arg = build_real (type, c);
		  return build_call_expr_loc (loc, powfn, 2, arg1, arg);
		}
	    }
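	  /* Illustrative example (added commentary): pow (x, 3.0) / x
	     folds to pow (x, 2.0).  The exponent is adjusted with
	     real_arithmetic, so a fractional constant such as 2.5 works
	     the same way.  */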
	  /* Optimize a/root(b/c) into a*root(c/b).  */
	  if (BUILTIN_ROOT_P (fcode1))
	    {
	      tree rootarg = CALL_EXPR_ARG (arg1, 0);

	      if (TREE_CODE (rootarg) == RDIV_EXPR)
		{
		  tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
		  tree b = TREE_OPERAND (rootarg, 0);
		  tree c = TREE_OPERAND (rootarg, 1);

		  tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);

		  tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
		  return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
		}
	    }
	  /* Optimize x/expN(y) into x*expN(-y).  */
	  if (BUILTIN_EXPONENT_P (fcode1))
	    {
	      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
	      tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
	      arg1 = build_call_expr_loc (loc,
					  expfn, 1,
					  fold_convert_loc (loc, type, arg));
	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
	    }
	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
	  if (fcode1 == BUILT_IN_POW
	      || fcode1 == BUILT_IN_POWF
	      || fcode1 == BUILT_IN_POWL)
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
	      tree arg10 = CALL_EXPR_ARG (arg1, 0);
	      tree arg11 = CALL_EXPR_ARG (arg1, 1);
	      tree neg11 = fold_convert_loc (loc, type,
					     negate_expr (arg11));
	      arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
	    }
	}
      return NULL_TREE;
    case TRUNC_DIV_EXPR:
      /* Optimize (X & (-A)) / A where A is a power of 2,
	 to X >> log2(A).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
	  && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
	{
	  tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
				      arg1, TREE_OPERAND (arg0, 1));
	  if (sum && integer_zerop (sum))
	    {
	      unsigned long pow2;

	      if (TREE_INT_CST_LOW (arg1))
		pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
	      else
		pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
		       + HOST_BITS_PER_WIDE_INT;

	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
				      TREE_OPERAND (arg0, 0),
				      build_int_cst (NULL_TREE, pow2));
	    }
	}
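      /* Illustrative example (added commentary): for signed X,
	 (X & -8) / 8 folds to X >> 3; the -8 mask guarantees the dividend
	 is a multiple of 8, and exact_log2 (8) == 3 supplies the shift
	 count.  */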
      /* Fall thru */

    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
	 a power of 2, to A >> (N + log2(B)).  */
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == LSHIFT_EXPR
	  && (TYPE_UNSIGNED (type)
	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	{
	  tree sval = TREE_OPERAND (arg1, 0);
	  if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
	    {
	      tree sh_cnt = TREE_OPERAND (arg1, 1);
	      unsigned long pow2;

	      if (TREE_INT_CST_LOW (sval))
		pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
	      else
		pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
		       + HOST_BITS_PER_WIDE_INT;

	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying A / (B << N)"),
				       WARN_STRICT_OVERFLOW_MISC);

	      sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
					sh_cnt,
					build_int_cst (NULL_TREE, pow2));
	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
				      fold_convert_loc (loc, type, arg0),
				      sh_cnt);
	    }
	}
      /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
	 TRUNC_DIV_EXPR.  Rewrite into the latter in this case.  */
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_UNSIGNED (type)
	  && code == FLOOR_DIV_EXPR)
	return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);

      /* Fall thru */
    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
	return NULL_TREE;
      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return fold_convert_loc (loc, type, negate_expr (arg0));
      /* Convert -A / -B to A / B when the type is signed and overflow is
	 undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg0) == NEGATE_EXPR
	  && negate_expr_p (arg1))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, type,
						    TREE_OPERAND (arg0, 0)),
				  fold_convert_loc (loc, type,
						    negate_expr (arg1)));
	}
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && negate_expr_p (arg0))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, type,
						    negate_expr (arg0)),
				  fold_convert_loc (loc, type,
						    TREE_OPERAND (arg1, 0)));
	}
      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	 operation, EXACT_DIV_EXPR.

	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, it's not clear if they do
	 after the last round of changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	  && multiple_of_p (type, arg0, arg1))
	return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert_loc (loc, type, tem);
	}

      return NULL_TREE;
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % 1 is always zero, but be sure to preserve any side
	 effects in X.  */
      if (integer_onep (arg1))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % 0, return X % 0 unchanged so that we can get the
	 proper warnings and errors.  */
      if (integer_zerop (arg1))
	return NULL_TREE;

      /* 0 % X is always zero, but be sure to preserve any side
	 effects in X.  Place this after checking for X == 0.  */
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)
	  && TREE_INT_CST_HIGH (arg1) < 0
	  && !TYPE_OVERFLOW_TRAPS (type)
	  /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
	  && !sign_bit_p (arg1, arg1))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  negate_expr (arg1)));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && !TYPE_OVERFLOW_TRAPS (type))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying modulus"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert_loc (loc, type, tem);
	}
      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
	 i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
	  && (TYPE_UNSIGNED (type)
	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	{
	  tree c = arg1;
	  /* Also optimize A % (C << N) where C is a power of 2,
	     to A & ((C << N) - 1).  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR)
	    c = TREE_OPERAND (arg1, 0);

	  if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
	    {
	      tree mask
		= fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
				   build_int_cst (TREE_TYPE (arg1), 1));
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying "
					"X % (power of two)"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_convert_loc (loc, type, arg0),
				      fold_convert_loc (loc, type, mask));
	    }
	}
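      /* Illustrative example (added commentary): for unsigned X, X % 8
	 folds to X & 7, and X % (4 << N) folds to X & ((4 << N) - 1);
	 negative dividends are excluded by the nonnegativity check
	 above.  */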
      return NULL_TREE;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
	  && tree_expr_nonnegative_p (arg1))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Since negative shift count is not well-defined,
	 don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	return NULL_TREE;
      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
      if (TREE_CODE (op0) == code && host_integerp (arg1, false)
	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
	  && host_integerp (TREE_OPERAND (arg0, 1), false)
	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
	{
	  HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
			       + TREE_INT_CST_LOW (arg1));

	  /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
	     being well defined.  */
	  if (low >= TYPE_PRECISION (type))
	    {
	      if (code == LROTATE_EXPR || code == RROTATE_EXPR)
		low = low % TYPE_PRECISION (type);
	      else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
		return omit_one_operand_loc (loc, type,
					     build_int_cst (type, 0),
					     TREE_OPERAND (arg0, 0));
	      else
		low = TYPE_PRECISION (type) - 1;
	    }

	  return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				  build_int_cst (type, low));
	}
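      /* Illustrative example (added commentary): (x << 3) << 5 folds to
	 x << 8.  If the two counts sum to at least the precision, say
	 20 + 20 on a 32-bit unsigned x, the result folds to the
	 constant 0 instead.  */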
      /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
	 into x & ((unsigned)-1 >> c) for unsigned types.  */
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
	   || (TYPE_UNSIGNED (type)
	       && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
	  && host_integerp (arg1, false)
	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
	  && host_integerp (TREE_OPERAND (arg0, 1), false)
	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
	{
	  HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
	  HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
	  tree lshift;
	  tree arg00;

	  if (low0 == low1)
	    {
	      arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

	      lshift = build_int_cst (type, -1);
	      lshift = int_const_binop (code, lshift, arg1, 0);

	      return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
	    }
	}
      /* Rewrite an LROTATE_EXPR by a constant into an
	 RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
	{
	  tree tem = build_int_cst (TREE_TYPE (arg1),
				    TYPE_PRECISION (type));
	  tem = const_binop (MINUS_EXPR, tem, arg1);
	  return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
	}
      /* If we have a rotate of a bit operation with the rotate count and
	 the second operand of the bit operation both constant,
	 permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == BIT_AND_EXPR
	      || TREE_CODE (arg0) == BIT_IOR_EXPR
	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, TREE_CODE (arg0), type,
				fold_build2_loc (loc, code, type,
						 TREE_OPERAND (arg0, 0), arg1),
				fold_build2_loc (loc, code, type,
						 TREE_OPERAND (arg0, 1), arg1));
      /* Two consecutive rotates adding up to the precision of the
	 type can be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == RROTATE_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (arg1) == 0
	  && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
	  && ((TREE_INT_CST_LOW (arg1)
	       + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
	      == (unsigned int) TYPE_PRECISION (type)))
	return TREE_OPERAND (arg0, 0);
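      /* Illustrative example (added commentary): for a 32-bit x, rotating
	 right by 12 and then by 20 restores every bit, so the pair folds
	 away to x itself.  */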
      /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
	      (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
	 if the latter can be further optimized.  */
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree mask = fold_build2_loc (loc, code, type,
				       fold_convert_loc (loc, type,
							 TREE_OPERAND (arg0, 1)),
				       arg1);
	  tree shift = fold_build2_loc (loc, code, type,
					fold_convert_loc (loc, type,
							  TREE_OPERAND (arg0, 0)),
					arg1);
	  tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
	  if (tem)
	    return tem;
	}

      return NULL_TREE;
    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
      if (tem)
	return tem;
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_MAX_VALUE (type)
	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
      if (tem)
	return tem;
      goto associate;
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
	return fold_convert_loc (loc, type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
	 must be evaluated.  */
      if (integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
	 case will be handled here.  */
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
	 means A >= Y && A != MAX, but in this case we know that
	 A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
	  && !TREE_SIDE_EFFECTS (arg1))
	{
	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
	  if (tem && !operand_equal_p (tem, arg0, 0))
	    return fold_build2_loc (loc, code, type, tem, arg1);

	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
	  if (tem && !operand_equal_p (tem, arg1, 0))
	    return fold_build2_loc (loc, code, type, arg0, tem);
	}

    truth_andor:
      /* We only do these simplifications if we are optimizing.  */
      if (!optimize)
	return NULL_TREE;
      /* Check for things like (A || B) && (A || C).  We can convert this
	 to A || (B && C).  Note that either operator can be any of the four
	 truth and/or operations and the transformation will still be
	 valid.  Also note that we only care about order for the
	 ANDIF and ORIF operators.  If B contains side effects, this
	 might change the truth-value of A.  */
      if (TREE_CODE (arg0) == TREE_CODE (arg1)
	  && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	      || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	      || TREE_CODE (arg0) == TRUTH_AND_EXPR
	      || TREE_CODE (arg0) == TRUTH_OR_EXPR)
	  && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
	{
	  tree a00 = TREE_OPERAND (arg0, 0);
	  tree a01 = TREE_OPERAND (arg0, 1);
	  tree a10 = TREE_OPERAND (arg1, 0);
	  tree a11 = TREE_OPERAND (arg1, 1);
	  int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			      || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			     && (code == TRUTH_AND_EXPR
				 || code == TRUTH_OR_EXPR));

	  if (operand_equal_p (a00, a10, 0))
	    return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				    fold_build2_loc (loc, code, type,
						     a01, a11));
	  else if (commutative && operand_equal_p (a00, a11, 0))
	    return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				    fold_build2_loc (loc, code, type,
						     a01, a10));
	  else if (commutative && operand_equal_p (a01, a10, 0))
	    return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
				    fold_build2_loc (loc, code, type,
						     a00, a11));

	  /* This case is tricky because we must either have commutative
	     operators or else A10 must not have side-effects.  */

	  else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
		   && operand_equal_p (a01, a11, 0))
	    return fold_build2_loc (loc, TREE_CODE (arg0), type,
				    fold_build2_loc (loc, code, type,
						     a00, a10),
				    a01);
	}
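      /* Illustrative example (added commentary): (a || b) && (a || c)
	 folds to a || (b && c) through the first operand_equal_p test
	 above, so A is tested once; the TREE_SIDE_EFFECTS guard on B keeps
	 the rewrite from changing how often side effects run.  */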
      /* See if we can build a range comparison.  */
      if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
	return tem;

      if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
	  || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
	{
	  tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
	  if (tem)
	    return fold_build2_loc (loc, code, type, tem, arg1);
	}

      if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
	  || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
	{
	  tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
	  if (tem)
	    return fold_build2_loc (loc, code, type, arg0, tem);
	}

      /* Check for the possibility of merging component references.  If our
	 lhs is another similar operation, try to merge its rhs with our
	 rhs.  Then try to merge our lhs and rhs.  */
      if (TREE_CODE (arg0) == code
	  && 0 != (tem = fold_truthop (loc, code, type,
				       TREE_OPERAND (arg0, 1), arg1)))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
	return tem;

      return NULL_TREE;
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or true.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return fold_convert_loc (loc, type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant true, result is true, but we must
	 evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
	 TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      goto truth_andor;
    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
	{
	  /* Only call invert_truthvalue if operand is a truth value.  */
	  if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
	    tem = fold_build1_loc (loc, TRUTH_NOT_EXPR,
				   TREE_TYPE (arg0), arg0);
	  else
	    tem = invert_truthvalue_loc (loc, arg0);
	  return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	}
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      return NULL_TREE;
    case EQ_EXPR:
    case NE_EXPR:
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;

      /* bool_var != 0 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == NE_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var == 1 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == EQ_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var != 1 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == NE_EXPR)
	return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
				fold_convert_loc (loc, type, arg0));

      /* bool_var == 0 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == EQ_EXPR)
	return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
				fold_convert_loc (loc, type, arg0));

      /* !exp != 0 becomes !exp.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
	  && code == NE_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If this is an equality comparison of the address of two non-weak,
	 unaliased symbols neither of which are extern (since we do not
	 have access to attributes for externs), then we know the result.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
	{
	  /* We know that we're looking at the address of two
	     non-weak, unaliased, static _DECL nodes.

	     It is both wasteful and incorrect to call operand_equal_p
	     to compare the two ADDR_EXPR nodes.  It is wasteful in that
	     all we need to do is test pointer equality for the arguments
	     to the two ADDR_EXPR nodes.  It is incorrect to use
	     operand_equal_p as that function is NOT equivalent to a
	     C equality test.  It can in fact return false for two
	     objects which would test as equal using the C equality
	     operator.  */
	  bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
	  return constant_boolean_node (equal
					? code == EQ_EXPR : code != EQ_EXPR,
					type);
	}
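      /* Illustrative example (added commentary): with "static int a, b;",
	 the comparison &a == &b folds to 0 and &a != &b folds to 1, by
	 testing pointer identity of the two _DECL nodes directly.  */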
      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
	 a MINUS_EXPR of a constant, we can convert it into a comparison with
	 a revised constant as long as no overflow occurs.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      fold_convert_loc (loc, TREE_TYPE (arg0),
							arg1),
				      TREE_OPERAND (arg0, 1)))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
      /* Similarly for a NEGATE_EXPR.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = negate_expr (arg1))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
      /* Similarly for a BIT_XOR_EXPR;  X ^ C1 == C2 is X == (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, BIT_XOR_EXPR,
						 TREE_TYPE (arg0),
						 fold_convert_loc (loc,
								   TREE_TYPE (arg0),
								   arg1),
						 TREE_OPERAND (arg0, 1)));
      /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR
	   || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	   || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	      || POINTER_TYPE_P (TREE_TYPE (arg0))))
	{
	  tree val = TREE_OPERAND (arg0, 1);
	  return omit_two_operands_loc (loc, type,
					fold_build2_loc (loc, code, type,
							 val,
							 build_int_cst (TREE_TYPE (val),
									0)),
					TREE_OPERAND (arg0, 0), arg1);
	}
      /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
	  && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
	{
	  return omit_two_operands_loc (loc, type,
					code == NE_EXPR
					? boolean_true_node
					: boolean_false_node,
					TREE_OPERAND (arg0, 1), arg1);
	}
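      /* Illustrative example (added commentary): 7 - x == x would need
	 2*x == 7, which no integer satisfies because 7 is odd, so the
	 comparison folds to false (and 7 - x != x folds to true) for
	 every x.  */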
      /* If we have X - Y == 0, we can convert that to X == Y and similarly
	 for !=.  Don't do this for ordered comparisons due to overflow.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
	  && integer_zerop (arg1))
	return fold_build2_loc (loc, code, type,
				TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg0, 1));

      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      if (TREE_CODE (arg0) == ABS_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1)))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
      /* If this is an EQ or NE comparison with zero and ARG0 is
	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
	 two operations, but the latter can be done in one less insn
	 on machines that have only two-operand insns or on which a
	 constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_zerop (arg1))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  if (TREE_CODE (arg00) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg00, 0)))
	    {
	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
					  arg01, TREE_OPERAND (arg00, 1));
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				     build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, TREE_TYPE (arg1),
							tem),
				      arg1);
	    }
	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
		   && integer_onep (TREE_OPERAND (arg01, 0)))
	    {
	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
					  arg00, TREE_OPERAND (arg01, 1));
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				     build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, TREE_TYPE (arg1),
							tem),
				      arg1);
	    }
	}
      /* If this is an NE or EQ comparison of zero against the result of a
	 signed MOD operation whose second operand is a power of 2, make
	 the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tree newtype = unsigned_type_for (TREE_TYPE (arg0));
	  tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
					 fold_convert_loc (loc, newtype,
							   TREE_OPERAND (arg0, 0)),
					 fold_convert_loc (loc, newtype,
							   TREE_OPERAND (arg0, 1)));

	  return fold_build2_loc (loc, code, type, newmod,
				  fold_convert_loc (loc, newtype, arg1));
	}
      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
	 C1 is a valid shift constant, and C2 is a power of two, i.e.
	 a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
	     == INTEGER_CST
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && integer_zerop (arg1))
	{
	  tree itype = TREE_TYPE (arg0);
	  unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);

	  /* Check for a valid shift count.  */
	  if (TREE_INT_CST_HIGH (arg001) == 0
	      && TREE_INT_CST_LOW (arg001) < prec)
	    {
	      tree arg01 = TREE_OPERAND (arg0, 1);
	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
	      /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
		 can be rewritten as (X & (C2 << C1)) != 0.  */
	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
		{
		  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype,
					 arg01, arg001);
		  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype,
					 arg000, tem);
		  return fold_build2_loc (loc, code, type, tem, arg1);
		}
	      /* Otherwise, for signed (arithmetic) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
	      else if (!TYPE_UNSIGNED (itype))
		return fold_build2_loc (loc,
					code == EQ_EXPR ? GE_EXPR : LT_EXPR,
					type, arg000,
					build_int_cst (itype, 0));
	      /* Otherwise, for unsigned (logical) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
	      else
		return omit_one_operand_loc (loc, type,
					     code == EQ_EXPR
					     ? integer_one_node
					     : integer_zero_node,
					     arg000);
	    }
	}
      /* If this is an NE comparison of zero with an AND of one, remove the
	 comparison since the AND will give the correct value.  */
      if (code == NE_EXPR
	  && integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1)))
	return fold_convert_loc (loc, type, arg0);
      /* If we have (A & C) == C where C is a power of 2, convert this into
	 (A & C) != 0.  Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				type, arg0,
				fold_convert_loc (loc, TREE_TYPE (arg0),
						  integer_zero_node));
      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
	 bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
      if (tem)
	return tem;
      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
				       TREE_TYPE (TREE_OPERAND (arg0, 1)),
				       TREE_OPERAND (arg0, 1));
	  tree dandnotc = fold_build2_loc (loc, BIT_AND_EXPR,
					   TREE_TYPE (arg0),
					   arg1, notc);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (dandnotc))
	    return omit_one_operand_loc (loc, type, rslt, arg0);
	}
      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notd = fold_build1_loc (loc, BIT_NOT_EXPR,
				       TREE_TYPE (arg1), arg1);
	  tree candnotd = fold_build2_loc (loc, BIT_AND_EXPR,
					   TREE_TYPE (arg0),
					   TREE_OPERAND (arg0, 1), notd);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (candnotd))
	    return omit_one_operand_loc (loc, type, rslt, arg0);
	}
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if ((TREE_CODE (arg0) == COMPONENT_REF
	   || TREE_CODE (arg0) == BIT_FIELD_REF)
	  /* Handle the constant case even without -O
	     to make sure the warnings are given.  */
	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
	{
	  t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
	  if (t1)
	    return t1;
	}
      /* Optimize comparisons of strlen vs zero to a compare of the
	 first character of the string vs zero.  To wit,
		strlen(ptr) == 0   =>  *ptr == 0
		strlen(ptr) != 0   =>  *ptr != 0
	 Other cases should reduce to one of these two (or a constant)
	 due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
	  && integer_zerop (arg1))
	{
	  tree fndecl = get_callee_fndecl (arg0);

	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
	      && call_expr_nargs (arg0) == 1
	      && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
		 == POINTER_TYPE)
	    {
	      tree iref = build_fold_indirect_ref_loc (loc,
						       CALL_EXPR_ARG (arg0, 0));
	      return fold_build2_loc (loc, code, type, iref,
				      build_int_cst (TREE_TYPE (iref), 0));
	    }
	}
      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
      if (TREE_CODE (arg0) == RSHIFT_EXPR
	  && integer_zerop (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree itype = TREE_TYPE (arg00);
	  if (TREE_INT_CST_HIGH (arg01) == 0
	      && TREE_INT_CST_LOW (arg01)
		 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
	    {
	      if (TYPE_UNSIGNED (itype))
		{
		  itype = signed_type_for (itype);
		  arg00 = fold_convert_loc (loc, itype, arg00);
		}
	      return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				      type, arg00, build_int_cst (itype, 0));
	    }
	}
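      /* Illustrative example (added commentary): for a 32-bit int x,
	 (x >> 31) != 0 folds to x < 0 and (x >> 31) == 0 folds to x >= 0;
	 an unsigned operand is first converted to the corresponding
	 signed type.  */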
      /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
      if (integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_XOR_EXPR)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg0, 1));

      /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				build_int_cst (TREE_TYPE (arg1), 0));
      /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
				build_int_cst (TREE_TYPE (arg1), 0));

      /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, BIT_XOR_EXPR,
						 TREE_TYPE (arg1),
						 TREE_OPERAND (arg0, 1),
						 arg1));
      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
	 (X & C) == 0 when C is a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
				 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
				 TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type, tem,
				  fold_convert_loc (loc, TREE_TYPE (arg0),
						    arg1));
	}
      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
	 constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type, arg00,
				  build_int_cst (TREE_TYPE (arg00), 0));
	}
      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
	 when C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
				 arg000, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type, tem,
				  build_int_cst (TREE_TYPE (tem), 0));
	}
      if (integer_zerop (arg1)
	  && tree_expr_nonzero_p (arg0))
	{
	  tree res = constant_boolean_node (code == NE_EXPR, type);
	  return omit_one_operand_loc (loc, type, res, arg0);
	}
      /* Fold -X op -Y as X op Y, where op is eq/ne.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, code, type,
				TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 0));
      /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR,
								      itype,
								      arg00,
								      arg10),
						     arg01),
				    build_int_cst (itype, 0));

	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR,
								      itype,
								      arg00,
								      arg11),
						     arg01),
				    build_int_cst (itype, 0));

	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR,
								      itype,
								      arg01,
								      arg10),
						     arg00),
				    build_int_cst (itype, 0));

	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR,
								      itype,
								      arg01,
								      arg11),
						     arg00),
				    build_int_cst (itype, 0));
	}
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
	     operand_equal_p guarantees no side-effects so we don't need
	     to use omit_one_operand on Z.  */
	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2_loc (loc, code, type, arg00, arg10);
	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2_loc (loc, code, type, arg00, arg11);
	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2_loc (loc, code, type, arg01, arg10);
	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2_loc (loc, code, type, arg01, arg11);

	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
	  if (TREE_CODE (arg01) == INTEGER_CST
	      && TREE_CODE (arg11) == INTEGER_CST)
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_XOR_EXPR, itype,
						     arg00,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR,
								      itype,
								      arg01,
								      arg11)),
				    arg10);
	}
      /* Attempt to simplify equality/inequality comparisons of complex
	 values.  Only lower the comparison if the result is known or
	 can be simplified to a single scalar comparison.  */
      if ((TREE_CODE (arg0) == COMPLEX_EXPR
	   || TREE_CODE (arg0) == COMPLEX_CST)
	  && (TREE_CODE (arg1) == COMPLEX_EXPR
	      || TREE_CODE (arg1) == COMPLEX_CST))
	{
	  tree real0, imag0, real1, imag1;
	  tree rcond, icond;

	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
	    {
	      real0 = TREE_OPERAND (arg0, 0);
	      imag0 = TREE_OPERAND (arg0, 1);
	    }
	  else
	    {
	      real0 = TREE_REALPART (arg0);
	      imag0 = TREE_IMAGPART (arg0);
	    }

	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
	    {
	      real1 = TREE_OPERAND (arg1, 0);
	      imag1 = TREE_OPERAND (arg1, 1);
	    }
	  else
	    {
	      real1 = TREE_REALPART (arg1);
	      imag1 = TREE_IMAGPART (arg1);
	    }

	  rcond = fold_binary_loc (loc, code, type, real0, real1);
	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
	    {
	      if (integer_zerop (rcond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_false_node,
						  imag0, imag1);
		  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_true_node,
						  imag0, imag1);
		  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
		}
	    }

	  icond = fold_binary_loc (loc, code, type, imag0, imag1);
	  if (icond && TREE_CODE (icond) == INTEGER_CST)
	    {
	      if (integer_zerop (icond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_false_node,
						  real0, real1);
		  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_true_node,
						  real0, real1);
		  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
		}
	    }
	}

      return NULL_TREE;
    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;
      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	       && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
	      || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  enum tree_code code0 = TREE_CODE (arg0);
	  int is_positive;

	  if (TREE_CODE (arg01) == REAL_CST)
	    is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
	  else
	    is_positive = tree_int_cst_sgn (arg01);

	  /* (X - c) > X becomes false.  */
	  if (code == GT_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that (X - c) > X "
					"is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Likewise (X + c) < X becomes false.  */
	  if (code == LT_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) < X is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Convert (X - c) <= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == LE_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X - c) <= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  /* Convert (X + c) >= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == GE_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) >= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }
	  if (TREE_CODE (arg01) == INTEGER_CST)
	    {
	      /* Convert X + c > X and X - c < X to true for integers.  */
	      if (code == GT_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) > X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      if (code == LT_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) < X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      /* Convert X + c <= X and X - c >= X to false for integers.  */
	      if (code == LE_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) <= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}

	      if (code == GE_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) >= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}
	    }
	}
12894 /* Comparisons with the highest or lowest possible integer of
12895 the specified precision will have known values. */
12897 tree arg1_type
= TREE_TYPE (arg1
);
	unsigned int width = TYPE_PRECISION (arg1_type);

	if (TREE_CODE (arg1) == INTEGER_CST
	    && width <= 2 * HOST_BITS_PER_WIDE_INT
	    && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
	  {
	    HOST_WIDE_INT signed_max_hi;
	    unsigned HOST_WIDE_INT signed_max_lo;
	    unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;

	    if (width <= HOST_BITS_PER_WIDE_INT)
	      {
		signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		signed_max_hi = 0;
		max_hi = 0;

		if (TYPE_UNSIGNED (arg1_type))
		  {
		    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_lo = 0;
		    min_hi = 0;
		  }
		else
		  {
		    max_lo = signed_max_lo;
		    min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
		    min_hi = -1;
		  }
	      }
	    else
	      {
		width -= HOST_BITS_PER_WIDE_INT;
		signed_max_lo = -1;
		signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		max_lo = -1;
		min_lo = 0;

		if (TYPE_UNSIGNED (arg1_type))
		  {
		    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_hi = 0;
		  }
		else
		  {
		    max_hi = signed_max_hi;
		    min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
		  }
	      }

	    if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
		&& TREE_INT_CST_LOW (arg1) == max_lo)
	      switch (code)
		{
		case GT_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_zero_node, arg0);

		case GE_EXPR:
		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

		case LE_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_one_node, arg0);

		case LT_EXPR:
		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

		/* The GE_EXPR and LT_EXPR cases above are not normally
		   reached because of previous transformations.  */

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == max_hi
		     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
	      switch (code)
		{
		case GT_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, EQ_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		case LE_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, NE_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo)
	      switch (code)
		{
		case LT_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_zero_node, arg0);

		case LE_EXPR:
		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

		case GE_EXPR:
		  return omit_one_operand_loc (loc, type,
					       integer_one_node, arg0);

		case GT_EXPR:
		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
	      switch (code)
		{
		case GE_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
		  return fold_build2_loc (loc, NE_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		case LT_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
		  return fold_build2_loc (loc, EQ_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		default:
		  break;
		}

	    else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
		     && TREE_INT_CST_LOW (arg1) == signed_max_lo
		     && TYPE_UNSIGNED (arg1_type)
		     /* We will flip the signedness of the comparison operator
			associated with the mode of arg1, so the sign bit is
			specified by this mode.  Check that arg1 is the signed
			max associated with this sign bit.  */
		     && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
		     /* signed_type does not work on pointer types.  */
		     && INTEGRAL_TYPE_P (arg1_type))
	      {
		/* The following case also applies to X < signed_max+1
		   and X >= signed_max+1 because previous transformations.  */
		if (code == LE_EXPR || code == GT_EXPR)
		  {
		    tree st = signed_type_for (TREE_TYPE (arg1));
		    return fold_build2_loc (loc,
					    code == LE_EXPR
					    ? GE_EXPR : LT_EXPR,
					    type,
					    fold_convert_loc (loc, st, arg0),
					    build_int_cst (st, 0));
		  }
	      }
	  }
      }
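      /* Illustrative worked example (not from the original source): with an
	 8-bit unsigned ARG1_TYPE, max_lo is 255, so "x > 255" folds to
	 constant 0 and "x <= 255" to constant 1, while "x > 254" (max - 1)
	 is rewritten as "x == 255".  */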
      /* If we are comparing an ABS_EXPR with a constant, we can
	 convert all the cases into explicit comparisons, but they may
	 well not be faster than doing the ABS and one comparison.
	 But ABS (X) <= C is a range comparison, which becomes a subtraction
	 and a comparison, and is probably faster.  */
      if (code == LE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == ABS_EXPR
	  && ! TREE_SIDE_EFFECTS (arg0)
	  && (0 != (tem = negate_expr (arg1)))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				build2 (GE_EXPR, type,
					TREE_OPERAND (arg0, 0), tem),
				build2 (LE_EXPR, type,
					TREE_OPERAND (arg0, 0), arg1));
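      /* For example, ABS (x) <= 5 becomes x >= -5 && x <= 5, trading the
	 ABS for one extra comparison that usually lowers to a single
	 subtract-and-compare range check.  */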
      /* Convert ABS_EXPR<x> >= 0 to true.  */
      strict_overflow_p = false;
      if (code == GE_EXPR
	  && (integer_zerop (arg1)
	      || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
		  && real_zerop (arg1)))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
	}

      /* Convert ABS_EXPR<x> < 0 to false.  */
      strict_overflow_p = false;
      if (code == LT_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
	}
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
	 and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TREE_CODE (arg1) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (arg1, 0)))
	return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			   build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
				   TREE_OPERAND (arg1, 1)),
			   build_int_cst (TREE_TYPE (arg0), 0));
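      /* E.g. for unsigned x with y == 4: "x < (1 << 4)" holds exactly when
	 the bits above bit 3 are all zero, so the fold yields
	 "(x >> 4) == 0" (for x == 19, 19 >> 4 == 1, so 19 < 16 is
	 correctly false).  */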
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && CONVERT_EXPR_P (arg1)
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
	{
	  tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
			TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
	  return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			     fold_convert_loc (loc, TREE_TYPE (arg0), tem),
			     build_int_cst (TREE_TYPE (arg0), 0));
	}

      return NULL_TREE;
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
	{
	  t1 = fold_relational_const (code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
	}

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand_loc (loc, type, t1, arg1);
	}

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand_loc (loc, type, t1, arg0);
	}
      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
	  && !flag_trapping_math
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (0, type);
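      /* Note the asymmetry (illustrative): UNEQ (x, x) is 1 even when x is
	 NaN, because a NaN compares unordered to itself, whereas
	 LTGT (x, x) is 0 and may only be folded when trapping math is
	 disabled.  */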
      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
	tree targ0 = strip_float_extensions (arg0);
	tree targ1 = strip_float_extensions (arg1);
	tree newtype = TREE_TYPE (targ0);

	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	  newtype = TREE_TYPE (targ1);

	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, newtype, targ0),
				  fold_convert_loc (loc, newtype, targ1));
      }

      return NULL_TREE;
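      /* For example, with float f1, f2, the comparison
	 (double) f1 < (double) f2 is folded to f1 < f2: widening both
	 sides of a comparison cannot change its result.  */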
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
	 nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
				 : fold_convert_loc (loc, type, arg1);
      return pedantic_non_lvalue_loc (loc, tem);
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg0) == REAL_CST
	   && TREE_CODE (arg1) == REAL_CST)
	  || (TREE_CODE (arg0) == INTEGER_CST
	      && TREE_CODE (arg1) == INTEGER_CST))
	return build_complex (type, arg0, arg1);
      return NULL_TREE;

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.

   This is a wrapper around fold_binary_loc_1 (which does the
   actual folding).  Set the EXPR_FOLDED flag of the folded expression
   if folding is successful.  */

tree
fold_binary_loc (location_t loc,
		 enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary_loc_1 (loc, code, type, op0, op1);
  if (tem)
    set_expr_folded_flag (tem);
  return tem;
}
/* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
   a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
   of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;

    case GOTO_EXPR:
      *walk_subtrees = 0;

      /* ... fall through ...  */

    default:
      return NULL_TREE;
    }
}

/* Return whether the sub-tree ST contains a label which is accessible from
   outside the sub-tree.  */

static bool
contains_label_p (tree st)
{
  return
   (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
}
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

static tree
fold_ternary_loc_1 (location_t loc, enum tree_code code, tree type,
		    tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 3);

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any cases,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */

  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  if (op2)
    {
      arg2 = op2;
      STRIP_NOPS (arg2);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx,
				    field, value)
	    if (field == arg1)
	      return value;
	}
      return NULL_TREE;
    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
	 so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
	  tem = integer_zerop (arg0) ? op2 : op1;
	  /* Only optimize constant conditions when the selected branch
	     has the same type as the COND_EXPR.  This avoids optimizing
	     away "c ? x : throw", where the throw has a void type.
	     Avoid throwing away that operand which contains label.  */
	  if ((!TREE_SIDE_EFFECTS (unused_op)
	       || !contains_label_p (unused_op))
	      && (! VOID_TYPE_P (TREE_TYPE (tem))
		  || VOID_TYPE_P (type)))
	    return pedantic_non_lvalue_loc (loc, tem);
	  return NULL_TREE;
	}

      if (operand_equal_p (arg1, op2, 0))
	return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);

      /* If we have A op B ? A : C, we may be able to convert this to a
	 simpler expression, depending on the operation and the values
	 of B and C.  Signed zeros prevent all of these transformations,
	 for reasons given above each one.

	 Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     arg1, TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
	{
	  tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
	  if (tem)
	    return tem;
	}

      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     op2,
					     TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
	{
	  tem = fold_truth_not_expr (loc, arg0);
	  if (tem && COMPARISON_CLASS_P (tem))
	    {
	      tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
	      if (tem)
		return tem;
	    }
	}

      /* If the second operand is simpler than the third, swap them
	 since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
	  && tree_swap_operands_p (op1, op2, false))
	{
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = fold_truth_not_expr (loc, arg0);
	  if (tem)
	    return fold_build3_loc (loc, code, type, tem, op2, op1);
	}

      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (op1)
	  && integer_zerop (op2)
	  /* If we try to convert OP0 to our type, the
	     call to fold will try to move the conversion inside
	     a COND, which will recurse.  In that case, the COND_EXPR
	     is probably the best choice, so leave it alone.  */
	  && type == TREE_TYPE (arg0))
	return pedantic_non_lvalue_loc (loc, arg0);
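      /* E.g. "(a > b) ? 1 : 0" of the right type simply becomes
	 "a > b".  */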
      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
	 over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
	  && integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0)))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
						  invert_truthvalue_loc (loc,
									 arg0)));

      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
	{
	  /* sign_bit_p only checks ARG1 bits within A's precision.
	     If <sign bit of A> has wider type than A, bits outside
	     of A's precision in <sign bit of A> need to be checked.
	     If they are all 0, this optimization needs to be done
	     in unsigned A's type, if they are all 1 in signed A's type,
	     otherwise this can't be done.  */
	  if (TYPE_PRECISION (TREE_TYPE (tem))
	      < TYPE_PRECISION (TREE_TYPE (arg1))
	      && TYPE_PRECISION (TREE_TYPE (tem))
		 < TYPE_PRECISION (type))
	    {
	      unsigned HOST_WIDE_INT mask_lo;
	      HOST_WIDE_INT mask_hi;
	      int inner_width, outer_width;
	      tree tem_type;

	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
	      if (outer_width > TYPE_PRECISION (type))
		outer_width = TYPE_PRECISION (type);

	      if (outer_width > HOST_BITS_PER_WIDE_INT)
		{
		  mask_hi = ((unsigned HOST_WIDE_INT) -1
			     >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
		  mask_lo = -1;
		}
	      else
		{
		  mask_hi = 0;
		  mask_lo = ((unsigned HOST_WIDE_INT) -1
			     >> (HOST_BITS_PER_WIDE_INT - outer_width));
		}
	      if (inner_width > HOST_BITS_PER_WIDE_INT)
		{
		  mask_hi &= ~((unsigned HOST_WIDE_INT) -1
			       >> (HOST_BITS_PER_WIDE_INT - inner_width));
		  mask_lo = 0;
		}
	      else
		mask_lo &= ~((unsigned HOST_WIDE_INT) -1
			     >> (HOST_BITS_PER_WIDE_INT - inner_width));

	      if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
		  && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
		{
		  tem_type = signed_type_for (TREE_TYPE (tem));
		  tem = fold_convert_loc (loc, tem_type, tem);
		}
	      else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
		       && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
		{
		  tem_type = unsigned_type_for (TREE_TYPE (tem));
		  tem = fold_convert_loc (loc, tem_type, tem);
		}
	      else
		tem = NULL;
	    }

	  if (tem)
	    return
	      fold_convert_loc (loc, type,
				fold_build2_loc (loc, BIT_AND_EXPR,
						 TREE_TYPE (tem), tem,
						 fold_convert_loc (loc,
							      TREE_TYPE (tem),
							      arg0)));
	}
      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
	 already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && integer_pow2p (arg1))
	{
	  tree tem = TREE_OPERAND (arg0, 0);
	  STRIP_NOPS (tem);
	  if (TREE_CODE (tem) == RSHIFT_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
	      && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
		 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
	    return fold_build2_loc (loc, BIT_AND_EXPR, type,
				    TREE_OPERAND (tem, 0), arg1);
	}

      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
	 is probably obsolete because the first operand should be a
	 truth value (that's why we have the two cases above), but let's
	 leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
	  && TREE_CODE (arg0) == NE_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_pow2p (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      arg1, OEP_ONLY_CONST))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));
      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				fold_convert_loc (loc, type, arg0),
				arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	{
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_truth_not_expr (loc, arg0);
	  if (tem)
	    return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
				    fold_convert_loc (loc, type, tem),
				    arg1);
	}

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	{
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_truth_not_expr (loc, arg0);
	  if (tem)
	    return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				    fold_convert_loc (loc, type, tem),
				    op2);
	}

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (integer_onep (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
				fold_convert_loc (loc, type, arg0),
				op2);

      return NULL_TREE;
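      /* Taken together, these rewrite, for truth values a and b:
	   a ? b : 0  ->  a && b	a ? b : 1  ->  !a || b
	   a ? 0 : b  ->  !a && b	a ? 1 : b  ->  a || b
	 (the !a forms only when a is cheaply invertible).  */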
    case CALL_EXPR:
      /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
	 of fold_ternary on them.  */
      gcc_unreachable ();

    case BIT_FIELD_REF:
      if ((TREE_CODE (arg0) == VECTOR_CST
	   || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
	  && type == TREE_TYPE (TREE_TYPE (arg0)))
	{
	  unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
	  unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);

	  if (width != 0
	      && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
	      && (idx % width) == 0
	      && (idx = idx / width)
		 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
	    {
	      tree elements = NULL_TREE;

	      if (TREE_CODE (arg0) == VECTOR_CST)
		elements = TREE_VECTOR_CST_ELTS (arg0);
	      else
		{
		  unsigned HOST_WIDE_INT idx;
		  tree value;

		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0),
					      idx, value)
		    elements = tree_cons (NULL_TREE, value, elements);
		}
	      while (idx-- > 0 && elements)
		elements = TREE_CHAIN (elements);
	      if (elements)
		return TREE_VALUE (elements);
	      else
		return build_zero_cst (type);
	    }
	}

      /* A bit-field-ref that referenced the full argument can be stripped.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	  && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
	  && integer_zerop (op2))
	return fold_convert_loc (loc, type, arg0);

      return NULL_TREE;
    case FMA_EXPR:
      /* For integers we can decompose the FMA if possible.  */
      if (TREE_CODE (arg0) == INTEGER_CST
	  && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, PLUS_EXPR, type,
				const_binop (MULT_EXPR, arg0, arg1), arg2);
      if (integer_zerop (arg2))
	return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);

      return fold_fma (loc, type, arg0, arg1, arg2);
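      /* E.g. an FMA of the integer constants 3, 4, 5 is decomposed into
	 3 * 4 + 5 and folded to 17, and FMA (a, b, 0) becomes a * b.  */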
    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.

   This is a wrapper around fold_ternary_loc_1 (which does the
   actual folding).  Set the EXPR_FOLDED flag of the folded expression
   if folding is successful.  */

tree
fold_ternary_loc (location_t loc, enum tree_code code, tree type,
		  tree op0, tree op1, tree op2)
{
  tree tem = fold_ternary_loc_1 (loc, code, type, op0, op1, op2);
  if (tem)
    set_expr_folded_flag (tem);
  return tem;
}
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;
  location_t loc = EXPR_LOCATION (expr);

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  /* CALL_EXPR-like objects with variable numbers of operands are
     treated specially.  */
  if (kind == tcc_vl_exp)
    {
      if (code == CALL_EXPR)
	{
	  tem = fold_call_expr (loc, expr, false);
	  return tem ? tem : expr;
	}
      return expr;
    }

  if (IS_EXPR_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  op0 = TREE_OPERAND (t, 0);
	  tem = fold_unary_loc (loc, code, type, op0);
	  return tem ? tem : expr;
	case 2:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  tem = fold_binary_loc (loc, code, type, op0, op1);
	  return tem ? tem : expr;
	case 3:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  op2 = TREE_OPERAND (t, 2);
	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
	  return tem ? tem : expr;
	default:
	  break;
	}
    }

  switch (code)
    {
    case ARRAY_REF:
      {
	tree op0 = TREE_OPERAND (t, 0);
	tree op1 = TREE_OPERAND (t, 1);

	if (TREE_CODE (op1) == INTEGER_CST
	    && TREE_CODE (op0) == CONSTRUCTOR
	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
	  {
	    VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
	    unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
	    unsigned HOST_WIDE_INT begin = 0;

	    /* Find a matching index by means of a binary search.  */
	    while (begin != end)
	      {
		unsigned HOST_WIDE_INT middle = (begin + end) / 2;
		tree index = VEC_index (constructor_elt, elts, middle)->index;

		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_lt (index, op1))
		  begin = middle + 1;
		else if (TREE_CODE (index) == INTEGER_CST
			 && tree_int_cst_lt (op1, index))
		  end = middle;
		else if (TREE_CODE (index) == RANGE_EXPR
			 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
		  begin = middle + 1;
		else if (TREE_CODE (index) == RANGE_EXPR
			 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
		  end = middle;
		else
		  return VEC_index (constructor_elt, elts, middle)->value;
	      }
	  }

	return t;
      }

    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
static void fold_check_failed (const_tree, const_tree);
void print_fold_checksum (const_tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}

void
print_fold_checksum (const_tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_delete (ht);
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}

static void
fold_check_failed (const_tree expr ATTRIBUTE_UNUSED,
		   const_tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
static void
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
{
  void **slot;
  enum tree_code code;
  union tree_node buf;
  int i, len;

 recursive_label:

  gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
	       <= sizeof (struct tree_function_decl))
	      && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
  if (expr == NULL)
    return;
  slot = (void **) htab_find_slot (ht, expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = CONST_CAST_TREE (expr);
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
      expr = (tree) &buf;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr)
	       || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)
	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
	       || TYPE_NEXT_VARIANT (expr)))
    {
      /* Allow these fields to be modified.  */
      tree tmp;
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = tmp = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
      TYPE_POINTER_TO (tmp) = NULL;
      TYPE_REFERENCE_TO (tmp) = NULL;
      TYPE_NEXT_VARIANT (tmp) = NULL;
      if (TYPE_CACHED_VALUES_P (tmp))
	{
	  TYPE_CACHED_VALUES_P (tmp) = 0;
	  TYPE_CACHED_VALUES (tmp) = NULL;
	}
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST
      && code != SSA_NAME)
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  expr = TREE_CHAIN (expr);
	  goto recursive_label;
	  break;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
    case tcc_vl_exp:
      len = TREE_OPERAND_LENGTH (expr);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
	{
	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
	}
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
	fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);

      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
	{
	  fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
	  fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
	}
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
/* Helper function for outputting the checksum of a tree T.  When
   debugging with gdb, you can "define mynext" to be "next" followed
   by "call debug_fold_checksum (op0)", then just trace down till the
   outputs differ.  */

DEBUG_FUNCTION void
debug_fold_checksum (const_tree t)
{
  int i;
  unsigned char checksum[16];
  struct md5_ctx ctx;
  htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);

  md5_init_ctx (&ctx);
  fold_checksum_tree (t, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_empty (ht);

  for (i = 0; i < 16; i++)
    fprintf (stderr, "%d ", checksum[i]);

  fprintf (stderr, "\n");
}

#endif
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  LOC is the location of the resulting expression.
   Return a folded expression if successful.  Otherwise, return a tree
   expression with code CODE of type TYPE with an operand OP0.  */

tree
fold_build1_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);
#endif

  tem = fold_unary_loc (loc, code, type, op0);
  if (!tem)
    tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting
   expression.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with operands
   OP0 and OP1.  */

tree
fold_build2_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0, tree op1
		      MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_after_op0[16],
		checksum_after_op1[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);
#endif

  tem = fold_binary_loc (loc, code, type, op0, op1);
  if (!tem)
    tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_delete (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}
/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
		      tree op0, tree op1, tree op2 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_before_op2[16],
		checksum_after_op0[16],
		checksum_after_op1[16],
		checksum_after_op2[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  htab_empty (ht);
#endif

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_empty (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op2);
  htab_delete (ht);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
   arguments in ARGARRAY, and a null static chain.
   Return a folded expression if successful.  Otherwise, return a CALL_EXPR
   of type TYPE from the given operands as constructed by build_call_array.  */

tree
fold_build_call_array_loc (location_t loc, tree type, tree fn,
			   int nargs, tree *argarray)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_fn[16],
		checksum_before_arglist[16],
		checksum_after_fn[16],
		checksum_after_arglist[16];
  struct md5_ctx ctx;
  htab_t ht;
  int i;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_fn);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_arglist);
  htab_empty (ht);
#endif

  tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_fn);
  htab_empty (ht);

  if (memcmp (checksum_before_fn, checksum_after_fn, 16))
    fold_check_failed (fn, tem);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_arglist);
  htab_delete (ht);

  if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
    fold_check_failed (NULL_TREE, tem);
#endif
  return tem;
}
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;

tree
fold_build1_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1_loc (loc, code, type, op);

  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2_loc (loc, code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

tree
fold_build3_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op0, tree op1, tree op2)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build3_loc (loc, code, type, op0, op1, op2);

  END_FOLD_INIT;
  return result;
}

tree
fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
				       int nargs, tree *argarray)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

int
multiple_of_p (tree type, const_tree top, const_tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
	return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
	      > TREE_INT_CST_LOW (op1)
	      && TREE_INT_CST_HIGH (op1) == 0
	      && 0 != (t1 = fold_convert (type,
					  const_binop (LSHIFT_EXPR,
						       size_one_node,
						       op1)))
	      && !TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case COND_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
	  || integer_zerop (bottom)
	  || (TYPE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
					     top, bottom));

    default:
      return 0;
    }
}
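/* Illustrative uses (not from the original source): multiple_of_p
   returns 1 for TOP == J * 8 and BOTTOM == 4 via the MULT_EXPR
   recursion, since the constant 8 is a multiple of 4; it returns 0
   for TOP == I + 1 and BOTTOM == 2, where nothing is known about I.  */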
/* Return true if CODE or TYPE is known to be non-negative.  */

static bool
tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
{
  if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
      && truth_value_p (code))
    /* Truth values evaluate to 0 or 1, which is nonnegative unless we
       have a signed:1 type (where the value is -1 and 0).  */
    return true;

  return false;
}
/* Return true if (CODE OP0) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
	 ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!INTEGRAL_TYPE_P (type))
	return true;
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  *strict_overflow_p = true;
	  return true;
	}
      break;

    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
					    strict_overflow_p);

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (op0,
						    strict_overflow_p);
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      {
		if (TYPE_UNSIGNED (inner_type))
		  return true;
		return tree_expr_nonnegative_warnv_p (op0,
						      strict_overflow_p);
	      }
	  }
	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (op0,
						    strict_overflow_p);
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TYPE_UNSIGNED (inner_type);
	  }
      }
      break;

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				 tree op1, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type))
	return (tree_expr_nonnegative_warnv_p (op0,
					       strict_overflow_p)
		&& tree_expr_nonnegative_warnv_p (op1,
						  strict_overflow_p));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == NOP_EXPR
	  && TREE_CODE (op1) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    {
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (type);
	    }
	}
      break;
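      /* E.g. adding two zero-extended 8-bit values needs at most 9 bits,
	 so in a 16-bit (or wider) signed result the sign bit is never
	 set and the sum is known non-negative.  */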
    case MULT_EXPR:
      if (FLOAT_TYPE_P (type))
	{
	  /* x * x for floating point x is always non-negative.  */
	  if (operand_equal_p (op0, op1, 0))
	    return true;
	  return (tree_expr_nonnegative_warnv_p (op0,
						 strict_overflow_p)
		  && tree_expr_nonnegative_warnv_p (op1,
						    strict_overflow_p));
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
	  && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
	{
	  tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
	    ? TREE_TYPE (TREE_OPERAND (op0, 0))
	    : TREE_TYPE (op0);
	  tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
	    ? TREE_TYPE (TREE_OPERAND (op1, 0))
	    : TREE_TYPE (op1);

	  bool unsigned0 = TYPE_UNSIGNED (inner0);
	  bool unsigned1 = TYPE_UNSIGNED (inner1);

	  if (TREE_CODE (op0) == INTEGER_CST)
	    unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;

	  if (TREE_CODE (op1) == INTEGER_CST)
	    unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;

	  if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
	      && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
	    {
	      unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
		? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
		: TYPE_PRECISION (inner0);

	      unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
		? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
		: TYPE_PRECISION (inner1);

	      return precision0 + precision1 < TYPE_PRECISION (type);
	    }
	}
      break;
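      /* E.g. the product of two zero-extended 8-bit values needs at most
	 16 bits, so in a 32-bit signed result it cannot reach the sign
	 bit and is known non-negative.  */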
    case BIT_AND_EXPR:
    case MAX_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
					     strict_overflow_p)
	      || tree_expr_nonnegative_warnv_p (op1,
						strict_overflow_p));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (op1,
						strict_overflow_p));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
					    strict_overflow_p);
    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
						strict_overflow_p));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t));
    }
  /* We don't know sign of `t', so be conservative and return false.  */
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_call_nonnegative_warnv_p (tree type, tree fndecl,
			       tree arg0, tree arg1, bool *strict_overflow_p)
{
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
	CASE_FLT_FN (BUILT_IN_ACOS):
	CASE_FLT_FN (BUILT_IN_ACOSH):
	CASE_FLT_FN (BUILT_IN_CABS):
	CASE_FLT_FN (BUILT_IN_COSH):
	CASE_FLT_FN (BUILT_IN_ERFC):
	CASE_FLT_FN (BUILT_IN_EXP):
	CASE_FLT_FN (BUILT_IN_EXP10):
	CASE_FLT_FN (BUILT_IN_EXP2):
	CASE_FLT_FN (BUILT_IN_FABS):
	CASE_FLT_FN (BUILT_IN_FDIM):
	CASE_FLT_FN (BUILT_IN_HYPOT):
	CASE_FLT_FN (BUILT_IN_POW10):
	CASE_INT_FN (BUILT_IN_FFS):
	CASE_INT_FN (BUILT_IN_PARITY):
	CASE_INT_FN (BUILT_IN_POPCOUNT):
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
	/* Always true.  */
	return true;

	CASE_FLT_FN (BUILT_IN_SQRT):
	/* sqrt(-0.0) is -0.0.  */
	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	  return true;
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_ASINH):
	CASE_FLT_FN (BUILT_IN_ATAN):
	CASE_FLT_FN (BUILT_IN_ATANH):
	CASE_FLT_FN (BUILT_IN_CBRT):
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_ERF):
	CASE_FLT_FN (BUILT_IN_EXPM1):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_FMOD):
	CASE_FLT_FN (BUILT_IN_FREXP):
	CASE_FLT_FN (BUILT_IN_LCEIL):
	CASE_FLT_FN (BUILT_IN_LDEXP):
	CASE_FLT_FN (BUILT_IN_LFLOOR):
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	CASE_FLT_FN (BUILT_IN_LLRINT):
	CASE_FLT_FN (BUILT_IN_LLROUND):
	CASE_FLT_FN (BUILT_IN_LRINT):
	CASE_FLT_FN (BUILT_IN_LROUND):
	CASE_FLT_FN (BUILT_IN_MODF):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_SCALB):
	CASE_FLT_FN (BUILT_IN_SCALBLN):
	CASE_FLT_FN (BUILT_IN_SCALBN):
	CASE_FLT_FN (BUILT_IN_SIGNBIT):
	CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
	CASE_FLT_FN (BUILT_IN_SINH):
	CASE_FLT_FN (BUILT_IN_TANH):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	/* True if the 1st argument is nonnegative.  */
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_FMAX):
	/* True if the 1st OR 2nd arguments are nonnegative.  */
	return (tree_expr_nonnegative_warnv_p (arg0,
					       strict_overflow_p)
		|| (tree_expr_nonnegative_warnv_p (arg1,
						   strict_overflow_p)));

	CASE_FLT_FN (BUILT_IN_FMIN):
	/* True if the 1st AND 2nd arguments are nonnegative.  */
	return (tree_expr_nonnegative_warnv_p (arg0,
					       strict_overflow_p)
		&& (tree_expr_nonnegative_warnv_p (arg1,
						   strict_overflow_p)));

	CASE_FLT_FN (BUILT_IN_COPYSIGN):
	/* True if the 2nd argument is nonnegative.  */
	return tree_expr_nonnegative_warnv_p (arg1,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_POWI):
	/* True if the 1st argument is nonnegative or the second
	   argument is an even integer.  */
	if (TREE_CODE (arg1) == INTEGER_CST
	    && (TREE_INT_CST_LOW (arg1) & 1) == 0)
	  return true;
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_POW):
	/* True if the 1st argument is nonnegative or the second
	   argument is an even integer valued real.  */
	if (TREE_CODE (arg1) == REAL_CST)
	  {
	    REAL_VALUE_TYPE c;
	    HOST_WIDE_INT n;

	    c = TREE_REAL_CST (arg1);
	    n = real_to_integer (&c);
	    if ((n & 1) == 0)
	      {
		REAL_VALUE_TYPE cint;
		real_from_integer (&cint, VOIDmode, n,
				   n < 0 ? -1 : 0, 0);
		if (real_identical (&c, &cint))
		  return true;
	      }
	  }
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

      default:
	break;
      }
  return tree_simple_nonnegative_warnv_p (CALL_EXPR,
					  type);
}
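/* E.g. pow (x, 2.0) is known non-negative regardless of x, because 2.0
   is an even integer valued real; pow (x, 3.0) is non-negative only if
   x is.  */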
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
	tree temp = TARGET_EXPR_SLOT (t);
	t = TARGET_EXPR_INITIAL (t);

	/* If the initializer is non-void, then it's a normal expression
	   that will be assigned to the slot.  */
	if (!VOID_TYPE_P (t))
	  return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

	/* Otherwise, the initializer sets the slot in some way.  One common
	   way is an assignment statement at the end of the initializer.  */
	while (1)
	  {
	    if (TREE_CODE (t) == BIND_EXPR)
	      t = expr_last (BIND_EXPR_BODY (t));
	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
		     || TREE_CODE (t) == TRY_CATCH_EXPR)
	      t = expr_last (TREE_OPERAND (t, 0));
	    else if (TREE_CODE (t) == STATEMENT_LIST)
	      t = expr_last (t);
	    else
	      break;
	  }
	if (TREE_CODE (t) == MODIFY_EXPR
	    && TREE_OPERAND (t, 0) == temp)
	  return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						strict_overflow_p);

	return false;
      }

    case CALL_EXPR:
      {
	tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
	tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;

	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
					      get_callee_fndecl (t),
					      arg0,
					      arg1,
					      strict_overflow_p);
      }
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					    strict_overflow_p);
    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
					    strict_overflow_p);
    case SAVE_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
					    strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code;
  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);
    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
    }
}
/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-negative"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
/* Return true when (CODE OP0) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
			    bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
		&& tree_expr_nonzero_warnv_p (op0,
					      strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    default:
      break;
    }

  return false;
}
/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonzero_warnv_p (enum tree_code code,
			     tree type,
			     tree op0,
			     tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* With the presence of negative values it is hard
	     to say something.  */
	  sub_strict_overflow_p = false;
	  if (!tree_expr_nonnegative_warnv_p (op0,
					      &sub_strict_overflow_p)
	      || !tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	    return false;
	  /* One of operands must be positive and the other non-negative.  */
	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
	     overflows, on a twos-complement machine the sum of two
	     nonnegative numbers can never be zero.  */
	  return (tree_expr_nonzero_warnv_p (op0,
					     strict_overflow_p)
		  || tree_expr_nonzero_warnv_p (op1,
						strict_overflow_p));
	}
      break;
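      /* E.g. if a > 0 and b >= 0 are both known, a + b cannot be zero
	 even if the sum wraps: two's-complement addition of a positive
	 and a non-negative value only yields zero when both operands
	 are zero.  */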
    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  if (tree_expr_nonzero_warnv_p (op0,
					 strict_overflow_p)
	      && tree_expr_nonzero_warnv_p (op1,
					    strict_overflow_p))
	    {
	      *strict_overflow_p = true;
	      return true;
	    }
	}
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (op1,
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;

	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p))
	    return true;

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_warnv_p (op0,
						strict_overflow_p);
	}
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
					  &sub_strict_overflow_p)
	       && tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p)
	      || tree_expr_nonzero_warnv_p (op0,
					    strict_overflow_p));

    default:
      break;
    }

  return false;
}
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
        tree base = TREE_OPERAND (t, 0);
        if (!DECL_P (base))
          base = get_base_address (base);

        if (!base)
          return false;

        /* Weak declarations may link to NULL.  Other things may also be NULL
           so protect with -fdelete-null-pointer-checks; but not variables
           allocated on the stack.  */
        if (DECL_P (base)
            && (flag_delete_null_pointer_checks
                || (DECL_CONTEXT (base)
                    && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
                    && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
          return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);

        /* Constants are never weak.  */
        if (CONSTANT_CLASS_P (base))
          return true;

        return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                     &sub_strict_overflow_p)
          && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
                                        &sub_strict_overflow_p))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return true;
        }
      break;

    default:
      break;
    }

  return false;
}
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
                                         strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
                                          TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1),
                                          strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                        strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
                                        strict_overflow_p);

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }

  return false;
}
/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

static bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
                            "determining that expression is always "
                            "nonzero"),
                           WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
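
/* Usage sketch (illustrative only, not part of GCC): a literal constant
   is trivially nonzero, so the query succeeds without assuming anything
   about signed overflow and no warning is queued.  */
#if 0
  tree one = build_int_cst (integer_type_node, 1);
  if (tree_expr_nonzero_p (one))
    {
      /* Reached: the INTEGER_CST case in tree_single_nonzero_warnv_p
         answers directly via integer_zerop.  */
    }
#endif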
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
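
/* Usage sketch (illustrative only, not part of GCC): folding 2 + 3 to
   the constant 5.  With a non-constant operand, fold_binary produces a
   non-constant tree (or nothing), so the wrapper returns NULL_TREE.  */
#if 0
  tree a = build_int_cst (integer_type_node, 2);
  tree b = build_int_cst (integer_type_node, 3);
  tree sum = fold_binary_to_constant (PLUS_EXPR, integer_type_node, a, b);
  /* sum is the INTEGER_CST 5.  */
#endif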
/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
        string = string_constant (exp1, &index);
      else
        {
          tree low_bound = array_ref_low_bound (exp);
          index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

          /* Optimize the special-case of a zero lower bound.

             We convert the low_bound to sizetype to avoid some problems
             with constant folding.  (E.g. suppose the lower bound is 1,
             and its mode is QI.  Without the conversion, (ARRAY
             +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
             +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
          if (! integer_zerop (low_bound))
            index = size_diffop_loc (loc, index,
                                     fold_convert_loc (loc, sizetype,
                                                       low_bound));

          string = exp1;
        }

      if (string
          && TYPE_MODE (TREE_TYPE (exp))
             == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
          && TREE_CODE (string) == STRING_CST
          && TREE_CODE (index) == INTEGER_CST
          && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
          && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
              == MODE_INT)
          && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
        return build_int_cst_type (TREE_TYPE (exp),
                                   (TREE_STRING_POINTER (string)
                                    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
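
/* Source-level view of the folding above (illustrative only): an indexed
   read from a string literal with a constant, in-bounds index becomes a
   character constant.  */
#if 0
  char c = "abc"[1];   /* folded to: char c = 'b';  */
#endif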
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        double_int val = tree_to_double_int (arg0);
        int overflow = neg_double (val.low, val.high, &val.low, &val.high);

        t = force_fit_type_double (type, val, 1,
                                   (overflow | TREE_OVERFLOW (arg0))
                                   && !TYPE_UNSIGNED (type));
        break;
      }

    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
        FIXED_VALUE_TYPE f;
        bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
                                            &(TREE_FIXED_CST (arg0)), NULL,
                                            TYPE_SATURATING (type));
        t = build_fixed (type, f);
        /* Propagate overflow flags.  */
        if (overflow_p | TREE_OVERFLOW (arg0))
          TREE_OVERFLOW (t) = 1;
        break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
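
/* Usage sketch (illustrative only, not part of GCC): negating the
   INTEGER_CST 1 yields -1; negating the most negative value of a signed
   type wraps, and force_fit_type_double then sets TREE_OVERFLOW on the
   result.  */
#if 0
  tree m1 = fold_negate_const (build_int_cst (integer_type_node, 1),
                               integer_type_node);
  /* m1 is the INTEGER_CST -1.  */
#endif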
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        double_int val = tree_to_double_int (arg0);

        /* If the value is unsigned or non-negative, then the absolute value
           is the same as the ordinary value.  */
        if (TYPE_UNSIGNED (type)
            || !double_int_negative_p (val))
          t = arg0;

        /* If the value is negative, then the absolute value is
           its negation.  */
        else
          {
            int overflow;

            overflow = neg_double (val.low, val.high, &val.low, &val.high);
            t = force_fit_type_double (type, val, -1,
                                       overflow | TREE_OVERFLOW (arg0));
          }
      }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
        t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
        t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (const_tree arg0, tree type)
{
  double_int val;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  val = double_int_not (tree_to_double_int (arg0));
  return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
}
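
/* Usage sketch (illustrative only, not part of GCC): ~0 is the all-ones
   value, i.e. -1 in a signed type; the overflow flag is copied from the
   input constant.  */
#if 0
  tree allones = fold_not_const (build_int_cst (integer_type_node, 0),
                                 integer_type_node);
  /* allones is the INTEGER_CST -1.  */
#endif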
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case ORDERED_EXPR:
              result = 0;
              break;

            case NE_EXPR:
            case UNORDERED_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNEQ_EXPR:
              result = 1;
              break;

            case LT_EXPR:
            case LE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
            case LTGT_EXPR:
              if (flag_trapping_math)
                return NULL_TREE;
              result = 0;
              break;

            default:
              gcc_unreachable ();
            }

          return constant_boolean_node (result, type);
        }

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
                                          TREE_REALPART (op0),
                                          TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
                                          TREE_IMAGPART (op0),
                                          TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
        return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
        return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
        return NULL_TREE;
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     Otherwise return T.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
        result = INT_CST_LT_UNSIGNED (op0, op1);
      else
        result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
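
/* Usage sketch (illustrative only, not part of GCC): comparing two
   INTEGER_CSTs yields a compile-time boolean node, while a signaling
   comparison such as LT_EXPR on a NaN under -ftrapping-math yields
   NULL_TREE instead.  */
#if 0
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  tree res = fold_relational_const (LT_EXPR, boolean_type_node, two, three);
  /* res is boolean_true_node: 2 < 3 is known at compile time.  */
#endif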
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check to see if the expression inside the
     return has no side effects or the right hand side of the modify expression
     inside the return.  If either don't have side effects set we don't need to
     wrap the expression in a cleanup point expression.  Note we don't check the
     left hand side of the modify because it should always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
        return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
        return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

static tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype)
               && (!in_gimple_form
                   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          if (in_gimple_form
              && TREE_CODE (min_val) != INTEGER_CST)
            return NULL_TREE;
          return build4_loc (loc, ARRAY_REF, type, op, min_val,
                             NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && type == TREE_TYPE (optype))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3_loc (loc, BIT_FIELD_REF, type, op,
                                  part_width, index);
        }
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
        {
          tree op00type;
          op00 = TREE_OPERAND (op00, 0);
          op00type = TREE_TYPE (op00);

          /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
          if (TREE_CODE (op00type) == VECTOR_TYPE
              && type == TREE_TYPE (op00type))
            {
              HOST_WIDE_INT offset = tree_low_cst (op01, 0);
              tree part_width = TYPE_SIZE (type);
              unsigned HOST_WIDE_INT part_widthi
                = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
              unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
              tree index = bitsize_int (indexi);

              if (offset / part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
                return fold_build3_loc (loc,
                                        BIT_FIELD_REF, type, op00,
                                        part_width, index);
            }
          /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
          else if (TREE_CODE (op00type) == COMPLEX_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree size = TYPE_SIZE_UNIT (type);
              if (tree_int_cst_equal (size, op01))
                return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
            }
          /* ((foo *)&fooarray)[1] => fooarray[1] */
          else if (TREE_CODE (op00type) == ARRAY_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree type_domain = TYPE_DOMAIN (op00type);
              tree min_val = size_zero_node;
              if (type_domain && TYPE_MIN_VALUE (type_domain))
                min_val = TYPE_MIN_VALUE (type_domain);
              op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
                                     TYPE_SIZE_UNIT (type));
              op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
              return build4_loc (loc, ARRAY_REF, type, op00, op01,
                                 NULL_TREE, NULL_TREE);
            }
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
          || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
          && TREE_CODE (min_val) != INTEGER_CST)
        return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val,
                         NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}

/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}
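
/* Source-level view of some of the simplifications above (illustrative
   only):  */
#if 0
  float a4[4];
  _Complex double z;
  float x = *(float *)&a4;    /* folded to: x = a4[0];  */
  double r = *(double *)&z;   /* folded to: r = __real__ z;  */
#endif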
/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          double_int val = tree_to_double_int (value);
          bool overflow_p;

          if ((val.low & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          val.low &= ~(divisor - 1);
          val.low += divisor;
          if (val.low == 0)
            {
              val.high++;
              if (val.high == 0)
                overflow_p = true;
            }

          return force_fit_type_double (TREE_TYPE (value), val,
                                        -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop_loc (loc, PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
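
/* Usage sketch (illustrative only, not part of GCC): rounding a sizetype
   constant up to an 8-byte boundary.  Since 8 is a power of two, this
   takes the bit-manipulation path, equivalent to (value + 7) & -8.  */
#if 0
  tree sz = round_up_loc (UNKNOWN_LOCATION, size_int (10), 8);
  /* sz is the INTEGER_CST 16; a value already on the boundary, such as
     size_int (16), is returned unchanged.  */
#endif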
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
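
/* Usage sketch (illustrative only; e1 and e2 are hypothetical ADDR_EXPRs
   such as &a[3] and &a[1] over the same array of char): the bases match,
   so the call returns true and stores the byte difference 2 in diff.  */
#if 0
  HOST_WIDE_INT diff;
  bool known = ptr_difference_const (e1, e2, &diff);  /* e1, e2: hypothetical  */
#endif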
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
                                arg0 ? arg0 : TREE_OPERAND (exp, 0),
                                arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
                                arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3_loc (loc,
                                COND_EXPR, TREE_TYPE (exp),
                                TREE_OPERAND (exp, 0),
                                arg0 ? arg0 : TREE_OPERAND (exp, 1),
                                arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip copysign function call, return the 1st argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr_loc (loc, get_callee_fndecl (exp),
                                              1, arg0);