/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "langhooks.h"
#include "gimple.h"
#include "tree-flow.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.
   Bit 0 is "less than", bit 1 is "equal", bit 2 is "greater than",
   and bit 3 is "unordered".  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
91 static bool negate_mathfn_p (enum built_in_function
);
92 static bool negate_expr_p (tree
);
93 static tree
negate_expr (tree
);
94 static tree
split_tree (tree
, enum tree_code
, tree
*, tree
*, tree
*, int);
95 static tree
associate_trees (location_t
, tree
, tree
, enum tree_code
, tree
);
96 static tree
const_binop (enum tree_code
, tree
, tree
);
97 static enum comparison_code
comparison_to_compcode (enum tree_code
);
98 static enum tree_code
compcode_to_comparison (enum comparison_code
);
99 static int operand_equal_for_comparison_p (tree
, tree
, tree
);
100 static int twoval_comparison_p (tree
, tree
*, tree
*, int *);
101 static tree
eval_subst (location_t
, tree
, tree
, tree
, tree
, tree
);
102 static tree
pedantic_omit_one_operand_loc (location_t
, tree
, tree
, tree
);
103 static tree
distribute_bit_expr (location_t
, enum tree_code
, tree
, tree
, tree
);
104 static tree
make_bit_field_ref (location_t
, tree
, tree
,
105 HOST_WIDE_INT
, HOST_WIDE_INT
, int);
106 static tree
optimize_bit_field_compare (location_t
, enum tree_code
,
108 static tree
decode_field_reference (location_t
, tree
, HOST_WIDE_INT
*,
110 enum machine_mode
*, int *, int *,
112 static int all_ones_mask_p (const_tree
, int);
113 static tree
sign_bit_p (tree
, const_tree
);
114 static int simple_operand_p (const_tree
);
115 static tree
range_binop (enum tree_code
, tree
, tree
, int, tree
, int);
116 static tree
range_predecessor (tree
);
117 static tree
range_successor (tree
);
118 static tree
fold_range_test (location_t
, enum tree_code
, tree
, tree
, tree
);
119 static tree
fold_cond_expr_with_comparison (location_t
, tree
, tree
, tree
, tree
);
120 static tree
unextend (tree
, int, int, tree
);
121 static tree
fold_truthop (location_t
, enum tree_code
, tree
, tree
, tree
);
122 static tree
optimize_minmax_comparison (location_t
, enum tree_code
,
124 static tree
extract_muldiv (tree
, tree
, enum tree_code
, tree
, bool *);
125 static tree
extract_muldiv_1 (tree
, tree
, enum tree_code
, tree
, bool *);
126 static tree
fold_binary_op_with_conditional_arg (location_t
,
127 enum tree_code
, tree
,
130 static tree
fold_mathfn_compare (location_t
,
131 enum built_in_function
, enum tree_code
,
133 static tree
fold_inf_compare (location_t
, enum tree_code
, tree
, tree
, tree
);
134 static tree
fold_div_compare (location_t
, enum tree_code
, tree
, tree
, tree
);
135 static bool reorder_operands_p (const_tree
, const_tree
);
136 static tree
fold_negate_const (tree
, tree
);
137 static tree
fold_not_const (const_tree
, tree
);
138 static tree
fold_relational_const (enum tree_code
, tree
, tree
, tree
);
139 static tree
fold_convert_const (enum tree_code
, tree
, tree
);
141 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
142 Otherwise, return LOC. */
145 expr_location_or (tree t
, location_t loc
)
147 location_t tloc
= EXPR_LOCATION (t
);
148 return tloc
!= UNKNOWN_LOCATION
? tloc
: loc
;
151 /* Similar to protected_set_expr_location, but never modify x in place,
152 if location can and needs to be set, unshare it. */
155 protected_set_expr_location_unshare (tree x
, location_t loc
)
157 if (CAN_HAVE_LOCATION_P (x
)
158 && EXPR_LOCATION (x
) != loc
159 && !(TREE_CODE (x
) == SAVE_EXPR
160 || TREE_CODE (x
) == TARGET_EXPR
161 || TREE_CODE (x
) == BIND_EXPR
))
164 SET_EXPR_LOCATION (x
, loc
);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign bit.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
180 /* If ARG2 divides ARG1 with zero remainder, carries out the division
181 of type CODE and returns the quotient.
182 Otherwise returns NULL_TREE. */
185 div_if_zero_remainder (enum tree_code code
, const_tree arg1
, const_tree arg2
)
190 /* The sign of the division is according to operand two, that
191 does the correct thing for POINTER_PLUS_EXPR where we want
192 a signed division. */
193 uns
= TYPE_UNSIGNED (TREE_TYPE (arg2
));
194 if (TREE_CODE (TREE_TYPE (arg2
)) == INTEGER_TYPE
195 && TYPE_IS_SIZETYPE (TREE_TYPE (arg2
)))
198 quo
= double_int_divmod (tree_to_double_int (arg1
),
199 tree_to_double_int (arg2
),
202 if (double_int_zero_p (rem
))
203 return build_int_cst_wide (TREE_TYPE (arg1
), quo
.low
, quo
.high
);
208 /* This is nonzero if we should defer warnings about undefined
209 overflow. This facility exists because these warnings are a
210 special case. The code to estimate loop iterations does not want
211 to issue any warnings, since it works with expressions which do not
212 occur in user code. Various bits of cleanup code call fold(), but
213 only use the result if it has certain characteristics (e.g., is a
214 constant); that code only wants to issue a warning if the result is
217 static int fold_deferring_overflow_warnings
;
219 /* If a warning about undefined overflow is deferred, this is the
220 warning. Note that this may cause us to turn two warnings into
221 one, but that is fine since it is sufficient to only give one
222 warning per expression. */
224 static const char* fold_deferred_overflow_warning
;
226 /* If a warning about undefined overflow is deferred, this is the
227 level at which the warning should be emitted. */
229 static enum warn_strict_overflow_code fold_deferred_overflow_code
;
231 /* Start deferring overflow warnings. We could use a stack here to
232 permit nested calls, but at present it is not necessary. */
235 fold_defer_overflow_warnings (void)
237 ++fold_deferring_overflow_warnings
;
240 /* Stop deferring overflow warnings. If there is a pending warning,
241 and ISSUE is true, then issue the warning if appropriate. STMT is
242 the statement with which the warning should be associated (used for
243 location information); STMT may be NULL. CODE is the level of the
244 warning--a warn_strict_overflow_code value. This function will use
245 the smaller of CODE and the deferred code when deciding whether to
246 issue the warning. CODE may be zero to mean to always use the
250 fold_undefer_overflow_warnings (bool issue
, const_gimple stmt
, int code
)
255 gcc_assert (fold_deferring_overflow_warnings
> 0);
256 --fold_deferring_overflow_warnings
;
257 if (fold_deferring_overflow_warnings
> 0)
259 if (fold_deferred_overflow_warning
!= NULL
261 && code
< (int) fold_deferred_overflow_code
)
262 fold_deferred_overflow_code
= (enum warn_strict_overflow_code
) code
;
266 warnmsg
= fold_deferred_overflow_warning
;
267 fold_deferred_overflow_warning
= NULL
;
269 if (!issue
|| warnmsg
== NULL
)
272 if (gimple_no_warning_p (stmt
))
275 /* Use the smallest code level when deciding to issue the
277 if (code
== 0 || code
> (int) fold_deferred_overflow_code
)
278 code
= fold_deferred_overflow_code
;
280 if (!issue_strict_overflow_warning (code
))
284 locus
= input_location
;
286 locus
= gimple_location (stmt
);
287 warning_at (locus
, OPT_Wstrict_overflow
, "%s", warnmsg
);
290 /* Stop deferring overflow warnings, ignoring any deferred
294 fold_undefer_and_ignore_overflow_warnings (void)
296 fold_undefer_overflow_warnings (false, NULL
, 0);
299 /* Whether we are deferring overflow warnings. */
302 fold_deferring_overflow_warnings_p (void)
304 return fold_deferring_overflow_warnings
> 0;
307 /* This is called when we fold something based on the fact that signed
308 overflow is undefined. */
311 fold_overflow_warning (const char* gmsgid
, enum warn_strict_overflow_code wc
)
313 if (fold_deferring_overflow_warnings
> 0)
315 if (fold_deferred_overflow_warning
== NULL
316 || wc
< fold_deferred_overflow_code
)
318 fold_deferred_overflow_warning
= gmsgid
;
319 fold_deferred_overflow_code
= wc
;
322 else if (issue_strict_overflow_warning (wc
))
323 warning (OPT_Wstrict_overflow
, gmsgid
);
326 /* Return true if the built-in mathematical function specified by CODE
327 is odd, i.e. -f(x) == f(-x). */
330 negate_mathfn_p (enum built_in_function code
)
334 CASE_FLT_FN (BUILT_IN_ASIN
):
335 CASE_FLT_FN (BUILT_IN_ASINH
):
336 CASE_FLT_FN (BUILT_IN_ATAN
):
337 CASE_FLT_FN (BUILT_IN_ATANH
):
338 CASE_FLT_FN (BUILT_IN_CASIN
):
339 CASE_FLT_FN (BUILT_IN_CASINH
):
340 CASE_FLT_FN (BUILT_IN_CATAN
):
341 CASE_FLT_FN (BUILT_IN_CATANH
):
342 CASE_FLT_FN (BUILT_IN_CBRT
):
343 CASE_FLT_FN (BUILT_IN_CPROJ
):
344 CASE_FLT_FN (BUILT_IN_CSIN
):
345 CASE_FLT_FN (BUILT_IN_CSINH
):
346 CASE_FLT_FN (BUILT_IN_CTAN
):
347 CASE_FLT_FN (BUILT_IN_CTANH
):
348 CASE_FLT_FN (BUILT_IN_ERF
):
349 CASE_FLT_FN (BUILT_IN_LLROUND
):
350 CASE_FLT_FN (BUILT_IN_LROUND
):
351 CASE_FLT_FN (BUILT_IN_ROUND
):
352 CASE_FLT_FN (BUILT_IN_SIN
):
353 CASE_FLT_FN (BUILT_IN_SINH
):
354 CASE_FLT_FN (BUILT_IN_TAN
):
355 CASE_FLT_FN (BUILT_IN_TANH
):
356 CASE_FLT_FN (BUILT_IN_TRUNC
):
359 CASE_FLT_FN (BUILT_IN_LLRINT
):
360 CASE_FLT_FN (BUILT_IN_LRINT
):
361 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
362 CASE_FLT_FN (BUILT_IN_RINT
):
363 return !flag_rounding_math
;
371 /* Check whether we may negate an integer constant T without causing
375 may_negate_without_overflow_p (const_tree t
)
377 unsigned HOST_WIDE_INT val
;
381 gcc_assert (TREE_CODE (t
) == INTEGER_CST
);
383 type
= TREE_TYPE (t
);
384 if (TYPE_UNSIGNED (type
))
387 prec
= TYPE_PRECISION (type
);
388 if (prec
> HOST_BITS_PER_WIDE_INT
)
390 if (TREE_INT_CST_LOW (t
) != 0)
392 prec
-= HOST_BITS_PER_WIDE_INT
;
393 val
= TREE_INT_CST_HIGH (t
);
396 val
= TREE_INT_CST_LOW (t
);
397 if (prec
< HOST_BITS_PER_WIDE_INT
)
398 val
&= ((unsigned HOST_WIDE_INT
) 1 << prec
) - 1;
399 return val
!= ((unsigned HOST_WIDE_INT
) 1 << (prec
- 1));
402 /* Determine whether an expression T can be cheaply negated using
403 the function negate_expr without introducing undefined overflow. */
406 negate_expr_p (tree t
)
413 type
= TREE_TYPE (t
);
416 switch (TREE_CODE (t
))
419 if (TYPE_OVERFLOW_WRAPS (type
))
422 /* Check that -CST will not overflow type. */
423 return may_negate_without_overflow_p (t
);
425 return (INTEGRAL_TYPE_P (type
)
426 && TYPE_OVERFLOW_WRAPS (type
));
433 /* We want to canonicalize to positive real constants. Pretend
434 that only negative ones can be easily negated. */
435 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
438 return negate_expr_p (TREE_REALPART (t
))
439 && negate_expr_p (TREE_IMAGPART (t
));
442 return negate_expr_p (TREE_OPERAND (t
, 0))
443 && negate_expr_p (TREE_OPERAND (t
, 1));
446 return negate_expr_p (TREE_OPERAND (t
, 0));
449 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
))
450 || HONOR_SIGNED_ZEROS (TYPE_MODE (type
)))
452 /* -(A + B) -> (-B) - A. */
453 if (negate_expr_p (TREE_OPERAND (t
, 1))
454 && reorder_operands_p (TREE_OPERAND (t
, 0),
455 TREE_OPERAND (t
, 1)))
457 /* -(A + B) -> (-A) - B. */
458 return negate_expr_p (TREE_OPERAND (t
, 0));
461 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
462 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
))
463 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
464 && reorder_operands_p (TREE_OPERAND (t
, 0),
465 TREE_OPERAND (t
, 1));
468 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
474 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t
))))
475 return negate_expr_p (TREE_OPERAND (t
, 1))
476 || negate_expr_p (TREE_OPERAND (t
, 0));
484 /* In general we can't negate A / B, because if A is INT_MIN and
485 B is 1, we may turn this into INT_MIN / -1 which is undefined
486 and actually traps on some architectures. But if overflow is
487 undefined, we can negate, because - (INT_MIN / 1) is an
489 if (INTEGRAL_TYPE_P (TREE_TYPE (t
))
490 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
)))
492 return negate_expr_p (TREE_OPERAND (t
, 1))
493 || negate_expr_p (TREE_OPERAND (t
, 0));
496 /* Negate -((double)float) as (double)(-float). */
497 if (TREE_CODE (type
) == REAL_TYPE
)
499 tree tem
= strip_float_extensions (t
);
501 return negate_expr_p (tem
);
506 /* Negate -f(x) as f(-x). */
507 if (negate_mathfn_p (builtin_mathfn_code (t
)))
508 return negate_expr_p (CALL_EXPR_ARG (t
, 0));
512 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
513 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
515 tree op1
= TREE_OPERAND (t
, 1);
516 if (TREE_INT_CST_HIGH (op1
) == 0
517 && (unsigned HOST_WIDE_INT
) (TYPE_PRECISION (type
) - 1)
518 == TREE_INT_CST_LOW (op1
))
529 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
530 simplification is possible.
531 If negate_expr_p would return true for T, NULL_TREE will never be
535 fold_negate_expr (location_t loc
, tree t
)
537 tree type
= TREE_TYPE (t
);
540 switch (TREE_CODE (t
))
542 /* Convert - (~A) to A + 1. */
544 if (INTEGRAL_TYPE_P (type
))
545 return fold_build2_loc (loc
, PLUS_EXPR
, type
, TREE_OPERAND (t
, 0),
546 build_int_cst (type
, 1));
550 tem
= fold_negate_const (t
, type
);
551 if (TREE_OVERFLOW (tem
) == TREE_OVERFLOW (t
)
552 || !TYPE_OVERFLOW_TRAPS (type
))
557 tem
= fold_negate_const (t
, type
);
558 /* Two's complement FP formats, such as c4x, may overflow. */
559 if (!TREE_OVERFLOW (tem
) || !flag_trapping_math
)
564 tem
= fold_negate_const (t
, type
);
569 tree rpart
= negate_expr (TREE_REALPART (t
));
570 tree ipart
= negate_expr (TREE_IMAGPART (t
));
572 if ((TREE_CODE (rpart
) == REAL_CST
573 && TREE_CODE (ipart
) == REAL_CST
)
574 || (TREE_CODE (rpart
) == INTEGER_CST
575 && TREE_CODE (ipart
) == INTEGER_CST
))
576 return build_complex (type
, rpart
, ipart
);
581 if (negate_expr_p (t
))
582 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
583 fold_negate_expr (loc
, TREE_OPERAND (t
, 0)),
584 fold_negate_expr (loc
, TREE_OPERAND (t
, 1)));
588 if (negate_expr_p (t
))
589 return fold_build1_loc (loc
, CONJ_EXPR
, type
,
590 fold_negate_expr (loc
, TREE_OPERAND (t
, 0)));
594 return TREE_OPERAND (t
, 0);
597 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
))
598 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type
)))
600 /* -(A + B) -> (-B) - A. */
601 if (negate_expr_p (TREE_OPERAND (t
, 1))
602 && reorder_operands_p (TREE_OPERAND (t
, 0),
603 TREE_OPERAND (t
, 1)))
605 tem
= negate_expr (TREE_OPERAND (t
, 1));
606 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
607 tem
, TREE_OPERAND (t
, 0));
610 /* -(A + B) -> (-A) - B. */
611 if (negate_expr_p (TREE_OPERAND (t
, 0)))
613 tem
= negate_expr (TREE_OPERAND (t
, 0));
614 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
615 tem
, TREE_OPERAND (t
, 1));
621 /* - (A - B) -> B - A */
622 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
))
623 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
624 && reorder_operands_p (TREE_OPERAND (t
, 0), TREE_OPERAND (t
, 1)))
625 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
626 TREE_OPERAND (t
, 1), TREE_OPERAND (t
, 0));
630 if (TYPE_UNSIGNED (type
))
636 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
)))
638 tem
= TREE_OPERAND (t
, 1);
639 if (negate_expr_p (tem
))
640 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
641 TREE_OPERAND (t
, 0), negate_expr (tem
));
642 tem
= TREE_OPERAND (t
, 0);
643 if (negate_expr_p (tem
))
644 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
645 negate_expr (tem
), TREE_OPERAND (t
, 1));
654 /* In general we can't negate A / B, because if A is INT_MIN and
655 B is 1, we may turn this into INT_MIN / -1 which is undefined
656 and actually traps on some architectures. But if overflow is
657 undefined, we can negate, because - (INT_MIN / 1) is an
659 if (!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
661 const char * const warnmsg
= G_("assuming signed overflow does not "
662 "occur when negating a division");
663 tem
= TREE_OPERAND (t
, 1);
664 if (negate_expr_p (tem
))
666 if (INTEGRAL_TYPE_P (type
)
667 && (TREE_CODE (tem
) != INTEGER_CST
668 || integer_onep (tem
)))
669 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MISC
);
670 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
671 TREE_OPERAND (t
, 0), negate_expr (tem
));
673 tem
= TREE_OPERAND (t
, 0);
674 if (negate_expr_p (tem
))
676 if (INTEGRAL_TYPE_P (type
)
677 && (TREE_CODE (tem
) != INTEGER_CST
678 || tree_int_cst_equal (tem
, TYPE_MIN_VALUE (type
))))
679 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MISC
);
680 return fold_build2_loc (loc
, TREE_CODE (t
), type
,
681 negate_expr (tem
), TREE_OPERAND (t
, 1));
687 /* Convert -((double)float) into (double)(-float). */
688 if (TREE_CODE (type
) == REAL_TYPE
)
690 tem
= strip_float_extensions (t
);
691 if (tem
!= t
&& negate_expr_p (tem
))
692 return fold_convert_loc (loc
, type
, negate_expr (tem
));
697 /* Negate -f(x) as f(-x). */
698 if (negate_mathfn_p (builtin_mathfn_code (t
))
699 && negate_expr_p (CALL_EXPR_ARG (t
, 0)))
703 fndecl
= get_callee_fndecl (t
);
704 arg
= negate_expr (CALL_EXPR_ARG (t
, 0));
705 return build_call_expr_loc (loc
, fndecl
, 1, arg
);
710 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
711 if (TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
)
713 tree op1
= TREE_OPERAND (t
, 1);
714 if (TREE_INT_CST_HIGH (op1
) == 0
715 && (unsigned HOST_WIDE_INT
) (TYPE_PRECISION (type
) - 1)
716 == TREE_INT_CST_LOW (op1
))
718 tree ntype
= TYPE_UNSIGNED (type
)
719 ? signed_type_for (type
)
720 : unsigned_type_for (type
);
721 tree temp
= fold_convert_loc (loc
, ntype
, TREE_OPERAND (t
, 0));
722 temp
= fold_build2_loc (loc
, RSHIFT_EXPR
, ntype
, temp
, op1
);
723 return fold_convert_loc (loc
, type
, temp
);
735 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
736 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
748 loc
= EXPR_LOCATION (t
);
749 type
= TREE_TYPE (t
);
752 tem
= fold_negate_expr (loc
, t
);
754 tem
= build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (t
), t
);
755 return fold_convert_loc (loc
, type
, tem
);
758 /* Split a tree IN into a constant, literal and variable parts that could be
759 combined with CODE to make IN. "constant" means an expression with
760 TREE_CONSTANT but that isn't an actual constant. CODE must be a
761 commutative arithmetic operation. Store the constant part into *CONP,
762 the literal in *LITP and return the variable part. If a part isn't
763 present, set it to null. If the tree does not decompose in this way,
764 return the entire tree as the variable part and the other parts as null.
766 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
767 case, we negate an operand that was subtracted. Except if it is a
768 literal for which we use *MINUS_LITP instead.
770 If NEGATE_P is true, we are negating all of IN, again except a literal
771 for which we use *MINUS_LITP instead.
773 If IN is itself a literal or constant, return it as appropriate.
775 Note that we do not guarantee that any of the three values will be the
776 same type as IN, but they will have the same signedness and mode. */
779 split_tree (tree in
, enum tree_code code
, tree
*conp
, tree
*litp
,
780 tree
*minus_litp
, int negate_p
)
788 /* Strip any conversions that don't change the machine mode or signedness. */
789 STRIP_SIGN_NOPS (in
);
791 if (TREE_CODE (in
) == INTEGER_CST
|| TREE_CODE (in
) == REAL_CST
792 || TREE_CODE (in
) == FIXED_CST
)
794 else if (TREE_CODE (in
) == code
795 || ((! FLOAT_TYPE_P (TREE_TYPE (in
)) || flag_associative_math
)
796 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in
))
797 /* We can associate addition and subtraction together (even
798 though the C standard doesn't say so) for integers because
799 the value is not affected. For reals, the value might be
800 affected, so we can't. */
801 && ((code
== PLUS_EXPR
&& TREE_CODE (in
) == MINUS_EXPR
)
802 || (code
== MINUS_EXPR
&& TREE_CODE (in
) == PLUS_EXPR
))))
804 tree op0
= TREE_OPERAND (in
, 0);
805 tree op1
= TREE_OPERAND (in
, 1);
806 int neg1_p
= TREE_CODE (in
) == MINUS_EXPR
;
807 int neg_litp_p
= 0, neg_conp_p
= 0, neg_var_p
= 0;
809 /* First see if either of the operands is a literal, then a constant. */
810 if (TREE_CODE (op0
) == INTEGER_CST
|| TREE_CODE (op0
) == REAL_CST
811 || TREE_CODE (op0
) == FIXED_CST
)
812 *litp
= op0
, op0
= 0;
813 else if (TREE_CODE (op1
) == INTEGER_CST
|| TREE_CODE (op1
) == REAL_CST
814 || TREE_CODE (op1
) == FIXED_CST
)
815 *litp
= op1
, neg_litp_p
= neg1_p
, op1
= 0;
817 if (op0
!= 0 && TREE_CONSTANT (op0
))
818 *conp
= op0
, op0
= 0;
819 else if (op1
!= 0 && TREE_CONSTANT (op1
))
820 *conp
= op1
, neg_conp_p
= neg1_p
, op1
= 0;
822 /* If we haven't dealt with either operand, this is not a case we can
823 decompose. Otherwise, VAR is either of the ones remaining, if any. */
824 if (op0
!= 0 && op1
!= 0)
829 var
= op1
, neg_var_p
= neg1_p
;
831 /* Now do any needed negations. */
833 *minus_litp
= *litp
, *litp
= 0;
835 *conp
= negate_expr (*conp
);
837 var
= negate_expr (var
);
839 else if (TREE_CONSTANT (in
))
847 *minus_litp
= *litp
, *litp
= 0;
848 else if (*minus_litp
)
849 *litp
= *minus_litp
, *minus_litp
= 0;
850 *conp
= negate_expr (*conp
);
851 var
= negate_expr (var
);
857 /* Re-associate trees split by the above function. T1 and T2 are
858 either expressions to associate or null. Return the new
859 expression, if any. LOC is the location of the new expression. If
860 we build an operation, do it in TYPE and with CODE. */
863 associate_trees (location_t loc
, tree t1
, tree t2
, enum tree_code code
, tree type
)
870 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
871 try to fold this since we will have infinite recursion. But do
872 deal with any NEGATE_EXPRs. */
873 if (TREE_CODE (t1
) == code
|| TREE_CODE (t2
) == code
874 || TREE_CODE (t1
) == MINUS_EXPR
|| TREE_CODE (t2
) == MINUS_EXPR
)
876 if (code
== PLUS_EXPR
)
878 if (TREE_CODE (t1
) == NEGATE_EXPR
)
879 return build2_loc (loc
, MINUS_EXPR
, type
,
880 fold_convert_loc (loc
, type
, t2
),
881 fold_convert_loc (loc
, type
,
882 TREE_OPERAND (t1
, 0)));
883 else if (TREE_CODE (t2
) == NEGATE_EXPR
)
884 return build2_loc (loc
, MINUS_EXPR
, type
,
885 fold_convert_loc (loc
, type
, t1
),
886 fold_convert_loc (loc
, type
,
887 TREE_OPERAND (t2
, 0)));
888 else if (integer_zerop (t2
))
889 return fold_convert_loc (loc
, type
, t1
);
891 else if (code
== MINUS_EXPR
)
893 if (integer_zerop (t2
))
894 return fold_convert_loc (loc
, type
, t1
);
897 return build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, t1
),
898 fold_convert_loc (loc
, type
, t2
));
901 return fold_build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, t1
),
902 fold_convert_loc (loc
, type
, t2
));
905 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
906 for use in int_const_binop, size_binop and size_diffop. */
909 int_binop_types_match_p (enum tree_code code
, const_tree type1
, const_tree type2
)
911 if (TREE_CODE (type1
) != INTEGER_TYPE
&& !POINTER_TYPE_P (type1
))
913 if (TREE_CODE (type2
) != INTEGER_TYPE
&& !POINTER_TYPE_P (type2
))
928 return TYPE_UNSIGNED (type1
) == TYPE_UNSIGNED (type2
)
929 && TYPE_PRECISION (type1
) == TYPE_PRECISION (type2
)
930 && TYPE_MODE (type1
) == TYPE_MODE (type2
);
934 /* Combine two integer constants ARG1 and ARG2 under operation CODE
935 to produce a new constant. Return NULL_TREE if we don't know how
936 to evaluate CODE at compile-time. */
939 int_const_binop (enum tree_code code
, const_tree arg1
, const_tree arg2
)
941 double_int op1
, op2
, res
, tmp
;
943 tree type
= TREE_TYPE (arg1
);
944 bool uns
= TYPE_UNSIGNED (type
);
946 = (TREE_CODE (type
) == INTEGER_TYPE
&& TYPE_IS_SIZETYPE (type
));
947 bool overflow
= false;
949 op1
= tree_to_double_int (arg1
);
950 op2
= tree_to_double_int (arg2
);
955 res
= double_int_ior (op1
, op2
);
959 res
= double_int_xor (op1
, op2
);
963 res
= double_int_and (op1
, op2
);
967 res
= double_int_rshift (op1
, double_int_to_shwi (op2
),
968 TYPE_PRECISION (type
), !uns
);
972 /* It's unclear from the C standard whether shifts can overflow.
973 The following code ignores overflow; perhaps a C standard
974 interpretation ruling is needed. */
975 res
= double_int_lshift (op1
, double_int_to_shwi (op2
),
976 TYPE_PRECISION (type
), !uns
);
980 res
= double_int_rrotate (op1
, double_int_to_shwi (op2
),
981 TYPE_PRECISION (type
));
985 res
= double_int_lrotate (op1
, double_int_to_shwi (op2
),
986 TYPE_PRECISION (type
));
990 overflow
= add_double (op1
.low
, op1
.high
, op2
.low
, op2
.high
,
991 &res
.low
, &res
.high
);
995 neg_double (op2
.low
, op2
.high
, &res
.low
, &res
.high
);
996 add_double (op1
.low
, op1
.high
, res
.low
, res
.high
,
997 &res
.low
, &res
.high
);
998 overflow
= OVERFLOW_SUM_SIGN (res
.high
, op2
.high
, op1
.high
);
1002 overflow
= mul_double (op1
.low
, op1
.high
, op2
.low
, op2
.high
,
1003 &res
.low
, &res
.high
);
1006 case TRUNC_DIV_EXPR
:
1007 case FLOOR_DIV_EXPR
: case CEIL_DIV_EXPR
:
1008 case EXACT_DIV_EXPR
:
1009 /* This is a shortcut for a common special case. */
1010 if (op2
.high
== 0 && (HOST_WIDE_INT
) op2
.low
> 0
1011 && !TREE_OVERFLOW (arg1
)
1012 && !TREE_OVERFLOW (arg2
)
1013 && op1
.high
== 0 && (HOST_WIDE_INT
) op1
.low
>= 0)
1015 if (code
== CEIL_DIV_EXPR
)
1016 op1
.low
+= op2
.low
- 1;
1018 res
.low
= op1
.low
/ op2
.low
, res
.high
= 0;
1022 /* ... fall through ... */
1024 case ROUND_DIV_EXPR
:
1025 if (double_int_zero_p (op2
))
1027 if (double_int_one_p (op2
))
1032 if (double_int_equal_p (op1
, op2
)
1033 && ! double_int_zero_p (op1
))
1035 res
= double_int_one
;
1038 overflow
= div_and_round_double (code
, uns
,
1039 op1
.low
, op1
.high
, op2
.low
, op2
.high
,
1040 &res
.low
, &res
.high
,
1041 &tmp
.low
, &tmp
.high
);
1044 case TRUNC_MOD_EXPR
:
1045 case FLOOR_MOD_EXPR
: case CEIL_MOD_EXPR
:
1046 /* This is a shortcut for a common special case. */
1047 if (op2
.high
== 0 && (HOST_WIDE_INT
) op2
.low
> 0
1048 && !TREE_OVERFLOW (arg1
)
1049 && !TREE_OVERFLOW (arg2
)
1050 && op1
.high
== 0 && (HOST_WIDE_INT
) op1
.low
>= 0)
1052 if (code
== CEIL_MOD_EXPR
)
1053 op1
.low
+= op2
.low
- 1;
1054 res
.low
= op1
.low
% op2
.low
, res
.high
= 0;
1058 /* ... fall through ... */
1060 case ROUND_MOD_EXPR
:
1061 if (double_int_zero_p (op2
))
1063 overflow
= div_and_round_double (code
, uns
,
1064 op1
.low
, op1
.high
, op2
.low
, op2
.high
,
1065 &tmp
.low
, &tmp
.high
,
1066 &res
.low
, &res
.high
);
1070 res
= double_int_min (op1
, op2
, uns
);
1074 res
= double_int_max (op1
, op2
, uns
);
1081 t
= force_fit_type_double (TREE_TYPE (arg1
), res
, 1,
1082 ((!uns
|| is_sizetype
) && overflow
)
1083 | TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
));
1088 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1089 constant. We assume ARG1 and ARG2 have the same data type, or at least
1090 are the same kind of constant and the same machine mode. Return zero if
1091 combining the constants is not allowed in the current operating mode. */
1094 const_binop (enum tree_code code
, tree arg1
, tree arg2
)
1096 /* Sanity check for the recursive cases. */
1103 if (TREE_CODE (arg1
) == INTEGER_CST
)
1104 return int_const_binop (code
, arg1
, arg2
);
1106 if (TREE_CODE (arg1
) == REAL_CST
)
1108 enum machine_mode mode
;
1111 REAL_VALUE_TYPE value
;
1112 REAL_VALUE_TYPE result
;
1116 /* The following codes are handled by real_arithmetic. */
1131 d1
= TREE_REAL_CST (arg1
);
1132 d2
= TREE_REAL_CST (arg2
);
1134 type
= TREE_TYPE (arg1
);
1135 mode
= TYPE_MODE (type
);
1137 /* Don't perform operation if we honor signaling NaNs and
1138 either operand is a NaN. */
1139 if (HONOR_SNANS (mode
)
1140 && (REAL_VALUE_ISNAN (d1
) || REAL_VALUE_ISNAN (d2
)))
1143 /* Don't perform operation if it would raise a division
1144 by zero exception. */
1145 if (code
== RDIV_EXPR
1146 && REAL_VALUES_EQUAL (d2
, dconst0
)
1147 && (flag_trapping_math
|| ! MODE_HAS_INFINITIES (mode
)))
1150 /* If either operand is a NaN, just return it. Otherwise, set up
1151 for floating-point trap; we return an overflow. */
1152 if (REAL_VALUE_ISNAN (d1
))
1154 else if (REAL_VALUE_ISNAN (d2
))
1157 inexact
= real_arithmetic (&value
, code
, &d1
, &d2
);
1158 real_convert (&result
, mode
, &value
);
1160 /* Don't constant fold this floating point operation if
1161 the result has overflowed and flag_trapping_math. */
1162 if (flag_trapping_math
1163 && MODE_HAS_INFINITIES (mode
)
1164 && REAL_VALUE_ISINF (result
)
1165 && !REAL_VALUE_ISINF (d1
)
1166 && !REAL_VALUE_ISINF (d2
))
1169 /* Don't constant fold this floating point operation if the
1170 result may dependent upon the run-time rounding mode and
1171 flag_rounding_math is set, or if GCC's software emulation
1172 is unable to accurately represent the result. */
1173 if ((flag_rounding_math
1174 || (MODE_COMPOSITE_P (mode
) && !flag_unsafe_math_optimizations
))
1175 && (inexact
|| !real_identical (&result
, &value
)))
1178 t
= build_real (type
, result
);
1180 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
);
1184 if (TREE_CODE (arg1
) == FIXED_CST
)
1186 FIXED_VALUE_TYPE f1
;
1187 FIXED_VALUE_TYPE f2
;
1188 FIXED_VALUE_TYPE result
;
1193 /* The following codes are handled by fixed_arithmetic. */
1199 case TRUNC_DIV_EXPR
:
1200 f2
= TREE_FIXED_CST (arg2
);
1205 f2
.data
.high
= TREE_INT_CST_HIGH (arg2
);
1206 f2
.data
.low
= TREE_INT_CST_LOW (arg2
);
1214 f1
= TREE_FIXED_CST (arg1
);
1215 type
= TREE_TYPE (arg1
);
1216 sat_p
= TYPE_SATURATING (type
);
1217 overflow_p
= fixed_arithmetic (&result
, code
, &f1
, &f2
, sat_p
);
1218 t
= build_fixed (type
, result
);
1219 /* Propagate overflow flags. */
1220 if (overflow_p
| TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
))
1221 TREE_OVERFLOW (t
) = 1;
1225 if (TREE_CODE (arg1
) == COMPLEX_CST
)
1227 tree type
= TREE_TYPE (arg1
);
1228 tree r1
= TREE_REALPART (arg1
);
1229 tree i1
= TREE_IMAGPART (arg1
);
1230 tree r2
= TREE_REALPART (arg2
);
1231 tree i2
= TREE_IMAGPART (arg2
);
1238 real
= const_binop (code
, r1
, r2
);
1239 imag
= const_binop (code
, i1
, i2
);
1243 if (COMPLEX_FLOAT_TYPE_P (type
))
1244 return do_mpc_arg2 (arg1
, arg2
, type
,
1245 /* do_nonfinite= */ folding_initializer
,
1248 real
= const_binop (MINUS_EXPR
,
1249 const_binop (MULT_EXPR
, r1
, r2
),
1250 const_binop (MULT_EXPR
, i1
, i2
));
1251 imag
= const_binop (PLUS_EXPR
,
1252 const_binop (MULT_EXPR
, r1
, i2
),
1253 const_binop (MULT_EXPR
, i1
, r2
));
1257 if (COMPLEX_FLOAT_TYPE_P (type
))
1258 return do_mpc_arg2 (arg1
, arg2
, type
,
1259 /* do_nonfinite= */ folding_initializer
,
1262 case TRUNC_DIV_EXPR
:
1264 case FLOOR_DIV_EXPR
:
1265 case ROUND_DIV_EXPR
:
1266 if (flag_complex_method
== 0)
1268 /* Keep this algorithm in sync with
1269 tree-complex.c:expand_complex_div_straight().
1271 Expand complex division to scalars, straightforward algorithm.
1272 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1276 = const_binop (PLUS_EXPR
,
1277 const_binop (MULT_EXPR
, r2
, r2
),
1278 const_binop (MULT_EXPR
, i2
, i2
));
1280 = const_binop (PLUS_EXPR
,
1281 const_binop (MULT_EXPR
, r1
, r2
),
1282 const_binop (MULT_EXPR
, i1
, i2
));
1284 = const_binop (MINUS_EXPR
,
1285 const_binop (MULT_EXPR
, i1
, r2
),
1286 const_binop (MULT_EXPR
, r1
, i2
));
1288 real
= const_binop (code
, t1
, magsquared
);
1289 imag
= const_binop (code
, t2
, magsquared
);
1293 /* Keep this algorithm in sync with
1294 tree-complex.c:expand_complex_div_wide().
1296 Expand complex division to scalars, modified algorithm to minimize
1297 overflow with wide input ranges. */
1298 tree compare
= fold_build2 (LT_EXPR
, boolean_type_node
,
1299 fold_abs_const (r2
, TREE_TYPE (type
)),
1300 fold_abs_const (i2
, TREE_TYPE (type
)));
1302 if (integer_nonzerop (compare
))
1304 /* In the TRUE branch, we compute
1306 div = (br * ratio) + bi;
1307 tr = (ar * ratio) + ai;
1308 ti = (ai * ratio) - ar;
1311 tree ratio
= const_binop (code
, r2
, i2
);
1312 tree div
= const_binop (PLUS_EXPR
, i2
,
1313 const_binop (MULT_EXPR
, r2
, ratio
));
1314 real
= const_binop (MULT_EXPR
, r1
, ratio
);
1315 real
= const_binop (PLUS_EXPR
, real
, i1
);
1316 real
= const_binop (code
, real
, div
);
1318 imag
= const_binop (MULT_EXPR
, i1
, ratio
);
1319 imag
= const_binop (MINUS_EXPR
, imag
, r1
);
1320 imag
= const_binop (code
, imag
, div
);
1324 /* In the FALSE branch, we compute
1326 divisor = (d * ratio) + c;
1327 tr = (b * ratio) + a;
1328 ti = b - (a * ratio);
1331 tree ratio
= const_binop (code
, i2
, r2
);
1332 tree div
= const_binop (PLUS_EXPR
, r2
,
1333 const_binop (MULT_EXPR
, i2
, ratio
));
1335 real
= const_binop (MULT_EXPR
, i1
, ratio
);
1336 real
= const_binop (PLUS_EXPR
, real
, r1
);
1337 real
= const_binop (code
, real
, div
);
1339 imag
= const_binop (MULT_EXPR
, r1
, ratio
);
1340 imag
= const_binop (MINUS_EXPR
, i1
, imag
);
1341 imag
= const_binop (code
, imag
, div
);
1351 return build_complex (type
, real
, imag
);
1354 if (TREE_CODE (arg1
) == VECTOR_CST
)
1356 tree type
= TREE_TYPE(arg1
);
1357 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
1358 tree elements1
, elements2
, list
= NULL_TREE
;
1360 if(TREE_CODE(arg2
) != VECTOR_CST
)
1363 elements1
= TREE_VECTOR_CST_ELTS (arg1
);
1364 elements2
= TREE_VECTOR_CST_ELTS (arg2
);
1366 for (i
= 0; i
< count
; i
++)
1368 tree elem1
, elem2
, elem
;
1370 /* The trailing elements can be empty and should be treated as 0 */
1372 elem1
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), integer_zero_node
);
1375 elem1
= TREE_VALUE(elements1
);
1376 elements1
= TREE_CHAIN (elements1
);
1380 elem2
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), integer_zero_node
);
1383 elem2
= TREE_VALUE(elements2
);
1384 elements2
= TREE_CHAIN (elements2
);
1387 elem
= const_binop (code
, elem1
, elem2
);
1389 /* It is possible that const_binop cannot handle the given
1390 code and return NULL_TREE */
1391 if(elem
== NULL_TREE
)
1394 list
= tree_cons (NULL_TREE
, elem
, list
);
1396 return build_vector(type
, nreverse(list
));
1401 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1402 indicates which particular sizetype to create. */
1405 size_int_kind (HOST_WIDE_INT number
, enum size_type_kind kind
)
1407 return build_int_cst (sizetype_tab
[(int) kind
], number
);
1410 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1411 is a tree code. The type of the result is taken from the operands.
1412 Both must be equivalent integer types, ala int_binop_types_match_p.
1413 If the operands are constant, so is the result. */
1416 size_binop_loc (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
)
1418 tree type
= TREE_TYPE (arg0
);
1420 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
1421 return error_mark_node
;
1423 gcc_assert (int_binop_types_match_p (code
, TREE_TYPE (arg0
),
1426 /* Handle the special case of two integer constants faster. */
1427 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
1429 /* And some specific cases even faster than that. */
1430 if (code
== PLUS_EXPR
)
1432 if (integer_zerop (arg0
) && !TREE_OVERFLOW (arg0
))
1434 if (integer_zerop (arg1
) && !TREE_OVERFLOW (arg1
))
1437 else if (code
== MINUS_EXPR
)
1439 if (integer_zerop (arg1
) && !TREE_OVERFLOW (arg1
))
1442 else if (code
== MULT_EXPR
)
1444 if (integer_onep (arg0
) && !TREE_OVERFLOW (arg0
))
1448 /* Handle general case of two integer constants. */
1449 return int_const_binop (code
, arg0
, arg1
);
1452 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
1455 /* Given two values, either both of sizetype or both of bitsizetype,
1456 compute the difference between the two values. Return the value
1457 in signed type corresponding to the type of the operands. */
1460 size_diffop_loc (location_t loc
, tree arg0
, tree arg1
)
1462 tree type
= TREE_TYPE (arg0
);
1465 gcc_assert (int_binop_types_match_p (MINUS_EXPR
, TREE_TYPE (arg0
),
1468 /* If the type is already signed, just do the simple thing. */
1469 if (!TYPE_UNSIGNED (type
))
1470 return size_binop_loc (loc
, MINUS_EXPR
, arg0
, arg1
);
1472 if (type
== sizetype
)
1474 else if (type
== bitsizetype
)
1475 ctype
= sbitsizetype
;
1477 ctype
= signed_type_for (type
);
1479 /* If either operand is not a constant, do the conversions to the signed
1480 type and subtract. The hardware will do the right thing with any
1481 overflow in the subtraction. */
1482 if (TREE_CODE (arg0
) != INTEGER_CST
|| TREE_CODE (arg1
) != INTEGER_CST
)
1483 return size_binop_loc (loc
, MINUS_EXPR
,
1484 fold_convert_loc (loc
, ctype
, arg0
),
1485 fold_convert_loc (loc
, ctype
, arg1
));
1487 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1488 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1489 overflow) and negate (which can't either). Special-case a result
1490 of zero while we're here. */
1491 if (tree_int_cst_equal (arg0
, arg1
))
1492 return build_int_cst (ctype
, 0);
1493 else if (tree_int_cst_lt (arg1
, arg0
))
1494 return fold_convert_loc (loc
, ctype
,
1495 size_binop_loc (loc
, MINUS_EXPR
, arg0
, arg1
));
1497 return size_binop_loc (loc
, MINUS_EXPR
, build_int_cst (ctype
, 0),
1498 fold_convert_loc (loc
, ctype
,
1499 size_binop_loc (loc
,
1504 /* A subroutine of fold_convert_const handling conversions of an
1505 INTEGER_CST to another integer type. */
1508 fold_convert_const_int_from_int (tree type
, const_tree arg1
)
1512 /* Given an integer constant, make new constant with new type,
1513 appropriately sign-extended or truncated. */
1514 t
= force_fit_type_double (type
, tree_to_double_int (arg1
),
1515 !POINTER_TYPE_P (TREE_TYPE (arg1
)),
1516 (TREE_INT_CST_HIGH (arg1
) < 0
1517 && (TYPE_UNSIGNED (type
)
1518 < TYPE_UNSIGNED (TREE_TYPE (arg1
))))
1519 | TREE_OVERFLOW (arg1
));
1524 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1525 to an integer type. */
1528 fold_convert_const_int_from_real (enum tree_code code
, tree type
, const_tree arg1
)
1533 /* The following code implements the floating point to integer
1534 conversion rules required by the Java Language Specification,
1535 that IEEE NaNs are mapped to zero and values that overflow
1536 the target precision saturate, i.e. values greater than
1537 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1538 are mapped to INT_MIN. These semantics are allowed by the
1539 C and C++ standards that simply state that the behavior of
1540 FP-to-integer conversion is unspecified upon overflow. */
1544 REAL_VALUE_TYPE x
= TREE_REAL_CST (arg1
);
1548 case FIX_TRUNC_EXPR
:
1549 real_trunc (&r
, VOIDmode
, &x
);
1556 /* If R is NaN, return zero and show we have an overflow. */
1557 if (REAL_VALUE_ISNAN (r
))
1560 val
= double_int_zero
;
1563 /* See if R is less than the lower bound or greater than the
1568 tree lt
= TYPE_MIN_VALUE (type
);
1569 REAL_VALUE_TYPE l
= real_value_from_int_cst (NULL_TREE
, lt
);
1570 if (REAL_VALUES_LESS (r
, l
))
1573 val
= tree_to_double_int (lt
);
1579 tree ut
= TYPE_MAX_VALUE (type
);
1582 REAL_VALUE_TYPE u
= real_value_from_int_cst (NULL_TREE
, ut
);
1583 if (REAL_VALUES_LESS (u
, r
))
1586 val
= tree_to_double_int (ut
);
1592 real_to_integer2 ((HOST_WIDE_INT
*) &val
.low
, &val
.high
, &r
);
1594 t
= force_fit_type_double (type
, val
, -1, overflow
| TREE_OVERFLOW (arg1
));
1598 /* A subroutine of fold_convert_const handling conversions of a
1599 FIXED_CST to an integer type. */
1602 fold_convert_const_int_from_fixed (tree type
, const_tree arg1
)
1605 double_int temp
, temp_trunc
;
1608 /* Right shift FIXED_CST to temp by fbit. */
1609 temp
= TREE_FIXED_CST (arg1
).data
;
1610 mode
= TREE_FIXED_CST (arg1
).mode
;
1611 if (GET_MODE_FBIT (mode
) < 2 * HOST_BITS_PER_WIDE_INT
)
1613 temp
= double_int_rshift (temp
, GET_MODE_FBIT (mode
),
1614 HOST_BITS_PER_DOUBLE_INT
,
1615 SIGNED_FIXED_POINT_MODE_P (mode
));
1617 /* Left shift temp to temp_trunc by fbit. */
1618 temp_trunc
= double_int_lshift (temp
, GET_MODE_FBIT (mode
),
1619 HOST_BITS_PER_DOUBLE_INT
,
1620 SIGNED_FIXED_POINT_MODE_P (mode
));
1624 temp
= double_int_zero
;
1625 temp_trunc
= double_int_zero
;
1628 /* If FIXED_CST is negative, we need to round the value toward 0.
1629 By checking if the fractional bits are not zero to add 1 to temp. */
1630 if (SIGNED_FIXED_POINT_MODE_P (mode
)
1631 && double_int_negative_p (temp_trunc
)
1632 && !double_int_equal_p (TREE_FIXED_CST (arg1
).data
, temp_trunc
))
1633 temp
= double_int_add (temp
, double_int_one
);
1635 /* Given a fixed-point constant, make new constant with new type,
1636 appropriately sign-extended or truncated. */
1637 t
= force_fit_type_double (type
, temp
, -1,
1638 (double_int_negative_p (temp
)
1639 && (TYPE_UNSIGNED (type
)
1640 < TYPE_UNSIGNED (TREE_TYPE (arg1
))))
1641 | TREE_OVERFLOW (arg1
));
1646 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1647 to another floating point type. */
1650 fold_convert_const_real_from_real (tree type
, const_tree arg1
)
1652 REAL_VALUE_TYPE value
;
1655 real_convert (&value
, TYPE_MODE (type
), &TREE_REAL_CST (arg1
));
1656 t
= build_real (type
, value
);
1658 /* If converting an infinity or NAN to a representation that doesn't
1659 have one, set the overflow bit so that we can produce some kind of
1660 error message at the appropriate point if necessary. It's not the
1661 most user-friendly message, but it's better than nothing. */
1662 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1
))
1663 && !MODE_HAS_INFINITIES (TYPE_MODE (type
)))
1664 TREE_OVERFLOW (t
) = 1;
1665 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
))
1666 && !MODE_HAS_NANS (TYPE_MODE (type
)))
1667 TREE_OVERFLOW (t
) = 1;
1668 /* Regular overflow, conversion produced an infinity in a mode that
1669 can't represent them. */
1670 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type
))
1671 && REAL_VALUE_ISINF (value
)
1672 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1
)))
1673 TREE_OVERFLOW (t
) = 1;
1675 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
1679 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1680 to a floating point type. */
1683 fold_convert_const_real_from_fixed (tree type
, const_tree arg1
)
1685 REAL_VALUE_TYPE value
;
1688 real_convert_from_fixed (&value
, TYPE_MODE (type
), &TREE_FIXED_CST (arg1
));
1689 t
= build_real (type
, value
);
1691 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
);
1695 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1696 to another fixed-point type. */
1699 fold_convert_const_fixed_from_fixed (tree type
, const_tree arg1
)
1701 FIXED_VALUE_TYPE value
;
1705 overflow_p
= fixed_convert (&value
, TYPE_MODE (type
), &TREE_FIXED_CST (arg1
),
1706 TYPE_SATURATING (type
));
1707 t
= build_fixed (type
, value
);
1709 /* Propagate overflow flags. */
1710 if (overflow_p
| TREE_OVERFLOW (arg1
))
1711 TREE_OVERFLOW (t
) = 1;
1715 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
1716 to a fixed-point type. */
1719 fold_convert_const_fixed_from_int (tree type
, const_tree arg1
)
1721 FIXED_VALUE_TYPE value
;
1725 overflow_p
= fixed_convert_from_int (&value
, TYPE_MODE (type
),
1726 TREE_INT_CST (arg1
),
1727 TYPE_UNSIGNED (TREE_TYPE (arg1
)),
1728 TYPE_SATURATING (type
));
1729 t
= build_fixed (type
, value
);
1731 /* Propagate overflow flags. */
1732 if (overflow_p
| TREE_OVERFLOW (arg1
))
1733 TREE_OVERFLOW (t
) = 1;
1737 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1738 to a fixed-point type. */
1741 fold_convert_const_fixed_from_real (tree type
, const_tree arg1
)
1743 FIXED_VALUE_TYPE value
;
1747 overflow_p
= fixed_convert_from_real (&value
, TYPE_MODE (type
),
1748 &TREE_REAL_CST (arg1
),
1749 TYPE_SATURATING (type
));
1750 t
= build_fixed (type
, value
);
1752 /* Propagate overflow flags. */
1753 if (overflow_p
| TREE_OVERFLOW (arg1
))
1754 TREE_OVERFLOW (t
) = 1;
1758 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1759 type TYPE. If no simplification can be done return NULL_TREE. */
1762 fold_convert_const (enum tree_code code
, tree type
, tree arg1
)
1764 if (TREE_TYPE (arg1
) == type
)
1767 if (POINTER_TYPE_P (type
) || INTEGRAL_TYPE_P (type
)
1768 || TREE_CODE (type
) == OFFSET_TYPE
)
1770 if (TREE_CODE (arg1
) == INTEGER_CST
)
1771 return fold_convert_const_int_from_int (type
, arg1
);
1772 else if (TREE_CODE (arg1
) == REAL_CST
)
1773 return fold_convert_const_int_from_real (code
, type
, arg1
);
1774 else if (TREE_CODE (arg1
) == FIXED_CST
)
1775 return fold_convert_const_int_from_fixed (type
, arg1
);
1777 else if (TREE_CODE (type
) == REAL_TYPE
)
1779 if (TREE_CODE (arg1
) == INTEGER_CST
)
1780 return build_real_from_int_cst (type
, arg1
);
1781 else if (TREE_CODE (arg1
) == REAL_CST
)
1782 return fold_convert_const_real_from_real (type
, arg1
);
1783 else if (TREE_CODE (arg1
) == FIXED_CST
)
1784 return fold_convert_const_real_from_fixed (type
, arg1
);
1786 else if (TREE_CODE (type
) == FIXED_POINT_TYPE
)
1788 if (TREE_CODE (arg1
) == FIXED_CST
)
1789 return fold_convert_const_fixed_from_fixed (type
, arg1
);
1790 else if (TREE_CODE (arg1
) == INTEGER_CST
)
1791 return fold_convert_const_fixed_from_int (type
, arg1
);
1792 else if (TREE_CODE (arg1
) == REAL_CST
)
1793 return fold_convert_const_fixed_from_real (type
, arg1
);
1798 /* Construct a vector of zero elements of vector type TYPE. */
1801 build_zero_vector (tree type
)
1805 t
= fold_convert_const (NOP_EXPR
, TREE_TYPE (type
), integer_zero_node
);
1806 return build_vector_from_val (type
, t
);
1809 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
1812 fold_convertible_p (const_tree type
, const_tree arg
)
1814 tree orig
= TREE_TYPE (arg
);
1819 if (TREE_CODE (arg
) == ERROR_MARK
1820 || TREE_CODE (type
) == ERROR_MARK
1821 || TREE_CODE (orig
) == ERROR_MARK
)
1824 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
))
1827 switch (TREE_CODE (type
))
1829 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
1830 case POINTER_TYPE
: case REFERENCE_TYPE
:
1832 if (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
1833 || TREE_CODE (orig
) == OFFSET_TYPE
)
1835 return (TREE_CODE (orig
) == VECTOR_TYPE
1836 && tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
1839 case FIXED_POINT_TYPE
:
1843 return TREE_CODE (type
) == TREE_CODE (orig
);
1850 /* Convert expression ARG to type TYPE. Used by the middle-end for
1851 simple conversions in preference to calling the front-end's convert. */
1854 fold_convert_loc (location_t loc
, tree type
, tree arg
)
1856 tree orig
= TREE_TYPE (arg
);
1862 if (TREE_CODE (arg
) == ERROR_MARK
1863 || TREE_CODE (type
) == ERROR_MARK
1864 || TREE_CODE (orig
) == ERROR_MARK
)
1865 return error_mark_node
;
1867 switch (TREE_CODE (type
))
1870 case REFERENCE_TYPE
:
1871 /* Handle conversions between pointers to different address spaces. */
1872 if (POINTER_TYPE_P (orig
)
1873 && (TYPE_ADDR_SPACE (TREE_TYPE (type
))
1874 != TYPE_ADDR_SPACE (TREE_TYPE (orig
))))
1875 return fold_build1_loc (loc
, ADDR_SPACE_CONVERT_EXPR
, type
, arg
);
1878 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
1880 if (TREE_CODE (arg
) == INTEGER_CST
)
1882 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
1883 if (tem
!= NULL_TREE
)
1886 if (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
1887 || TREE_CODE (orig
) == OFFSET_TYPE
)
1888 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
1889 if (TREE_CODE (orig
) == COMPLEX_TYPE
)
1890 return fold_convert_loc (loc
, type
,
1891 fold_build1_loc (loc
, REALPART_EXPR
,
1892 TREE_TYPE (orig
), arg
));
1893 gcc_assert (TREE_CODE (orig
) == VECTOR_TYPE
1894 && tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
1895 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
1898 if (TREE_CODE (arg
) == INTEGER_CST
)
1900 tem
= fold_convert_const (FLOAT_EXPR
, type
, arg
);
1901 if (tem
!= NULL_TREE
)
1904 else if (TREE_CODE (arg
) == REAL_CST
)
1906 tem
= fold_convert_const (NOP_EXPR
, type
, arg
);
1907 if (tem
!= NULL_TREE
)
1910 else if (TREE_CODE (arg
) == FIXED_CST
)
1912 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
1913 if (tem
!= NULL_TREE
)
1917 switch (TREE_CODE (orig
))
1920 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
1921 case POINTER_TYPE
: case REFERENCE_TYPE
:
1922 return fold_build1_loc (loc
, FLOAT_EXPR
, type
, arg
);
1925 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
1927 case FIXED_POINT_TYPE
:
1928 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
1931 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
1932 return fold_convert_loc (loc
, type
, tem
);
1938 case FIXED_POINT_TYPE
:
1939 if (TREE_CODE (arg
) == FIXED_CST
|| TREE_CODE (arg
) == INTEGER_CST
1940 || TREE_CODE (arg
) == REAL_CST
)
1942 tem
= fold_convert_const (FIXED_CONVERT_EXPR
, type
, arg
);
1943 if (tem
!= NULL_TREE
)
1944 goto fold_convert_exit
;
1947 switch (TREE_CODE (orig
))
1949 case FIXED_POINT_TYPE
:
1954 return fold_build1_loc (loc
, FIXED_CONVERT_EXPR
, type
, arg
);
1957 tem
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
1958 return fold_convert_loc (loc
, type
, tem
);
1965 switch (TREE_CODE (orig
))
1968 case BOOLEAN_TYPE
: case ENUMERAL_TYPE
:
1969 case POINTER_TYPE
: case REFERENCE_TYPE
:
1971 case FIXED_POINT_TYPE
:
1972 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
1973 fold_convert_loc (loc
, TREE_TYPE (type
), arg
),
1974 fold_convert_loc (loc
, TREE_TYPE (type
),
1975 integer_zero_node
));
1980 if (TREE_CODE (arg
) == COMPLEX_EXPR
)
1982 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
),
1983 TREE_OPERAND (arg
, 0));
1984 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
),
1985 TREE_OPERAND (arg
, 1));
1986 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
1989 arg
= save_expr (arg
);
1990 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, TREE_TYPE (orig
), arg
);
1991 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, TREE_TYPE (orig
), arg
);
1992 rpart
= fold_convert_loc (loc
, TREE_TYPE (type
), rpart
);
1993 ipart
= fold_convert_loc (loc
, TREE_TYPE (type
), ipart
);
1994 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
, ipart
);
2002 if (integer_zerop (arg
))
2003 return build_zero_vector (type
);
2004 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (orig
)));
2005 gcc_assert (INTEGRAL_TYPE_P (orig
) || POINTER_TYPE_P (orig
)
2006 || TREE_CODE (orig
) == VECTOR_TYPE
);
2007 return fold_build1_loc (loc
, VIEW_CONVERT_EXPR
, type
, arg
);
2010 tem
= fold_ignored_result (arg
);
2011 return fold_build1_loc (loc
, NOP_EXPR
, type
, tem
);
2014 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig
))
2015 return fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
2019 protected_set_expr_location_unshare (tem
, loc
);
2023 /* Return false if expr can be assumed not to be an lvalue, true
2027 maybe_lvalue_p (const_tree x
)
2029 /* We only need to wrap lvalue tree codes. */
2030 switch (TREE_CODE (x
))
2043 case ARRAY_RANGE_REF
:
2049 case PREINCREMENT_EXPR
:
2050 case PREDECREMENT_EXPR
:
2052 case TRY_CATCH_EXPR
:
2053 case WITH_CLEANUP_EXPR
:
2062 /* Assume the worst for front-end tree codes. */
2063 if ((int)TREE_CODE (x
) >= NUM_TREE_CODES
)
2071 /* Return an expr equal to X but certainly not valid as an lvalue. */
2074 non_lvalue_loc (location_t loc
, tree x
)
2076 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2081 if (! maybe_lvalue_p (x
))
2083 return build1_loc (loc
, NON_LVALUE_EXPR
, TREE_TYPE (x
), x
);
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;
2091 /* When pedantic, return an expr equal to X but certainly not valid as a
2092 pedantic lvalue. Otherwise, return X. */
2095 pedantic_non_lvalue_loc (location_t loc
, tree x
)
2097 if (pedantic_lvalues
)
2098 return non_lvalue_loc (loc
, x
);
2100 return protected_set_expr_location_unshare (x
, loc
);
2103 /* Given a tree comparison code, return the code that is the logical inverse
2104 of the given code. It is not safe to do this for floating-point
2105 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2106 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2109 invert_tree_comparison (enum tree_code code
, bool honor_nans
)
2111 if (honor_nans
&& flag_trapping_math
)
2121 return honor_nans
? UNLE_EXPR
: LE_EXPR
;
2123 return honor_nans
? UNLT_EXPR
: LT_EXPR
;
2125 return honor_nans
? UNGE_EXPR
: GE_EXPR
;
2127 return honor_nans
? UNGT_EXPR
: GT_EXPR
;
2141 return UNORDERED_EXPR
;
2142 case UNORDERED_EXPR
:
2143 return ORDERED_EXPR
;
2149 /* Similar, but return the comparison that results if the operands are
2150 swapped. This is safe for floating-point. */
2153 swap_tree_comparison (enum tree_code code
)
2160 case UNORDERED_EXPR
:
2186 /* Convert a comparison tree code from an enum tree_code representation
2187 into a compcode bit-based encoding. This function is the inverse of
2188 compcode_to_comparison. */
2190 static enum comparison_code
2191 comparison_to_compcode (enum tree_code code
)
2208 return COMPCODE_ORD
;
2209 case UNORDERED_EXPR
:
2210 return COMPCODE_UNORD
;
2212 return COMPCODE_UNLT
;
2214 return COMPCODE_UNEQ
;
2216 return COMPCODE_UNLE
;
2218 return COMPCODE_UNGT
;
2220 return COMPCODE_LTGT
;
2222 return COMPCODE_UNGE
;
2228 /* Convert a compcode bit-based encoding of a comparison operator back
2229 to GCC's enum tree_code representation. This function is the
2230 inverse of comparison_to_compcode. */
2232 static enum tree_code
2233 compcode_to_comparison (enum comparison_code code
)
2250 return ORDERED_EXPR
;
2251 case COMPCODE_UNORD
:
2252 return UNORDERED_EXPR
;
2270 /* Return a tree for the comparison which is the combination of
2271 doing the AND or OR (depending on CODE) of the two operations LCODE
2272 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2273 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2274 if this makes the transformation invalid. */
2277 combine_comparisons (location_t loc
,
2278 enum tree_code code
, enum tree_code lcode
,
2279 enum tree_code rcode
, tree truth_type
,
2280 tree ll_arg
, tree lr_arg
)
2282 bool honor_nans
= HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg
)));
2283 enum comparison_code lcompcode
= comparison_to_compcode (lcode
);
2284 enum comparison_code rcompcode
= comparison_to_compcode (rcode
);
2289 case TRUTH_AND_EXPR
: case TRUTH_ANDIF_EXPR
:
2290 compcode
= lcompcode
& rcompcode
;
2293 case TRUTH_OR_EXPR
: case TRUTH_ORIF_EXPR
:
2294 compcode
= lcompcode
| rcompcode
;
2303 /* Eliminate unordered comparisons, as well as LTGT and ORD
2304 which are not used unless the mode has NaNs. */
2305 compcode
&= ~COMPCODE_UNORD
;
2306 if (compcode
== COMPCODE_LTGT
)
2307 compcode
= COMPCODE_NE
;
2308 else if (compcode
== COMPCODE_ORD
)
2309 compcode
= COMPCODE_TRUE
;
2311 else if (flag_trapping_math
)
2313 /* Check that the original operation and the optimized ones will trap
2314 under the same condition. */
2315 bool ltrap
= (lcompcode
& COMPCODE_UNORD
) == 0
2316 && (lcompcode
!= COMPCODE_EQ
)
2317 && (lcompcode
!= COMPCODE_ORD
);
2318 bool rtrap
= (rcompcode
& COMPCODE_UNORD
) == 0
2319 && (rcompcode
!= COMPCODE_EQ
)
2320 && (rcompcode
!= COMPCODE_ORD
);
2321 bool trap
= (compcode
& COMPCODE_UNORD
) == 0
2322 && (compcode
!= COMPCODE_EQ
)
2323 && (compcode
!= COMPCODE_ORD
);
2325 /* In a short-circuited boolean expression the LHS might be
2326 such that the RHS, if evaluated, will never trap. For
2327 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2328 if neither x nor y is NaN. (This is a mixed blessing: for
2329 example, the expression above will never trap, hence
2330 optimizing it to x < y would be invalid). */
2331 if ((code
== TRUTH_ORIF_EXPR
&& (lcompcode
& COMPCODE_UNORD
))
2332 || (code
== TRUTH_ANDIF_EXPR
&& !(lcompcode
& COMPCODE_UNORD
)))
2335 /* If the comparison was short-circuited, and only the RHS
2336 trapped, we may now generate a spurious trap. */
2338 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
2341 /* If we changed the conditions that cause a trap, we lose. */
2342 if ((ltrap
|| rtrap
) != trap
)
2346 if (compcode
== COMPCODE_TRUE
)
2347 return constant_boolean_node (true, truth_type
);
2348 else if (compcode
== COMPCODE_FALSE
)
2349 return constant_boolean_node (false, truth_type
);
2352 enum tree_code tcode
;
2354 tcode
= compcode_to_comparison ((enum comparison_code
) compcode
);
2355 return fold_build2_loc (loc
, tcode
, truth_type
, ll_arg
, lr_arg
);
2359 /* Return nonzero if two operands (typically of the same tree node)
2360 are necessarily equal. If either argument has side-effects this
2361 function returns zero. FLAGS modifies behavior as follows:
2363 If OEP_ONLY_CONST is set, only return nonzero for constants.
2364 This function tests whether the operands are indistinguishable;
2365 it does not test whether they are equal using C's == operation.
2366 The distinction is important for IEEE floating point, because
2367 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2368 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2370 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2371 even though it may hold multiple values during a function.
2372 This is because a GCC tree node guarantees that nothing else is
2373 executed between the evaluation of its "operands" (which may often
2374 be evaluated in arbitrary order). Hence if the operands themselves
2375 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2376 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2377 unset means assuming isochronic (or instantaneous) tree equivalence.
2378 Unless comparing arbitrary expression trees, such as from different
2379 statements, this flag can usually be left unset.
2381 If OEP_PURE_SAME is set, then pure functions with identical arguments
2382 are considered the same. It is used when the caller has other ways
2383 to ensure that global memory is unchanged in between. */
2386 operand_equal_p (const_tree arg0
, const_tree arg1
, unsigned int flags
)
2388 /* If either is ERROR_MARK, they aren't equal. */
2389 if (TREE_CODE (arg0
) == ERROR_MARK
|| TREE_CODE (arg1
) == ERROR_MARK
2390 || TREE_TYPE (arg0
) == error_mark_node
2391 || TREE_TYPE (arg1
) == error_mark_node
)
2394 /* Similar, if either does not have a type (like a released SSA name),
2395 they aren't equal. */
2396 if (!TREE_TYPE (arg0
) || !TREE_TYPE (arg1
))
2399 /* Check equality of integer constants before bailing out due to
2400 precision differences. */
2401 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
2402 return tree_int_cst_equal (arg0
, arg1
);
2404 /* If both types don't have the same signedness, then we can't consider
2405 them equal. We must check this before the STRIP_NOPS calls
2406 because they may change the signedness of the arguments. As pointers
2407 strictly don't have a signedness, require either two pointers or
2408 two non-pointers as well. */
2409 if (TYPE_UNSIGNED (TREE_TYPE (arg0
)) != TYPE_UNSIGNED (TREE_TYPE (arg1
))
2410 || POINTER_TYPE_P (TREE_TYPE (arg0
)) != POINTER_TYPE_P (TREE_TYPE (arg1
)))
2413 /* We cannot consider pointers to different address space equal. */
2414 if (POINTER_TYPE_P (TREE_TYPE (arg0
)) && POINTER_TYPE_P (TREE_TYPE (arg1
))
2415 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0
)))
2416 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1
)))))
2419 /* If both types don't have the same precision, then it is not safe
2421 if (TYPE_PRECISION (TREE_TYPE (arg0
)) != TYPE_PRECISION (TREE_TYPE (arg1
)))
2427 /* In case both args are comparisons but with different comparison
2428 code, try to swap the comparison operands of one arg to produce
2429 a match and compare that variant. */
2430 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2431 && COMPARISON_CLASS_P (arg0
)
2432 && COMPARISON_CLASS_P (arg1
))
2434 enum tree_code swap_code
= swap_tree_comparison (TREE_CODE (arg1
));
2436 if (TREE_CODE (arg0
) == swap_code
)
2437 return operand_equal_p (TREE_OPERAND (arg0
, 0),
2438 TREE_OPERAND (arg1
, 1), flags
)
2439 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2440 TREE_OPERAND (arg1
, 0), flags
);
2443 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2444 /* This is needed for conversions and for COMPONENT_REF.
2445 Might as well play it safe and always test this. */
2446 || TREE_CODE (TREE_TYPE (arg0
)) == ERROR_MARK
2447 || TREE_CODE (TREE_TYPE (arg1
)) == ERROR_MARK
2448 || TYPE_MODE (TREE_TYPE (arg0
)) != TYPE_MODE (TREE_TYPE (arg1
)))
2451 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2452 We don't care about side effects in that case because the SAVE_EXPR
2453 takes care of that for us. In all other cases, two expressions are
2454 equal if they have no side effects. If we have two identical
2455 expressions with side effects that should be treated the same due
2456 to the only side effects being identical SAVE_EXPR's, that will
2457 be detected in the recursive calls below.
2458 If we are taking an invariant address of two identical objects
2459 they are necessarily equal as well. */
2460 if (arg0
== arg1
&& ! (flags
& OEP_ONLY_CONST
)
2461 && (TREE_CODE (arg0
) == SAVE_EXPR
2462 || (flags
& OEP_CONSTANT_ADDRESS_OF
)
2463 || (! TREE_SIDE_EFFECTS (arg0
) && ! TREE_SIDE_EFFECTS (arg1
))))
2466 /* Next handle constant cases, those for which we can return 1 even
2467 if ONLY_CONST is set. */
2468 if (TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
))
2469 switch (TREE_CODE (arg0
))
2472 return tree_int_cst_equal (arg0
, arg1
);
2475 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0
),
2476 TREE_FIXED_CST (arg1
));
2479 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0
),
2480 TREE_REAL_CST (arg1
)))
2484 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
))))
2486 /* If we do not distinguish between signed and unsigned zero,
2487 consider them equal. */
2488 if (real_zerop (arg0
) && real_zerop (arg1
))
2497 v1
= TREE_VECTOR_CST_ELTS (arg0
);
2498 v2
= TREE_VECTOR_CST_ELTS (arg1
);
2501 if (!operand_equal_p (TREE_VALUE (v1
), TREE_VALUE (v2
),
2504 v1
= TREE_CHAIN (v1
);
2505 v2
= TREE_CHAIN (v2
);
2512 return (operand_equal_p (TREE_REALPART (arg0
), TREE_REALPART (arg1
),
2514 && operand_equal_p (TREE_IMAGPART (arg0
), TREE_IMAGPART (arg1
),
2518 return (TREE_STRING_LENGTH (arg0
) == TREE_STRING_LENGTH (arg1
)
2519 && ! memcmp (TREE_STRING_POINTER (arg0
),
2520 TREE_STRING_POINTER (arg1
),
2521 TREE_STRING_LENGTH (arg0
)));
2524 return operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0),
2525 TREE_CONSTANT (arg0
) && TREE_CONSTANT (arg1
)
2526 ? OEP_CONSTANT_ADDRESS_OF
: 0);
2531 if (flags
& OEP_ONLY_CONST
)
2534 /* Define macros to test an operand from arg0 and arg1 for equality and a
2535 variant that allows null and views null as being different from any
2536 non-null value. In the latter case, if either is null, the both
2537 must be; otherwise, do the normal comparison. */
2538 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2539 TREE_OPERAND (arg1, N), flags)
2541 #define OP_SAME_WITH_NULL(N) \
2542 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2543 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2545 switch (TREE_CODE_CLASS (TREE_CODE (arg0
)))
2548 /* Two conversions are equal only if signedness and modes match. */
2549 switch (TREE_CODE (arg0
))
2552 case FIX_TRUNC_EXPR
:
2553 if (TYPE_UNSIGNED (TREE_TYPE (arg0
))
2554 != TYPE_UNSIGNED (TREE_TYPE (arg1
)))
2564 case tcc_comparison
:
2566 if (OP_SAME (0) && OP_SAME (1))
2569 /* For commutative ops, allow the other order. */
2570 return (commutative_tree_code (TREE_CODE (arg0
))
2571 && operand_equal_p (TREE_OPERAND (arg0
, 0),
2572 TREE_OPERAND (arg1
, 1), flags
)
2573 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2574 TREE_OPERAND (arg1
, 0), flags
));
2577 /* If either of the pointer (or reference) expressions we are
2578 dereferencing contain a side effect, these cannot be equal. */
2579 if (TREE_SIDE_EFFECTS (arg0
)
2580 || TREE_SIDE_EFFECTS (arg1
))
2583 switch (TREE_CODE (arg0
))
2591 /* Require equal access sizes, and similar pointer types.
2592 We can have incomplete types for array references of
2593 variable-sized arrays from the Fortran frontend.
2595 return ((TYPE_SIZE (TREE_TYPE (arg0
)) == TYPE_SIZE (TREE_TYPE (arg1
))
2596 || (TYPE_SIZE (TREE_TYPE (arg0
))
2597 && TYPE_SIZE (TREE_TYPE (arg1
))
2598 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0
)),
2599 TYPE_SIZE (TREE_TYPE (arg1
)), flags
)))
2600 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0
, 1)))
2601 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1
, 1))))
2602 && OP_SAME (0) && OP_SAME (1));
2605 case ARRAY_RANGE_REF
:
2606 /* Operands 2 and 3 may be null.
2607 Compare the array index by value if it is constant first as we
2608 may have different types but same value here. */
2610 && (tree_int_cst_equal (TREE_OPERAND (arg0
, 1),
2611 TREE_OPERAND (arg1
, 1))
2613 && OP_SAME_WITH_NULL (2)
2614 && OP_SAME_WITH_NULL (3));
2617 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2618 may be NULL when we're called to compare MEM_EXPRs. */
2619 return OP_SAME_WITH_NULL (0)
2621 && OP_SAME_WITH_NULL (2);
2624 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2630 case tcc_expression
:
2631 switch (TREE_CODE (arg0
))
2634 case TRUTH_NOT_EXPR
:
2637 case TRUTH_ANDIF_EXPR
:
2638 case TRUTH_ORIF_EXPR
:
2639 return OP_SAME (0) && OP_SAME (1);
2642 case WIDEN_MULT_PLUS_EXPR
:
2643 case WIDEN_MULT_MINUS_EXPR
:
2646 /* The multiplcation operands are commutative. */
2649 case TRUTH_AND_EXPR
:
2651 case TRUTH_XOR_EXPR
:
2652 if (OP_SAME (0) && OP_SAME (1))
2655 /* Otherwise take into account this is a commutative operation. */
2656 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
2657 TREE_OPERAND (arg1
, 1), flags
)
2658 && operand_equal_p (TREE_OPERAND (arg0
, 1),
2659 TREE_OPERAND (arg1
, 0), flags
));
2664 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2671 switch (TREE_CODE (arg0
))
2674 /* If the CALL_EXPRs call different functions, then they
2675 clearly can not be equal. */
2676 if (! operand_equal_p (CALL_EXPR_FN (arg0
), CALL_EXPR_FN (arg1
),
2681 unsigned int cef
= call_expr_flags (arg0
);
2682 if (flags
& OEP_PURE_SAME
)
2683 cef
&= ECF_CONST
| ECF_PURE
;
2690 /* Now see if all the arguments are the same. */
2692 const_call_expr_arg_iterator iter0
, iter1
;
2694 for (a0
= first_const_call_expr_arg (arg0
, &iter0
),
2695 a1
= first_const_call_expr_arg (arg1
, &iter1
);
2697 a0
= next_const_call_expr_arg (&iter0
),
2698 a1
= next_const_call_expr_arg (&iter1
))
2699 if (! operand_equal_p (a0
, a1
, flags
))
2702 /* If we get here and both argument lists are exhausted
2703 then the CALL_EXPRs are equal. */
2704 return ! (a0
|| a1
);
2710 case tcc_declaration
:
2711 /* Consider __builtin_sqrt equal to sqrt. */
2712 return (TREE_CODE (arg0
) == FUNCTION_DECL
2713 && DECL_BUILT_IN (arg0
) && DECL_BUILT_IN (arg1
)
2714 && DECL_BUILT_IN_CLASS (arg0
) == DECL_BUILT_IN_CLASS (arg1
)
2715 && DECL_FUNCTION_CODE (arg0
) == DECL_FUNCTION_CODE (arg1
));
2722 #undef OP_SAME_WITH_NULL
2725 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2726 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2728 When in doubt, return 0. */
2731 operand_equal_for_comparison_p (tree arg0
, tree arg1
, tree other
)
2733 int unsignedp1
, unsignedpo
;
2734 tree primarg0
, primarg1
, primother
;
2735 unsigned int correct_width
;
2737 if (operand_equal_p (arg0
, arg1
, 0))
2740 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
2741 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1
)))
2744 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2745 and see if the inner values are the same. This removes any
2746 signedness comparison, which doesn't matter here. */
2747 primarg0
= arg0
, primarg1
= arg1
;
2748 STRIP_NOPS (primarg0
);
2749 STRIP_NOPS (primarg1
);
2750 if (operand_equal_p (primarg0
, primarg1
, 0))
2753 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2754 actual comparison operand, ARG0.
2756 First throw away any conversions to wider types
2757 already present in the operands. */
2759 primarg1
= get_narrower (arg1
, &unsignedp1
);
2760 primother
= get_narrower (other
, &unsignedpo
);
2762 correct_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
2763 if (unsignedp1
== unsignedpo
2764 && TYPE_PRECISION (TREE_TYPE (primarg1
)) < correct_width
2765 && TYPE_PRECISION (TREE_TYPE (primother
)) < correct_width
)
2767 tree type
= TREE_TYPE (arg0
);
2769 /* Make sure shorter operand is extended the right way
2770 to match the longer operand. */
2771 primarg1
= fold_convert (signed_or_unsigned_type_for
2772 (unsignedp1
, TREE_TYPE (primarg1
)), primarg1
);
2774 if (operand_equal_p (arg0
, fold_convert (type
, primarg1
), 0))
2781 /* See if ARG is an expression that is either a comparison or is performing
2782 arithmetic on comparisons. The comparisons must only be comparing
2783 two different values, which will be stored in *CVAL1 and *CVAL2; if
2784 they are nonzero it means that some operands have already been found.
2785 No variables may be used anywhere else in the expression except in the
2786 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2787 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2789 If this is true, return 1. Otherwise, return zero. */
2792 twoval_comparison_p (tree arg
, tree
*cval1
, tree
*cval2
, int *save_p
)
2794 enum tree_code code
= TREE_CODE (arg
);
2795 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
2797 /* We can handle some of the tcc_expression cases here. */
2798 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
2800 else if (tclass
== tcc_expression
2801 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
2802 || code
== COMPOUND_EXPR
))
2803 tclass
= tcc_binary
;
2805 else if (tclass
== tcc_expression
&& code
== SAVE_EXPR
2806 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg
, 0)))
2808 /* If we've already found a CVAL1 or CVAL2, this expression is
2809 two complex to handle. */
2810 if (*cval1
|| *cval2
)
2820 return twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
);
2823 return (twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
)
2824 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
2825 cval1
, cval2
, save_p
));
2830 case tcc_expression
:
2831 if (code
== COND_EXPR
)
2832 return (twoval_comparison_p (TREE_OPERAND (arg
, 0),
2833 cval1
, cval2
, save_p
)
2834 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
2835 cval1
, cval2
, save_p
)
2836 && twoval_comparison_p (TREE_OPERAND (arg
, 2),
2837 cval1
, cval2
, save_p
));
2840 case tcc_comparison
:
2841 /* First see if we can handle the first operand, then the second. For
2842 the second operand, we know *CVAL1 can't be zero. It must be that
2843 one side of the comparison is each of the values; test for the
2844 case where this isn't true by failing if the two operands
2847 if (operand_equal_p (TREE_OPERAND (arg
, 0),
2848 TREE_OPERAND (arg
, 1), 0))
2852 *cval1
= TREE_OPERAND (arg
, 0);
2853 else if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 0), 0))
2855 else if (*cval2
== 0)
2856 *cval2
= TREE_OPERAND (arg
, 0);
2857 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 0), 0))
2862 if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 1), 0))
2864 else if (*cval2
== 0)
2865 *cval2
= TREE_OPERAND (arg
, 1);
2866 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 1), 0))
2878 /* ARG is a tree that is known to contain just arithmetic operations and
2879 comparisons. Evaluate the operations in the tree substituting NEW0 for
2880 any occurrence of OLD0 as an operand of a comparison and likewise for
2884 eval_subst (location_t loc
, tree arg
, tree old0
, tree new0
,
2885 tree old1
, tree new1
)
2887 tree type
= TREE_TYPE (arg
);
2888 enum tree_code code
= TREE_CODE (arg
);
2889 enum tree_code_class tclass
= TREE_CODE_CLASS (code
);
2891 /* We can handle some of the tcc_expression cases here. */
2892 if (tclass
== tcc_expression
&& code
== TRUTH_NOT_EXPR
)
2894 else if (tclass
== tcc_expression
2895 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
2896 tclass
= tcc_binary
;
2901 return fold_build1_loc (loc
, code
, type
,
2902 eval_subst (loc
, TREE_OPERAND (arg
, 0),
2903 old0
, new0
, old1
, new1
));
2906 return fold_build2_loc (loc
, code
, type
,
2907 eval_subst (loc
, TREE_OPERAND (arg
, 0),
2908 old0
, new0
, old1
, new1
),
2909 eval_subst (loc
, TREE_OPERAND (arg
, 1),
2910 old0
, new0
, old1
, new1
));
2912 case tcc_expression
:
2916 return eval_subst (loc
, TREE_OPERAND (arg
, 0), old0
, new0
,
2920 return eval_subst (loc
, TREE_OPERAND (arg
, 1), old0
, new0
,
2924 return fold_build3_loc (loc
, code
, type
,
2925 eval_subst (loc
, TREE_OPERAND (arg
, 0),
2926 old0
, new0
, old1
, new1
),
2927 eval_subst (loc
, TREE_OPERAND (arg
, 1),
2928 old0
, new0
, old1
, new1
),
2929 eval_subst (loc
, TREE_OPERAND (arg
, 2),
2930 old0
, new0
, old1
, new1
));
2934 /* Fall through - ??? */
2936 case tcc_comparison
:
2938 tree arg0
= TREE_OPERAND (arg
, 0);
2939 tree arg1
= TREE_OPERAND (arg
, 1);
2941 /* We need to check both for exact equality and tree equality. The
2942 former will be true if the operand has a side-effect. In that
2943 case, we know the operand occurred exactly once. */
2945 if (arg0
== old0
|| operand_equal_p (arg0
, old0
, 0))
2947 else if (arg0
== old1
|| operand_equal_p (arg0
, old1
, 0))
2950 if (arg1
== old0
|| operand_equal_p (arg1
, old0
, 0))
2952 else if (arg1
== old1
|| operand_equal_p (arg1
, old1
, 0))
2955 return fold_build2_loc (loc
, code
, type
, arg0
, arg1
);
2963 /* Return a tree for the case when the result of an expression is RESULT
2964 converted to TYPE and OMITTED was previously an operand of the expression
2965 but is now not needed (e.g., we folded OMITTED * 0).
2967 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2968 the conversion of RESULT to TYPE. */
2971 omit_one_operand_loc (location_t loc
, tree type
, tree result
, tree omitted
)
2973 tree t
= fold_convert_loc (loc
, type
, result
);
2975 /* If the resulting operand is an empty statement, just return the omitted
2976 statement casted to void. */
2977 if (IS_EMPTY_STMT (t
) && TREE_SIDE_EFFECTS (omitted
))
2978 return build1_loc (loc
, NOP_EXPR
, void_type_node
,
2979 fold_ignored_result (omitted
));
2981 if (TREE_SIDE_EFFECTS (omitted
))
2982 return build2_loc (loc
, COMPOUND_EXPR
, type
,
2983 fold_ignored_result (omitted
), t
);
2985 return non_lvalue_loc (loc
, t
);
2988 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2991 pedantic_omit_one_operand_loc (location_t loc
, tree type
, tree result
,
2994 tree t
= fold_convert_loc (loc
, type
, result
);
2996 /* If the resulting operand is an empty statement, just return the omitted
2997 statement casted to void. */
2998 if (IS_EMPTY_STMT (t
) && TREE_SIDE_EFFECTS (omitted
))
2999 return build1_loc (loc
, NOP_EXPR
, void_type_node
,
3000 fold_ignored_result (omitted
));
3002 if (TREE_SIDE_EFFECTS (omitted
))
3003 return build2_loc (loc
, COMPOUND_EXPR
, type
,
3004 fold_ignored_result (omitted
), t
);
3006 return pedantic_non_lvalue_loc (loc
, t
);
3009 /* Return a tree for the case when the result of an expression is RESULT
3010 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3011 of the expression but are now not needed.
3013 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3014 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3015 evaluated before OMITTED2. Otherwise, if neither has side effects,
3016 just do the conversion of RESULT to TYPE. */
3019 omit_two_operands_loc (location_t loc
, tree type
, tree result
,
3020 tree omitted1
, tree omitted2
)
3022 tree t
= fold_convert_loc (loc
, type
, result
);
3024 if (TREE_SIDE_EFFECTS (omitted2
))
3025 t
= build2_loc (loc
, COMPOUND_EXPR
, type
, omitted2
, t
);
3026 if (TREE_SIDE_EFFECTS (omitted1
))
3027 t
= build2_loc (loc
, COMPOUND_EXPR
, type
, omitted1
, t
);
3029 return TREE_CODE (t
) != COMPOUND_EXPR
? non_lvalue_loc (loc
, t
) : t
;
3033 /* Return a simplified tree node for the truth-negation of ARG. This
3034 never alters ARG itself. We assume that ARG is an operation that
3035 returns a truth value (0 or 1).
3037 FIXME: one would think we would fold the result, but it causes
3038 problems with the dominator optimizer. */
3041 fold_truth_not_expr (location_t loc
, tree arg
)
3043 tree type
= TREE_TYPE (arg
);
3044 enum tree_code code
= TREE_CODE (arg
);
3045 location_t loc1
, loc2
;
3047 /* If this is a comparison, we can simply invert it, except for
3048 floating-point non-equality comparisons, in which case we just
3049 enclose a TRUTH_NOT_EXPR around what we have. */
3051 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
3053 tree op_type
= TREE_TYPE (TREE_OPERAND (arg
, 0));
3054 if (FLOAT_TYPE_P (op_type
)
3055 && flag_trapping_math
3056 && code
!= ORDERED_EXPR
&& code
!= UNORDERED_EXPR
3057 && code
!= NE_EXPR
&& code
!= EQ_EXPR
)
3060 code
= invert_tree_comparison (code
, HONOR_NANS (TYPE_MODE (op_type
)));
3061 if (code
== ERROR_MARK
)
3064 return build2_loc (loc
, code
, type
, TREE_OPERAND (arg
, 0),
3065 TREE_OPERAND (arg
, 1));
3071 return constant_boolean_node (integer_zerop (arg
), type
);
3073 case TRUTH_AND_EXPR
:
3074 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3075 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3076 return build2_loc (loc
, TRUTH_OR_EXPR
, type
,
3077 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3078 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3081 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3082 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3083 return build2_loc (loc
, TRUTH_AND_EXPR
, type
,
3084 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3085 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3087 case TRUTH_XOR_EXPR
:
3088 /* Here we can invert either operand. We invert the first operand
3089 unless the second operand is a TRUTH_NOT_EXPR in which case our
3090 result is the XOR of the first operand with the inside of the
3091 negation of the second operand. */
3093 if (TREE_CODE (TREE_OPERAND (arg
, 1)) == TRUTH_NOT_EXPR
)
3094 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
, TREE_OPERAND (arg
, 0),
3095 TREE_OPERAND (TREE_OPERAND (arg
, 1), 0));
3097 return build2_loc (loc
, TRUTH_XOR_EXPR
, type
,
3098 invert_truthvalue_loc (loc
, TREE_OPERAND (arg
, 0)),
3099 TREE_OPERAND (arg
, 1));
3101 case TRUTH_ANDIF_EXPR
:
3102 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3103 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3104 return build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
3105 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3106 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3108 case TRUTH_ORIF_EXPR
:
3109 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3110 loc2
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3111 return build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
3112 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)),
3113 invert_truthvalue_loc (loc2
, TREE_OPERAND (arg
, 1)));
3115 case TRUTH_NOT_EXPR
:
3116 return TREE_OPERAND (arg
, 0);
3120 tree arg1
= TREE_OPERAND (arg
, 1);
3121 tree arg2
= TREE_OPERAND (arg
, 2);
3123 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3124 loc2
= expr_location_or (TREE_OPERAND (arg
, 2), loc
);
3126 /* A COND_EXPR may have a throw as one operand, which
3127 then has void type. Just leave void operands
3129 return build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg
, 0),
3130 VOID_TYPE_P (TREE_TYPE (arg1
))
3131 ? arg1
: invert_truthvalue_loc (loc1
, arg1
),
3132 VOID_TYPE_P (TREE_TYPE (arg2
))
3133 ? arg2
: invert_truthvalue_loc (loc2
, arg2
));
3137 loc1
= expr_location_or (TREE_OPERAND (arg
, 1), loc
);
3138 return build2_loc (loc
, COMPOUND_EXPR
, type
,
3139 TREE_OPERAND (arg
, 0),
3140 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 1)));
3142 case NON_LVALUE_EXPR
:
3143 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3144 return invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0));
3147 if (TREE_CODE (TREE_TYPE (arg
)) == BOOLEAN_TYPE
)
3148 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
3150 /* ... fall through ... */
3153 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3154 return build1_loc (loc
, TREE_CODE (arg
), type
,
3155 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
3158 if (!integer_onep (TREE_OPERAND (arg
, 1)))
3160 return build2_loc (loc
, EQ_EXPR
, type
, arg
, build_int_cst (type
, 0));
3163 return build1_loc (loc
, TRUTH_NOT_EXPR
, type
, arg
);
3165 case CLEANUP_POINT_EXPR
:
3166 loc1
= expr_location_or (TREE_OPERAND (arg
, 0), loc
);
3167 return build1_loc (loc
, CLEANUP_POINT_EXPR
, type
,
3168 invert_truthvalue_loc (loc1
, TREE_OPERAND (arg
, 0)));
3175 /* Return a simplified tree node for the truth-negation of ARG. This
3176 never alters ARG itself. We assume that ARG is an operation that
3177 returns a truth value (0 or 1).
3179 FIXME: one would think we would fold the result, but it causes
3180 problems with the dominator optimizer. */
3183 invert_truthvalue_loc (location_t loc
, tree arg
)
3187 if (TREE_CODE (arg
) == ERROR_MARK
)
3190 tem
= fold_truth_not_expr (loc
, arg
);
3192 tem
= build1_loc (loc
, TRUTH_NOT_EXPR
, TREE_TYPE (arg
), arg
);
3197 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3198 operands are another bit-wise operation with a common input. If so,
3199 distribute the bit operations to save an operation and possibly two if
3200 constants are involved. For example, convert
3201 (A | B) & (A | C) into A | (B & C)
3202 Further simplification will occur if B and C are constants.
3204 If this optimization cannot be done, 0 will be returned. */
3207 distribute_bit_expr (location_t loc
, enum tree_code code
, tree type
,
3208 tree arg0
, tree arg1
)
3213 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
3214 || TREE_CODE (arg0
) == code
3215 || (TREE_CODE (arg0
) != BIT_AND_EXPR
3216 && TREE_CODE (arg0
) != BIT_IOR_EXPR
))
3219 if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0), 0))
3221 common
= TREE_OPERAND (arg0
, 0);
3222 left
= TREE_OPERAND (arg0
, 1);
3223 right
= TREE_OPERAND (arg1
, 1);
3225 else if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 1), 0))
3227 common
= TREE_OPERAND (arg0
, 0);
3228 left
= TREE_OPERAND (arg0
, 1);
3229 right
= TREE_OPERAND (arg1
, 0);
3231 else if (operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 0), 0))
3233 common
= TREE_OPERAND (arg0
, 1);
3234 left
= TREE_OPERAND (arg0
, 0);
3235 right
= TREE_OPERAND (arg1
, 1);
3237 else if (operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 1), 0))
3239 common
= TREE_OPERAND (arg0
, 1);
3240 left
= TREE_OPERAND (arg0
, 0);
3241 right
= TREE_OPERAND (arg1
, 0);
3246 common
= fold_convert_loc (loc
, type
, common
);
3247 left
= fold_convert_loc (loc
, type
, left
);
3248 right
= fold_convert_loc (loc
, type
, right
);
3249 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, common
,
3250 fold_build2_loc (loc
, code
, type
, left
, right
));
3253 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3254 with code CODE. This optimization is unsafe. */
3256 distribute_real_division (location_t loc
, enum tree_code code
, tree type
,
3257 tree arg0
, tree arg1
)
3259 bool mul0
= TREE_CODE (arg0
) == MULT_EXPR
;
3260 bool mul1
= TREE_CODE (arg1
) == MULT_EXPR
;
3262 /* (A / C) +- (B / C) -> (A +- B) / C. */
3264 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3265 TREE_OPERAND (arg1
, 1), 0))
3266 return fold_build2_loc (loc
, mul0
? MULT_EXPR
: RDIV_EXPR
, type
,
3267 fold_build2_loc (loc
, code
, type
,
3268 TREE_OPERAND (arg0
, 0),
3269 TREE_OPERAND (arg1
, 0)),
3270 TREE_OPERAND (arg0
, 1));
3272 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3273 if (operand_equal_p (TREE_OPERAND (arg0
, 0),
3274 TREE_OPERAND (arg1
, 0), 0)
3275 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
3276 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
)
3278 REAL_VALUE_TYPE r0
, r1
;
3279 r0
= TREE_REAL_CST (TREE_OPERAND (arg0
, 1));
3280 r1
= TREE_REAL_CST (TREE_OPERAND (arg1
, 1));
3282 real_arithmetic (&r0
, RDIV_EXPR
, &dconst1
, &r0
);
3284 real_arithmetic (&r1
, RDIV_EXPR
, &dconst1
, &r1
);
3285 real_arithmetic (&r0
, code
, &r0
, &r1
);
3286 return fold_build2_loc (loc
, MULT_EXPR
, type
,
3287 TREE_OPERAND (arg0
, 0),
3288 build_real (type
, r0
));
3294 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3295 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3298 make_bit_field_ref (location_t loc
, tree inner
, tree type
,
3299 HOST_WIDE_INT bitsize
, HOST_WIDE_INT bitpos
, int unsignedp
)
3301 tree result
, bftype
;
3305 tree size
= TYPE_SIZE (TREE_TYPE (inner
));
3306 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner
))
3307 || POINTER_TYPE_P (TREE_TYPE (inner
)))
3308 && host_integerp (size
, 0)
3309 && tree_low_cst (size
, 0) == bitsize
)
3310 return fold_convert_loc (loc
, type
, inner
);
3314 if (TYPE_PRECISION (bftype
) != bitsize
3315 || TYPE_UNSIGNED (bftype
) == !unsignedp
)
3316 bftype
= build_nonstandard_integer_type (bitsize
, 0);
3318 result
= build3_loc (loc
, BIT_FIELD_REF
, bftype
, inner
,
3319 size_int (bitsize
), bitsize_int (bitpos
));
3322 result
= fold_convert_loc (loc
, type
, result
);
3327 /* Optimize a bit-field compare.
3329 There are two cases: First is a compare against a constant and the
3330 second is a comparison of two items where the fields are at the same
3331 bit position relative to the start of a chunk (byte, halfword, word)
3332 large enough to contain it. In these cases we can avoid the shift
3333 implicit in bitfield extractions.
3335 For constants, we emit a compare of the shifted constant with the
3336 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3337 compared. For two fields at the same position, we do the ANDs with the
3338 similar mask and compare the result of the ANDs.
3340 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3341 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3342 are the left and right operands of the comparison, respectively.
3344 If the optimization described above can be done, we return the resulting
3345 tree. Otherwise we return zero. */
3348 optimize_bit_field_compare (location_t loc
, enum tree_code code
,
3349 tree compare_type
, tree lhs
, tree rhs
)
3351 HOST_WIDE_INT lbitpos
, lbitsize
, rbitpos
, rbitsize
, nbitpos
, nbitsize
;
3352 tree type
= TREE_TYPE (lhs
);
3353 tree signed_type
, unsigned_type
;
3354 int const_p
= TREE_CODE (rhs
) == INTEGER_CST
;
3355 enum machine_mode lmode
, rmode
, nmode
;
3356 int lunsignedp
, runsignedp
;
3357 int lvolatilep
= 0, rvolatilep
= 0;
3358 tree linner
, rinner
= NULL_TREE
;
3362 /* Get all the information about the extractions being done. If the bit size
3363 if the same as the size of the underlying object, we aren't doing an
3364 extraction at all and so can do nothing. We also don't want to
3365 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3366 then will no longer be able to replace it. */
3367 linner
= get_inner_reference (lhs
, &lbitsize
, &lbitpos
, &offset
, &lmode
,
3368 &lunsignedp
, &lvolatilep
, false);
3369 if (linner
== lhs
|| lbitsize
== GET_MODE_BITSIZE (lmode
) || lbitsize
< 0
3370 || offset
!= 0 || TREE_CODE (linner
) == PLACEHOLDER_EXPR
)
3375 /* If this is not a constant, we can only do something if bit positions,
3376 sizes, and signedness are the same. */
3377 rinner
= get_inner_reference (rhs
, &rbitsize
, &rbitpos
, &offset
, &rmode
,
3378 &runsignedp
, &rvolatilep
, false);
3380 if (rinner
== rhs
|| lbitpos
!= rbitpos
|| lbitsize
!= rbitsize
3381 || lunsignedp
!= runsignedp
|| offset
!= 0
3382 || TREE_CODE (rinner
) == PLACEHOLDER_EXPR
)
3386 /* See if we can find a mode to refer to this field. We should be able to,
3387 but fail if we can't. */
3389 && GET_MODE_BITSIZE (lmode
) > 0
3390 && flag_strict_volatile_bitfields
> 0)
3393 nmode
= get_best_mode (lbitsize
, lbitpos
, 0, 0,
3394 const_p
? TYPE_ALIGN (TREE_TYPE (linner
))
3395 : MIN (TYPE_ALIGN (TREE_TYPE (linner
)),
3396 TYPE_ALIGN (TREE_TYPE (rinner
))),
3397 word_mode
, lvolatilep
|| rvolatilep
);
3398 if (nmode
== VOIDmode
)
3401 /* Set signed and unsigned types of the precision of this mode for the
3403 signed_type
= lang_hooks
.types
.type_for_mode (nmode
, 0);
3404 unsigned_type
= lang_hooks
.types
.type_for_mode (nmode
, 1);
3406 /* Compute the bit position and size for the new reference and our offset
3407 within it. If the new reference is the same size as the original, we
3408 won't optimize anything, so return zero. */
3409 nbitsize
= GET_MODE_BITSIZE (nmode
);
3410 nbitpos
= lbitpos
& ~ (nbitsize
- 1);
3412 if (nbitsize
== lbitsize
)
3415 if (BYTES_BIG_ENDIAN
)
3416 lbitpos
= nbitsize
- lbitsize
- lbitpos
;
3418 /* Make the mask to be used against the extracted field. */
3419 mask
= build_int_cst_type (unsigned_type
, -1);
3420 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (nbitsize
- lbitsize
));
3421 mask
= const_binop (RSHIFT_EXPR
, mask
,
3422 size_int (nbitsize
- lbitsize
- lbitpos
));
3425 /* If not comparing with constant, just rework the comparison
3427 return fold_build2_loc (loc
, code
, compare_type
,
3428 fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3429 make_bit_field_ref (loc
, linner
,
3434 fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3435 make_bit_field_ref (loc
, rinner
,
3441 /* Otherwise, we are handling the constant case. See if the constant is too
3442 big for the field. Warn and return a tree of for 0 (false) if so. We do
3443 this not only for its own sake, but to avoid having to test for this
3444 error case below. If we didn't, we might generate wrong code.
3446 For unsigned fields, the constant shifted right by the field length should
3447 be all zero. For signed fields, the high-order bits should agree with
3452 if (! integer_zerop (const_binop (RSHIFT_EXPR
,
3453 fold_convert_loc (loc
,
3454 unsigned_type
, rhs
),
3455 size_int (lbitsize
))))
3457 warning (0, "comparison is always %d due to width of bit-field",
3459 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
3464 tree tem
= const_binop (RSHIFT_EXPR
,
3465 fold_convert_loc (loc
, signed_type
, rhs
),
3466 size_int (lbitsize
- 1));
3467 if (! integer_zerop (tem
) && ! integer_all_onesp (tem
))
3469 warning (0, "comparison is always %d due to width of bit-field",
3471 return constant_boolean_node (code
== NE_EXPR
, compare_type
);
3475 /* Single-bit compares should always be against zero. */
3476 if (lbitsize
== 1 && ! integer_zerop (rhs
))
3478 code
= code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
;
3479 rhs
= build_int_cst (type
, 0);
3482 /* Make a new bitfield reference, shift the constant over the
3483 appropriate number of bits and mask it with the computed mask
3484 (in case this was a signed field). If we changed it, make a new one. */
3485 lhs
= make_bit_field_ref (loc
, linner
, unsigned_type
, nbitsize
, nbitpos
, 1);
3488 TREE_SIDE_EFFECTS (lhs
) = 1;
3489 TREE_THIS_VOLATILE (lhs
) = 1;
3492 rhs
= const_binop (BIT_AND_EXPR
,
3493 const_binop (LSHIFT_EXPR
,
3494 fold_convert_loc (loc
, unsigned_type
, rhs
),
3495 size_int (lbitpos
)),
3498 lhs
= build2_loc (loc
, code
, compare_type
,
3499 build2 (BIT_AND_EXPR
, unsigned_type
, lhs
, mask
), rhs
);
3503 /* Subroutine for fold_truthop: decode a field reference.
3505 If EXP is a comparison reference, we return the innermost reference.
3507 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3508 set to the starting bit number.
3510 If the innermost field can be completely contained in a mode-sized
3511 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3513 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3514 otherwise it is not changed.
3516 *PUNSIGNEDP is set to the signedness of the field.
3518 *PMASK is set to the mask used. This is either contained in a
3519 BIT_AND_EXPR or derived from the width of the field.
3521 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3523 Return 0 if this is not a component reference or is one that we can't
3524 do anything with. */
3527 decode_field_reference (location_t loc
, tree exp
, HOST_WIDE_INT
*pbitsize
,
3528 HOST_WIDE_INT
*pbitpos
, enum machine_mode
*pmode
,
3529 int *punsignedp
, int *pvolatilep
,
3530 tree
*pmask
, tree
*pand_mask
)
3532 tree outer_type
= 0;
3534 tree mask
, inner
, offset
;
3536 unsigned int precision
;
3538 /* All the optimizations using this function assume integer fields.
3539 There are problems with FP fields since the type_for_size call
3540 below can fail for, e.g., XFmode. */
3541 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp
)))
3544 /* We are interested in the bare arrangement of bits, so strip everything
3545 that doesn't affect the machine mode. However, record the type of the
3546 outermost expression if it may matter below. */
3547 if (CONVERT_EXPR_P (exp
)
3548 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
3549 outer_type
= TREE_TYPE (exp
);
3552 if (TREE_CODE (exp
) == BIT_AND_EXPR
)
3554 and_mask
= TREE_OPERAND (exp
, 1);
3555 exp
= TREE_OPERAND (exp
, 0);
3556 STRIP_NOPS (exp
); STRIP_NOPS (and_mask
);
3557 if (TREE_CODE (and_mask
) != INTEGER_CST
)
3561 inner
= get_inner_reference (exp
, pbitsize
, pbitpos
, &offset
, pmode
,
3562 punsignedp
, pvolatilep
, false);
3563 if ((inner
== exp
&& and_mask
== 0)
3564 || *pbitsize
< 0 || offset
!= 0
3565 || TREE_CODE (inner
) == PLACEHOLDER_EXPR
)
3568 /* If the number of bits in the reference is the same as the bitsize of
3569 the outer type, then the outer type gives the signedness. Otherwise
3570 (in case of a small bitfield) the signedness is unchanged. */
3571 if (outer_type
&& *pbitsize
== TYPE_PRECISION (outer_type
))
3572 *punsignedp
= TYPE_UNSIGNED (outer_type
);
3574 /* Compute the mask to access the bitfield. */
3575 unsigned_type
= lang_hooks
.types
.type_for_size (*pbitsize
, 1);
3576 precision
= TYPE_PRECISION (unsigned_type
);
3578 mask
= build_int_cst_type (unsigned_type
, -1);
3580 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
3581 mask
= const_binop (RSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
));
3583 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3585 mask
= fold_build2_loc (loc
, BIT_AND_EXPR
, unsigned_type
,
3586 fold_convert_loc (loc
, unsigned_type
, and_mask
), mask
);
3589 *pand_mask
= and_mask
;
3593 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3597 all_ones_mask_p (const_tree mask
, int size
)
3599 tree type
= TREE_TYPE (mask
);
3600 unsigned int precision
= TYPE_PRECISION (type
);
3603 tmask
= build_int_cst_type (signed_type_for (type
), -1);
3606 tree_int_cst_equal (mask
,
3607 const_binop (RSHIFT_EXPR
,
3608 const_binop (LSHIFT_EXPR
, tmask
,
3609 size_int (precision
- size
)),
3610 size_int (precision
- size
)));
3613 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3614 represents the sign bit of EXP's type. If EXP represents a sign
3615 or zero extension, also test VAL against the unextended type.
3616 The return value is the (sub)expression whose sign bit is VAL,
3617 or NULL_TREE otherwise. */
3620 sign_bit_p (tree exp
, const_tree val
)
3622 unsigned HOST_WIDE_INT mask_lo
, lo
;
3623 HOST_WIDE_INT mask_hi
, hi
;
3627 /* Tree EXP must have an integral type. */
3628 t
= TREE_TYPE (exp
);
3629 if (! INTEGRAL_TYPE_P (t
))
3632 /* Tree VAL must be an integer constant. */
3633 if (TREE_CODE (val
) != INTEGER_CST
3634 || TREE_OVERFLOW (val
))
3637 width
= TYPE_PRECISION (t
);
3638 if (width
> HOST_BITS_PER_WIDE_INT
)
3640 hi
= (unsigned HOST_WIDE_INT
) 1 << (width
- HOST_BITS_PER_WIDE_INT
- 1);
3643 mask_hi
= ((unsigned HOST_WIDE_INT
) -1
3644 >> (2 * HOST_BITS_PER_WIDE_INT
- width
));
3650 lo
= (unsigned HOST_WIDE_INT
) 1 << (width
- 1);
3653 mask_lo
= ((unsigned HOST_WIDE_INT
) -1
3654 >> (HOST_BITS_PER_WIDE_INT
- width
));
3657 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3658 treat VAL as if it were unsigned. */
3659 if ((TREE_INT_CST_HIGH (val
) & mask_hi
) == hi
3660 && (TREE_INT_CST_LOW (val
) & mask_lo
) == lo
)
3663 /* Handle extension from a narrower type. */
3664 if (TREE_CODE (exp
) == NOP_EXPR
3665 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0))) < width
)
3666 return sign_bit_p (TREE_OPERAND (exp
, 0), val
);
3671 /* Subroutine for fold_truthop: determine if an operand is simple enough
3672 to be evaluated unconditionally. */
3675 simple_operand_p (const_tree exp
)
3677 /* Strip any conversions that don't change the machine mode. */
3680 return (CONSTANT_CLASS_P (exp
)
3681 || TREE_CODE (exp
) == SSA_NAME
3683 && ! TREE_ADDRESSABLE (exp
)
3684 && ! TREE_THIS_VOLATILE (exp
)
3685 && ! DECL_NONLOCAL (exp
)
3686 /* Don't regard global variables as simple. They may be
3687 allocated in ways unknown to the compiler (shared memory,
3688 #pragma weak, etc). */
3689 && ! TREE_PUBLIC (exp
)
3690 && ! DECL_EXTERNAL (exp
)
3691 /* Loading a static variable is unduly expensive, but global
3692 registers aren't expensive. */
3693 && (! TREE_STATIC (exp
) || DECL_REGISTER (exp
))));
3696 /* The following functions are subroutines to fold_range_test and allow it to
3697 try to change a logical combination of comparisons into a range test.
3700 X == 2 || X == 3 || X == 4 || X == 5
3704 (unsigned) (X - 2) <= 3
3706 We describe each set of comparisons as being either inside or outside
3707 a range, using a variable named like IN_P, and then describe the
3708 range with a lower and upper bound. If one of the bounds is omitted,
3709 it represents either the highest or lowest value of the type.
3711 In the comments below, we represent a range by two numbers in brackets
3712 preceded by a "+" to designate being inside that range, or a "-" to
3713 designate being outside that range, so the condition can be inverted by
3714 flipping the prefix. An omitted bound is represented by a "-". For
3715 example, "- [-, 10]" means being outside the range starting at the lowest
3716 possible value and ending at 10, in other words, being greater than 10.
3717 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3720 We set up things so that the missing bounds are handled in a consistent
3721 manner so neither a missing bound nor "true" and "false" need to be
3722 handled using a special case. */
3724 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3725 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3726 and UPPER1_P are nonzero if the respective argument is an upper bound
3727 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3728 must be specified for a comparison. ARG1 will be converted to ARG0's
3729 type if both are specified. */
3732 range_binop (enum tree_code code
, tree type
, tree arg0
, int upper0_p
,
3733 tree arg1
, int upper1_p
)
3739 /* If neither arg represents infinity, do the normal operation.
3740 Else, if not a comparison, return infinity. Else handle the special
3741 comparison rules. Note that most of the cases below won't occur, but
3742 are handled for consistency. */
3744 if (arg0
!= 0 && arg1
!= 0)
3746 tem
= fold_build2 (code
, type
!= 0 ? type
: TREE_TYPE (arg0
),
3747 arg0
, fold_convert (TREE_TYPE (arg0
), arg1
));
3749 return TREE_CODE (tem
) == INTEGER_CST
? tem
: 0;
3752 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
3755 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3756 for neither. In real maths, we cannot assume open ended ranges are
3757 the same. But, this is computer arithmetic, where numbers are finite.
3758 We can therefore make the transformation of any unbounded range with
3759 the value Z, Z being greater than any representable number. This permits
3760 us to treat unbounded ranges as equal. */
3761 sgn0
= arg0
!= 0 ? 0 : (upper0_p
? 1 : -1);
3762 sgn1
= arg1
!= 0 ? 0 : (upper1_p
? 1 : -1);
3766 result
= sgn0
== sgn1
;
3769 result
= sgn0
!= sgn1
;
3772 result
= sgn0
< sgn1
;
3775 result
= sgn0
<= sgn1
;
3778 result
= sgn0
> sgn1
;
3781 result
= sgn0
>= sgn1
;
3787 return constant_boolean_node (result
, type
);
3790 /* Helper routine for make_range. Perform one step for it, return
3791 new expression if the loop should continue or NULL_TREE if it should
3795 make_range_step (location_t loc
, enum tree_code code
, tree arg0
, tree arg1
,
3796 tree exp_type
, tree
*p_low
, tree
*p_high
, int *p_in_p
,
3797 bool *strict_overflow_p
)
3799 tree arg0_type
= TREE_TYPE (arg0
);
3800 tree n_low
, n_high
, low
= *p_low
, high
= *p_high
;
3801 int in_p
= *p_in_p
, n_in_p
;
3805 case TRUTH_NOT_EXPR
:
3809 case EQ_EXPR
: case NE_EXPR
:
3810 case LT_EXPR
: case LE_EXPR
: case GE_EXPR
: case GT_EXPR
:
3811 /* We can only do something if the range is testing for zero
3812 and if the second operand is an integer constant. Note that
3813 saying something is "in" the range we make is done by
3814 complementing IN_P since it will set in the initial case of
3815 being not equal to zero; "out" is leaving it alone. */
3816 if (low
== NULL_TREE
|| high
== NULL_TREE
3817 || ! integer_zerop (low
) || ! integer_zerop (high
)
3818 || TREE_CODE (arg1
) != INTEGER_CST
)
3823 case NE_EXPR
: /* - [c, c] */
3826 case EQ_EXPR
: /* + [c, c] */
3827 in_p
= ! in_p
, low
= high
= arg1
;
3829 case GT_EXPR
: /* - [-, c] */
3830 low
= 0, high
= arg1
;
3832 case GE_EXPR
: /* + [c, -] */
3833 in_p
= ! in_p
, low
= arg1
, high
= 0;
3835 case LT_EXPR
: /* - [c, -] */
3836 low
= arg1
, high
= 0;
3838 case LE_EXPR
: /* + [-, c] */
3839 in_p
= ! in_p
, low
= 0, high
= arg1
;
3845 /* If this is an unsigned comparison, we also know that EXP is
3846 greater than or equal to zero. We base the range tests we make
3847 on that fact, so we record it here so we can parse existing
3848 range tests. We test arg0_type since often the return type
3849 of, e.g. EQ_EXPR, is boolean. */
3850 if (TYPE_UNSIGNED (arg0_type
) && (low
== 0 || high
== 0))
3852 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
,
3854 build_int_cst (arg0_type
, 0),
3858 in_p
= n_in_p
, low
= n_low
, high
= n_high
;
3860 /* If the high bound is missing, but we have a nonzero low
3861 bound, reverse the range so it goes from zero to the low bound
3863 if (high
== 0 && low
&& ! integer_zerop (low
))
3866 high
= range_binop (MINUS_EXPR
, NULL_TREE
, low
, 0,
3867 integer_one_node
, 0);
3868 low
= build_int_cst (arg0_type
, 0);
3878 /* (-x) IN [a,b] -> x in [-b, -a] */
3879 n_low
= range_binop (MINUS_EXPR
, exp_type
,
3880 build_int_cst (exp_type
, 0),
3882 n_high
= range_binop (MINUS_EXPR
, exp_type
,
3883 build_int_cst (exp_type
, 0),
3885 if (n_high
!= 0 && TREE_OVERFLOW (n_high
))
3891 return build2_loc (loc
, MINUS_EXPR
, exp_type
, negate_expr (arg0
),
3892 build_int_cst (exp_type
, 1));
3896 if (TREE_CODE (arg1
) != INTEGER_CST
)
3899 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3900 move a constant to the other side. */
3901 if (!TYPE_UNSIGNED (arg0_type
)
3902 && !TYPE_OVERFLOW_UNDEFINED (arg0_type
))
3905 /* If EXP is signed, any overflow in the computation is undefined,
3906 so we don't worry about it so long as our computations on
3907 the bounds don't overflow. For unsigned, overflow is defined
3908 and this is exactly the right thing. */
3909 n_low
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
3910 arg0_type
, low
, 0, arg1
, 0);
3911 n_high
= range_binop (code
== MINUS_EXPR
? PLUS_EXPR
: MINUS_EXPR
,
3912 arg0_type
, high
, 1, arg1
, 0);
3913 if ((n_low
!= 0 && TREE_OVERFLOW (n_low
))
3914 || (n_high
!= 0 && TREE_OVERFLOW (n_high
)))
3917 if (TYPE_OVERFLOW_UNDEFINED (arg0_type
))
3918 *strict_overflow_p
= true;
3921 /* Check for an unsigned range which has wrapped around the maximum
3922 value thus making n_high < n_low, and normalize it. */
3923 if (n_low
&& n_high
&& tree_int_cst_lt (n_high
, n_low
))
3925 low
= range_binop (PLUS_EXPR
, arg0_type
, n_high
, 0,
3926 integer_one_node
, 0);
3927 high
= range_binop (MINUS_EXPR
, arg0_type
, n_low
, 0,
3928 integer_one_node
, 0);
3930 /* If the range is of the form +/- [ x+1, x ], we won't
3931 be able to normalize it. But then, it represents the
3932 whole range or the empty set, so make it
3934 if (tree_int_cst_equal (n_low
, low
)
3935 && tree_int_cst_equal (n_high
, high
))
3941 low
= n_low
, high
= n_high
;
3949 case NON_LVALUE_EXPR
:
3950 if (TYPE_PRECISION (arg0_type
) > TYPE_PRECISION (exp_type
))
3953 if (! INTEGRAL_TYPE_P (arg0_type
)
3954 || (low
!= 0 && ! int_fits_type_p (low
, arg0_type
))
3955 || (high
!= 0 && ! int_fits_type_p (high
, arg0_type
)))
3958 n_low
= low
, n_high
= high
;
3961 n_low
= fold_convert_loc (loc
, arg0_type
, n_low
);
3964 n_high
= fold_convert_loc (loc
, arg0_type
, n_high
);
3966 /* If we're converting arg0 from an unsigned type, to exp,
3967 a signed type, we will be doing the comparison as unsigned.
3968 The tests above have already verified that LOW and HIGH
3971 So we have to ensure that we will handle large unsigned
3972 values the same way that the current signed bounds treat
3975 if (!TYPE_UNSIGNED (exp_type
) && TYPE_UNSIGNED (arg0_type
))
3979 /* For fixed-point modes, we need to pass the saturating flag
3980 as the 2nd parameter. */
3981 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type
)))
3983 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
),
3984 TYPE_SATURATING (arg0_type
));
3987 = lang_hooks
.types
.type_for_mode (TYPE_MODE (arg0_type
), 1);
3989 /* A range without an upper bound is, naturally, unbounded.
3990 Since convert would have cropped a very large value, use
3991 the max value for the destination type. */
3993 = TYPE_MAX_VALUE (equiv_type
) ? TYPE_MAX_VALUE (equiv_type
)
3994 : TYPE_MAX_VALUE (arg0_type
);
3996 if (TYPE_PRECISION (exp_type
) == TYPE_PRECISION (arg0_type
))
3997 high_positive
= fold_build2_loc (loc
, RSHIFT_EXPR
, arg0_type
,
3998 fold_convert_loc (loc
, arg0_type
,
4000 build_int_cst (arg0_type
, 1));
4002 /* If the low bound is specified, "and" the range with the
4003 range for which the original unsigned value will be
4007 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 1, n_low
, n_high
,
4008 1, fold_convert_loc (loc
, arg0_type
,
4013 in_p
= (n_in_p
== in_p
);
4017 /* Otherwise, "or" the range with the range of the input
4018 that will be interpreted as negative. */
4019 if (! merge_ranges (&n_in_p
, &n_low
, &n_high
, 0, n_low
, n_high
,
4020 1, fold_convert_loc (loc
, arg0_type
,
4025 in_p
= (in_p
!= n_in_p
);
4039 /* Given EXP, a logical expression, set the range it is testing into
4040 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4041 actually being tested. *PLOW and *PHIGH will be made of the same
4042 type as the returned expression. If EXP is not a comparison, we
4043 will most likely not be returning a useful value and range. Set
4044 *STRICT_OVERFLOW_P to true if the return value is only valid
4045 because signed overflow is undefined; otherwise, do not change
4046 *STRICT_OVERFLOW_P. */
4049 make_range (tree exp
, int *pin_p
, tree
*plow
, tree
*phigh
,
4050 bool *strict_overflow_p
)
4052 enum tree_code code
;
4053 tree arg0
, arg1
= NULL_TREE
;
4054 tree exp_type
, nexp
;
4057 location_t loc
= EXPR_LOCATION (exp
);
4059 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4060 and see if we can refine the range. Some of the cases below may not
4061 happen, but it doesn't seem worth worrying about this. We "continue"
4062 the outer loop when we've changed something; otherwise we "break"
4063 the switch, which will "break" the while. */
4066 low
= high
= build_int_cst (TREE_TYPE (exp
), 0);
4070 code
= TREE_CODE (exp
);
4071 exp_type
= TREE_TYPE (exp
);
4074 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
4076 if (TREE_OPERAND_LENGTH (exp
) > 0)
4077 arg0
= TREE_OPERAND (exp
, 0);
4078 if (TREE_CODE_CLASS (code
) == tcc_binary
4079 || TREE_CODE_CLASS (code
) == tcc_comparison
4080 || (TREE_CODE_CLASS (code
) == tcc_expression
4081 && TREE_OPERAND_LENGTH (exp
) > 1))
4082 arg1
= TREE_OPERAND (exp
, 1);
4084 if (arg0
== NULL_TREE
)
4087 nexp
= make_range_step (loc
, code
, arg0
, arg1
, exp_type
, &low
,
4088 &high
, &in_p
, strict_overflow_p
);
4089 if (nexp
== NULL_TREE
)
4094 /* If EXP is a constant, we can evaluate whether this is true or false. */
4095 if (TREE_CODE (exp
) == INTEGER_CST
)
4097 in_p
= in_p
== (integer_onep (range_binop (GE_EXPR
, integer_type_node
,
4099 && integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4105 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
4109 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4110 type, TYPE, return an expression to test if EXP is in (or out of, depending
4111 on IN_P) the range. Return 0 if the test couldn't be created. */
4114 build_range_check (location_t loc
, tree type
, tree exp
, int in_p
,
4115 tree low
, tree high
)
4117 tree etype
= TREE_TYPE (exp
), value
;
4119 #ifdef HAVE_canonicalize_funcptr_for_compare
4120 /* Disable this optimization for function pointer expressions
4121 on targets that require function pointer canonicalization. */
4122 if (HAVE_canonicalize_funcptr_for_compare
4123 && TREE_CODE (etype
) == POINTER_TYPE
4124 && TREE_CODE (TREE_TYPE (etype
)) == FUNCTION_TYPE
)
4130 value
= build_range_check (loc
, type
, exp
, 1, low
, high
);
4132 return invert_truthvalue_loc (loc
, value
);
4137 if (low
== 0 && high
== 0)
4138 return build_int_cst (type
, 1);
4141 return fold_build2_loc (loc
, LE_EXPR
, type
, exp
,
4142 fold_convert_loc (loc
, etype
, high
));
4145 return fold_build2_loc (loc
, GE_EXPR
, type
, exp
,
4146 fold_convert_loc (loc
, etype
, low
));
4148 if (operand_equal_p (low
, high
, 0))
4149 return fold_build2_loc (loc
, EQ_EXPR
, type
, exp
,
4150 fold_convert_loc (loc
, etype
, low
));
4152 if (integer_zerop (low
))
4154 if (! TYPE_UNSIGNED (etype
))
4156 etype
= unsigned_type_for (etype
);
4157 high
= fold_convert_loc (loc
, etype
, high
);
4158 exp
= fold_convert_loc (loc
, etype
, exp
);
4160 return build_range_check (loc
, type
, exp
, 1, 0, high
);
4163 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4164 if (integer_onep (low
) && TREE_CODE (high
) == INTEGER_CST
)
4166 unsigned HOST_WIDE_INT lo
;
4170 prec
= TYPE_PRECISION (etype
);
4171 if (prec
<= HOST_BITS_PER_WIDE_INT
)
4174 lo
= ((unsigned HOST_WIDE_INT
) 1 << (prec
- 1)) - 1;
4178 hi
= ((HOST_WIDE_INT
) 1 << (prec
- HOST_BITS_PER_WIDE_INT
- 1)) - 1;
4179 lo
= (unsigned HOST_WIDE_INT
) -1;
4182 if (TREE_INT_CST_HIGH (high
) == hi
&& TREE_INT_CST_LOW (high
) == lo
)
4184 if (TYPE_UNSIGNED (etype
))
4186 tree signed_etype
= signed_type_for (etype
);
4187 if (TYPE_PRECISION (signed_etype
) != TYPE_PRECISION (etype
))
4189 = build_nonstandard_integer_type (TYPE_PRECISION (etype
), 0);
4191 etype
= signed_etype
;
4192 exp
= fold_convert_loc (loc
, etype
, exp
);
4194 return fold_build2_loc (loc
, GT_EXPR
, type
, exp
,
4195 build_int_cst (etype
, 0));
4199 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4200 This requires wrap-around arithmetics for the type of the expression.
4201 First make sure that arithmetics in this type is valid, then make sure
4202 that it wraps around. */
4203 if (TREE_CODE (etype
) == ENUMERAL_TYPE
|| TREE_CODE (etype
) == BOOLEAN_TYPE
)
4204 etype
= lang_hooks
.types
.type_for_size (TYPE_PRECISION (etype
),
4205 TYPE_UNSIGNED (etype
));
4207 if (TREE_CODE (etype
) == INTEGER_TYPE
&& !TYPE_OVERFLOW_WRAPS (etype
))
4209 tree utype
, minv
, maxv
;
4211 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4212 for the type in question, as we rely on this here. */
4213 utype
= unsigned_type_for (etype
);
4214 maxv
= fold_convert_loc (loc
, utype
, TYPE_MAX_VALUE (etype
));
4215 maxv
= range_binop (PLUS_EXPR
, NULL_TREE
, maxv
, 1,
4216 integer_one_node
, 1);
4217 minv
= fold_convert_loc (loc
, utype
, TYPE_MIN_VALUE (etype
));
4219 if (integer_zerop (range_binop (NE_EXPR
, integer_type_node
,
4226 high
= fold_convert_loc (loc
, etype
, high
);
4227 low
= fold_convert_loc (loc
, etype
, low
);
4228 exp
= fold_convert_loc (loc
, etype
, exp
);
4230 value
= const_binop (MINUS_EXPR
, high
, low
);
4233 if (POINTER_TYPE_P (etype
))
4235 if (value
!= 0 && !TREE_OVERFLOW (value
))
4237 low
= fold_build1_loc (loc
, NEGATE_EXPR
, TREE_TYPE (low
), low
);
4238 return build_range_check (loc
, type
,
4239 fold_build_pointer_plus_loc (loc
, exp
, low
),
4240 1, build_int_cst (etype
, 0), value
);
4245 if (value
!= 0 && !TREE_OVERFLOW (value
))
4246 return build_range_check (loc
, type
,
4247 fold_build2_loc (loc
, MINUS_EXPR
, etype
, exp
, low
),
4248 1, build_int_cst (etype
, 0), value
);
4253 /* Return the predecessor of VAL in its type, handling the infinite case. */
4256 range_predecessor (tree val
)
4258 tree type
= TREE_TYPE (val
);
4260 if (INTEGRAL_TYPE_P (type
)
4261 && operand_equal_p (val
, TYPE_MIN_VALUE (type
), 0))
4264 return range_binop (MINUS_EXPR
, NULL_TREE
, val
, 0, integer_one_node
, 0);
4267 /* Return the successor of VAL in its type, handling the infinite case. */
4270 range_successor (tree val
)
4272 tree type
= TREE_TYPE (val
);
4274 if (INTEGRAL_TYPE_P (type
)
4275 && operand_equal_p (val
, TYPE_MAX_VALUE (type
), 0))
4278 return range_binop (PLUS_EXPR
, NULL_TREE
, val
, 0, integer_one_node
, 0);
4281 /* Given two ranges, see if we can merge them into one. Return 1 if we
4282 can, 0 if we can't. Set the output range into the specified parameters. */
4285 merge_ranges (int *pin_p
, tree
*plow
, tree
*phigh
, int in0_p
, tree low0
,
4286 tree high0
, int in1_p
, tree low1
, tree high1
)
4294 int lowequal
= ((low0
== 0 && low1
== 0)
4295 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4296 low0
, 0, low1
, 0)));
4297 int highequal
= ((high0
== 0 && high1
== 0)
4298 || integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4299 high0
, 1, high1
, 1)));
4301 /* Make range 0 be the range that starts first, or ends last if they
4302 start at the same value. Swap them if it isn't. */
4303 if (integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4306 && integer_onep (range_binop (GT_EXPR
, integer_type_node
,
4307 high1
, 1, high0
, 1))))
4309 temp
= in0_p
, in0_p
= in1_p
, in1_p
= temp
;
4310 tem
= low0
, low0
= low1
, low1
= tem
;
4311 tem
= high0
, high0
= high1
, high1
= tem
;
4314 /* Now flag two cases, whether the ranges are disjoint or whether the
4315 second range is totally subsumed in the first. Note that the tests
4316 below are simplified by the ones above. */
4317 no_overlap
= integer_onep (range_binop (LT_EXPR
, integer_type_node
,
4318 high0
, 1, low1
, 0));
4319 subset
= integer_onep (range_binop (LE_EXPR
, integer_type_node
,
4320 high1
, 1, high0
, 1));
4322 /* We now have four cases, depending on whether we are including or
4323 excluding the two ranges. */
4326 /* If they don't overlap, the result is false. If the second range
4327 is a subset it is the result. Otherwise, the range is from the start
4328 of the second to the end of the first. */
4330 in_p
= 0, low
= high
= 0;
4332 in_p
= 1, low
= low1
, high
= high1
;
4334 in_p
= 1, low
= low1
, high
= high0
;
4337 else if (in0_p
&& ! in1_p
)
4339 /* If they don't overlap, the result is the first range. If they are
4340 equal, the result is false. If the second range is a subset of the
4341 first, and the ranges begin at the same place, we go from just after
4342 the end of the second range to the end of the first. If the second
4343 range is not a subset of the first, or if it is a subset and both
4344 ranges end at the same place, the range starts at the start of the
4345 first range and ends just before the second range.
4346 Otherwise, we can't describe this as a single range. */
4348 in_p
= 1, low
= low0
, high
= high0
;
4349 else if (lowequal
&& highequal
)
4350 in_p
= 0, low
= high
= 0;
4351 else if (subset
&& lowequal
)
4353 low
= range_successor (high1
);
4358 /* We are in the weird situation where high0 > high1 but
4359 high1 has no successor. Punt. */
4363 else if (! subset
|| highequal
)
4366 high
= range_predecessor (low1
);
4370 /* low0 < low1 but low1 has no predecessor. Punt. */
4378 else if (! in0_p
&& in1_p
)
4380 /* If they don't overlap, the result is the second range. If the second
4381 is a subset of the first, the result is false. Otherwise,
4382 the range starts just after the first range and ends at the
4383 end of the second. */
4385 in_p
= 1, low
= low1
, high
= high1
;
4386 else if (subset
|| highequal
)
4387 in_p
= 0, low
= high
= 0;
4390 low
= range_successor (high0
);
4395 /* high1 > high0 but high0 has no successor. Punt. */
4403 /* The case where we are excluding both ranges. Here the complex case
4404 is if they don't overlap. In that case, the only time we have a
4405 range is if they are adjacent. If the second is a subset of the
4406 first, the result is the first. Otherwise, the range to exclude
4407 starts at the beginning of the first range and ends at the end of the
4411 if (integer_onep (range_binop (EQ_EXPR
, integer_type_node
,
4412 range_successor (high0
),
4414 in_p
= 0, low
= low0
, high
= high1
;
4417 /* Canonicalize - [min, x] into - [-, x]. */
4418 if (low0
&& TREE_CODE (low0
) == INTEGER_CST
)
4419 switch (TREE_CODE (TREE_TYPE (low0
)))
4422 if (TYPE_PRECISION (TREE_TYPE (low0
))
4423 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0
))))
4427 if (tree_int_cst_equal (low0
,
4428 TYPE_MIN_VALUE (TREE_TYPE (low0
))))
4432 if (TYPE_UNSIGNED (TREE_TYPE (low0
))
4433 && integer_zerop (low0
))
4440 /* Canonicalize - [x, max] into - [x, -]. */
4441 if (high1
&& TREE_CODE (high1
) == INTEGER_CST
)
4442 switch (TREE_CODE (TREE_TYPE (high1
)))
4445 if (TYPE_PRECISION (TREE_TYPE (high1
))
4446 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1
))))
4450 if (tree_int_cst_equal (high1
,
4451 TYPE_MAX_VALUE (TREE_TYPE (high1
))))
4455 if (TYPE_UNSIGNED (TREE_TYPE (high1
))
4456 && integer_zerop (range_binop (PLUS_EXPR
, NULL_TREE
,
4458 integer_one_node
, 1)))
4465 /* The ranges might be also adjacent between the maximum and
4466 minimum values of the given type. For
4467 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4468 return + [x + 1, y - 1]. */
4469 if (low0
== 0 && high1
== 0)
4471 low
= range_successor (high0
);
4472 high
= range_predecessor (low1
);
4473 if (low
== 0 || high
== 0)
4483 in_p
= 0, low
= low0
, high
= high0
;
4485 in_p
= 0, low
= low0
, high
= high1
;
4488 *pin_p
= in_p
, *plow
= low
, *phigh
= high
;
4493 /* Subroutine of fold, looking inside expressions of the form
4494 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4495 of the COND_EXPR. This function is being used also to optimize
4496 A op B ? C : A, by reversing the comparison first.
4498 Return a folded expression whose code is not a COND_EXPR
4499 anymore, or NULL_TREE if no folding opportunity is found. */
4502 fold_cond_expr_with_comparison (location_t loc
, tree type
,
4503 tree arg0
, tree arg1
, tree arg2
)
4505 enum tree_code comp_code
= TREE_CODE (arg0
);
4506 tree arg00
= TREE_OPERAND (arg0
, 0);
4507 tree arg01
= TREE_OPERAND (arg0
, 1);
4508 tree arg1_type
= TREE_TYPE (arg1
);
4514 /* If we have A op 0 ? A : -A, consider applying the following
4517 A == 0? A : -A same as -A
4518 A != 0? A : -A same as A
4519 A >= 0? A : -A same as abs (A)
4520 A > 0? A : -A same as abs (A)
4521 A <= 0? A : -A same as -abs (A)
4522 A < 0? A : -A same as -abs (A)
4524 None of these transformations work for modes with signed
4525 zeros. If A is +/-0, the first two transformations will
4526 change the sign of the result (from +0 to -0, or vice
4527 versa). The last four will fix the sign of the result,
4528 even though the original expressions could be positive or
4529 negative, depending on the sign of A.
4531 Note that all these transformations are correct if A is
4532 NaN, since the two alternatives (A and -A) are also NaNs. */
4533 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
4534 && (FLOAT_TYPE_P (TREE_TYPE (arg01
))
4535 ? real_zerop (arg01
)
4536 : integer_zerop (arg01
))
4537 && ((TREE_CODE (arg2
) == NEGATE_EXPR
4538 && operand_equal_p (TREE_OPERAND (arg2
, 0), arg1
, 0))
4539 /* In the case that A is of the form X-Y, '-A' (arg2) may
4540 have already been folded to Y-X, check for that. */
4541 || (TREE_CODE (arg1
) == MINUS_EXPR
4542 && TREE_CODE (arg2
) == MINUS_EXPR
4543 && operand_equal_p (TREE_OPERAND (arg1
, 0),
4544 TREE_OPERAND (arg2
, 1), 0)
4545 && operand_equal_p (TREE_OPERAND (arg1
, 1),
4546 TREE_OPERAND (arg2
, 0), 0))))
4551 tem
= fold_convert_loc (loc
, arg1_type
, arg1
);
4552 return pedantic_non_lvalue_loc (loc
,
4553 fold_convert_loc (loc
, type
,
4554 negate_expr (tem
)));
4557 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
4560 if (flag_trapping_math
)
4565 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4566 arg1
= fold_convert_loc (loc
, signed_type_for
4567 (TREE_TYPE (arg1
)), arg1
);
4568 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4569 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
4572 if (flag_trapping_math
)
4576 if (TYPE_UNSIGNED (TREE_TYPE (arg1
)))
4577 arg1
= fold_convert_loc (loc
, signed_type_for
4578 (TREE_TYPE (arg1
)), arg1
);
4579 tem
= fold_build1_loc (loc
, ABS_EXPR
, TREE_TYPE (arg1
), arg1
);
4580 return negate_expr (fold_convert_loc (loc
, type
, tem
));
4582 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
4586 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4587 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4588 both transformations are correct when A is NaN: A != 0
4589 is then true, and A == 0 is false. */
4591 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
4592 && integer_zerop (arg01
) && integer_zerop (arg2
))
4594 if (comp_code
== NE_EXPR
)
4595 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
4596 else if (comp_code
== EQ_EXPR
)
4597 return build_int_cst (type
, 0);
4600 /* Try some transformations of A op B ? A : B.
4602 A == B? A : B same as B
4603 A != B? A : B same as A
4604 A >= B? A : B same as max (A, B)
4605 A > B? A : B same as max (B, A)
4606 A <= B? A : B same as min (A, B)
4607 A < B? A : B same as min (B, A)
4609 As above, these transformations don't work in the presence
4610 of signed zeros. For example, if A and B are zeros of
4611 opposite sign, the first two transformations will change
4612 the sign of the result. In the last four, the original
4613 expressions give different results for (A=+0, B=-0) and
4614 (A=-0, B=+0), but the transformed expressions do not.
4616 The first two transformations are correct if either A or B
4617 is a NaN. In the first transformation, the condition will
4618 be false, and B will indeed be chosen. In the case of the
4619 second transformation, the condition A != B will be true,
4620 and A will be chosen.
4622 The conversions to max() and min() are not correct if B is
4623 a number and A is not. The conditions in the original
4624 expressions will be false, so all four give B. The min()
4625 and max() versions would give a NaN instead. */
4626 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
))
4627 && operand_equal_for_comparison_p (arg01
, arg2
, arg00
)
4628 /* Avoid these transformations if the COND_EXPR may be used
4629 as an lvalue in the C++ front-end. PR c++/19199. */
4631 || (strcmp (lang_hooks
.name
, "GNU C++") != 0
4632 && strcmp (lang_hooks
.name
, "GNU Objective-C++") != 0)
4633 || ! maybe_lvalue_p (arg1
)
4634 || ! maybe_lvalue_p (arg2
)))
4636 tree comp_op0
= arg00
;
4637 tree comp_op1
= arg01
;
4638 tree comp_type
= TREE_TYPE (comp_op0
);
4640 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4641 if (TYPE_MAIN_VARIANT (comp_type
) == TYPE_MAIN_VARIANT (type
))
4651 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg2
));
4653 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
4658 /* In C++ a ?: expression can be an lvalue, so put the
4659 operand which will be used if they are equal first
4660 so that we can convert this back to the
4661 corresponding COND_EXPR. */
4662 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4664 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
4665 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
4666 tem
= (comp_code
== LE_EXPR
|| comp_code
== UNLE_EXPR
)
4667 ? fold_build2_loc (loc
, MIN_EXPR
, comp_type
, comp_op0
, comp_op1
)
4668 : fold_build2_loc (loc
, MIN_EXPR
, comp_type
,
4669 comp_op1
, comp_op0
);
4670 return pedantic_non_lvalue_loc (loc
,
4671 fold_convert_loc (loc
, type
, tem
));
4678 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4680 comp_op0
= fold_convert_loc (loc
, comp_type
, comp_op0
);
4681 comp_op1
= fold_convert_loc (loc
, comp_type
, comp_op1
);
4682 tem
= (comp_code
== GE_EXPR
|| comp_code
== UNGE_EXPR
)
4683 ? fold_build2_loc (loc
, MAX_EXPR
, comp_type
, comp_op0
, comp_op1
)
4684 : fold_build2_loc (loc
, MAX_EXPR
, comp_type
,
4685 comp_op1
, comp_op0
);
4686 return pedantic_non_lvalue_loc (loc
,
4687 fold_convert_loc (loc
, type
, tem
));
4691 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4692 return pedantic_non_lvalue_loc (loc
,
4693 fold_convert_loc (loc
, type
, arg2
));
4696 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
))))
4697 return pedantic_non_lvalue_loc (loc
,
4698 fold_convert_loc (loc
, type
, arg1
));
4701 gcc_assert (TREE_CODE_CLASS (comp_code
) == tcc_comparison
);
4706 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4707 we might still be able to simplify this. For example,
4708 if C1 is one less or one more than C2, this might have started
4709 out as a MIN or MAX and been transformed by this function.
4710 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4712 if (INTEGRAL_TYPE_P (type
)
4713 && TREE_CODE (arg01
) == INTEGER_CST
4714 && TREE_CODE (arg2
) == INTEGER_CST
)
4718 if (TREE_CODE (arg1
) == INTEGER_CST
)
4720 /* We can replace A with C1 in this case. */
4721 arg1
= fold_convert_loc (loc
, type
, arg01
);
4722 return fold_build3_loc (loc
, COND_EXPR
, type
, arg0
, arg1
, arg2
);
4725 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4726 MIN_EXPR, to preserve the signedness of the comparison. */
4727 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
4729 && operand_equal_p (arg01
,
4730 const_binop (PLUS_EXPR
, arg2
,
4731 build_int_cst (type
, 1)),
4734 tem
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (arg00
), arg00
,
4735 fold_convert_loc (loc
, TREE_TYPE (arg00
),
4737 return pedantic_non_lvalue_loc (loc
,
4738 fold_convert_loc (loc
, type
, tem
));
4743 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4745 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
4747 && operand_equal_p (arg01
,
4748 const_binop (MINUS_EXPR
, arg2
,
4749 build_int_cst (type
, 1)),
4752 tem
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (arg00
), arg00
,
4753 fold_convert_loc (loc
, TREE_TYPE (arg00
),
4755 return pedantic_non_lvalue_loc (loc
,
4756 fold_convert_loc (loc
, type
, tem
));
4761 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4762 MAX_EXPR, to preserve the signedness of the comparison. */
4763 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
),
4765 && operand_equal_p (arg01
,
4766 const_binop (MINUS_EXPR
, arg2
,
4767 build_int_cst (type
, 1)),
4770 tem
= fold_build2_loc (loc
, MAX_EXPR
, TREE_TYPE (arg00
), arg00
,
4771 fold_convert_loc (loc
, TREE_TYPE (arg00
),
4773 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
4778 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4779 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
),
4781 && operand_equal_p (arg01
,
4782 const_binop (PLUS_EXPR
, arg2
,
4783 build_int_cst (type
, 1)),
4786 tem
= fold_build2_loc (loc
, MAX_EXPR
, TREE_TYPE (arg00
), arg00
,
4787 fold_convert_loc (loc
, TREE_TYPE (arg00
),
4789 return pedantic_non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
4803 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4804 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4805 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4809 /* EXP is some logical combination of boolean tests. See if we can
4810 merge it into some range test. Return the new tree if so. */
4813 fold_range_test (location_t loc
, enum tree_code code
, tree type
,
4816 int or_op
= (code
== TRUTH_ORIF_EXPR
4817 || code
== TRUTH_OR_EXPR
);
4818 int in0_p
, in1_p
, in_p
;
4819 tree low0
, low1
, low
, high0
, high1
, high
;
4820 bool strict_overflow_p
= false;
4821 tree lhs
= make_range (op0
, &in0_p
, &low0
, &high0
, &strict_overflow_p
);
4822 tree rhs
= make_range (op1
, &in1_p
, &low1
, &high1
, &strict_overflow_p
);
4824 const char * const warnmsg
= G_("assuming signed overflow does not occur "
4825 "when simplifying range test");
4827 /* If this is an OR operation, invert both sides; we will invert
4828 again at the end. */
4830 in0_p
= ! in0_p
, in1_p
= ! in1_p
;
4832 /* If both expressions are the same, if we can merge the ranges, and we
4833 can build the range test, return it or it inverted. If one of the
4834 ranges is always true or always false, consider it to be the same
4835 expression as the other. */
4836 if ((lhs
== 0 || rhs
== 0 || operand_equal_p (lhs
, rhs
, 0))
4837 && merge_ranges (&in_p
, &low
, &high
, in0_p
, low0
, high0
,
4839 && 0 != (tem
= (build_range_check (loc
, type
,
4841 : rhs
!= 0 ? rhs
: integer_zero_node
,
4844 if (strict_overflow_p
)
4845 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
4846 return or_op
? invert_truthvalue_loc (loc
, tem
) : tem
;
4849 /* On machines where the branch cost is expensive, if this is a
4850 short-circuited branch and the underlying object on both sides
4851 is the same, make a non-short-circuit operation. */
4852 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4853 && lhs
!= 0 && rhs
!= 0
4854 && (code
== TRUTH_ANDIF_EXPR
4855 || code
== TRUTH_ORIF_EXPR
)
4856 && operand_equal_p (lhs
, rhs
, 0))
4858 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4859 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4860 which cases we can't do this. */
4861 if (simple_operand_p (lhs
))
4862 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
4863 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
4866 else if (!lang_hooks
.decls
.global_bindings_p ()
4867 && !CONTAINS_PLACEHOLDER_P (lhs
))
4869 tree common
= save_expr (lhs
);
4871 if (0 != (lhs
= build_range_check (loc
, type
, common
,
4872 or_op
? ! in0_p
: in0_p
,
4874 && (0 != (rhs
= build_range_check (loc
, type
, common
,
4875 or_op
? ! in1_p
: in1_p
,
4878 if (strict_overflow_p
)
4879 fold_overflow_warning (warnmsg
,
4880 WARN_STRICT_OVERFLOW_COMPARISON
);
4881 return build2_loc (loc
, code
== TRUTH_ANDIF_EXPR
4882 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
,
4891 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4892 bit value. Arrange things so the extra bits will be set to zero if and
4893 only if C is signed-extended to its full width. If MASK is nonzero,
4894 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4897 unextend (tree c
, int p
, int unsignedp
, tree mask
)
4899 tree type
= TREE_TYPE (c
);
4900 int modesize
= GET_MODE_BITSIZE (TYPE_MODE (type
));
4903 if (p
== modesize
|| unsignedp
)
4906 /* We work by getting just the sign bit into the low-order bit, then
4907 into the high-order bit, then sign-extend. We then XOR that value
4909 temp
= const_binop (RSHIFT_EXPR
, c
, size_int (p
- 1));
4910 temp
= const_binop (BIT_AND_EXPR
, temp
, size_int (1));
4912 /* We must use a signed type in order to get an arithmetic right shift.
4913 However, we must also avoid introducing accidental overflows, so that
4914 a subsequent call to integer_zerop will work. Hence we must
4915 do the type conversion here. At this point, the constant is either
4916 zero or one, and the conversion to a signed type can never overflow.
4917 We could get an overflow if this conversion is done anywhere else. */
4918 if (TYPE_UNSIGNED (type
))
4919 temp
= fold_convert (signed_type_for (type
), temp
);
4921 temp
= const_binop (LSHIFT_EXPR
, temp
, size_int (modesize
- 1));
4922 temp
= const_binop (RSHIFT_EXPR
, temp
, size_int (modesize
- p
- 1));
4924 temp
= const_binop (BIT_AND_EXPR
, temp
,
4925 fold_convert (TREE_TYPE (c
), mask
));
4926 /* If necessary, convert the type back to match the type of C. */
4927 if (TYPE_UNSIGNED (type
))
4928 temp
= fold_convert (type
, temp
);
4930 return fold_convert (type
, const_binop (BIT_XOR_EXPR
, c
, temp
));
4933 /* For an expression that has the form
4937 we can drop one of the inner expressions and simplify to
4941 LOC is the location of the resulting expression. OP is the inner
4942 logical operation; the left-hand side in the examples above, while CMPOP
4943 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
4944 removing a condition that guards another, as in
4945 (A != NULL && A->...) || A == NULL
4946 which we must not transform. If RHS_ONLY is true, only eliminate the
4947 right-most operand of the inner logical operation. */
4950 merge_truthop_with_opposite_arm (location_t loc
, tree op
, tree cmpop
,
4953 tree type
= TREE_TYPE (cmpop
);
4954 enum tree_code code
= TREE_CODE (cmpop
);
4955 enum tree_code truthop_code
= TREE_CODE (op
);
4956 tree lhs
= TREE_OPERAND (op
, 0);
4957 tree rhs
= TREE_OPERAND (op
, 1);
4958 tree orig_lhs
= lhs
, orig_rhs
= rhs
;
4959 enum tree_code rhs_code
= TREE_CODE (rhs
);
4960 enum tree_code lhs_code
= TREE_CODE (lhs
);
4961 enum tree_code inv_code
;
4963 if (TREE_SIDE_EFFECTS (op
) || TREE_SIDE_EFFECTS (cmpop
))
4966 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
4969 if (rhs_code
== truthop_code
)
4971 tree newrhs
= merge_truthop_with_opposite_arm (loc
, rhs
, cmpop
, rhs_only
);
4972 if (newrhs
!= NULL_TREE
)
4975 rhs_code
= TREE_CODE (rhs
);
4978 if (lhs_code
== truthop_code
&& !rhs_only
)
4980 tree newlhs
= merge_truthop_with_opposite_arm (loc
, lhs
, cmpop
, false);
4981 if (newlhs
!= NULL_TREE
)
4984 lhs_code
= TREE_CODE (lhs
);
4988 inv_code
= invert_tree_comparison (code
, HONOR_NANS (TYPE_MODE (type
)));
4989 if (inv_code
== rhs_code
4990 && operand_equal_p (TREE_OPERAND (rhs
, 0), TREE_OPERAND (cmpop
, 0), 0)
4991 && operand_equal_p (TREE_OPERAND (rhs
, 1), TREE_OPERAND (cmpop
, 1), 0))
4993 if (!rhs_only
&& inv_code
== lhs_code
4994 && operand_equal_p (TREE_OPERAND (lhs
, 0), TREE_OPERAND (cmpop
, 0), 0)
4995 && operand_equal_p (TREE_OPERAND (lhs
, 1), TREE_OPERAND (cmpop
, 1), 0))
4997 if (rhs
!= orig_rhs
|| lhs
!= orig_lhs
)
4998 return fold_build2_loc (loc
, truthop_code
, TREE_TYPE (cmpop
),
5003 /* Find ways of folding logical expressions of LHS and RHS:
5004 Try to merge two comparisons to the same innermost item.
5005 Look for range tests like "ch >= '0' && ch <= '9'".
5006 Look for combinations of simple terms on machines with expensive branches
5007 and evaluate the RHS unconditionally.
5009 For example, if we have p->a == 2 && p->b == 4 and we can make an
5010 object large enough to span both A and B, we can do this with a comparison
5011 against the object ANDed with the a mask.
5013 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5014 operations to do this with one comparison.
5016 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5017 function and the one above.
5019 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5020 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5022 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5025 We return the simplified tree or 0 if no optimization is possible. */
5028 fold_truthop (location_t loc
, enum tree_code code
, tree truth_type
,
5031 /* If this is the "or" of two comparisons, we can do something if
5032 the comparisons are NE_EXPR. If this is the "and", we can do something
5033 if the comparisons are EQ_EXPR. I.e.,
5034 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5036 WANTED_CODE is this operation code. For single bit fields, we can
5037 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5038 comparison for one-bit fields. */
5040 enum tree_code wanted_code
;
5041 enum tree_code lcode
, rcode
;
5042 tree ll_arg
, lr_arg
, rl_arg
, rr_arg
;
5043 tree ll_inner
, lr_inner
, rl_inner
, rr_inner
;
5044 HOST_WIDE_INT ll_bitsize
, ll_bitpos
, lr_bitsize
, lr_bitpos
;
5045 HOST_WIDE_INT rl_bitsize
, rl_bitpos
, rr_bitsize
, rr_bitpos
;
5046 HOST_WIDE_INT xll_bitpos
, xlr_bitpos
, xrl_bitpos
, xrr_bitpos
;
5047 HOST_WIDE_INT lnbitsize
, lnbitpos
, rnbitsize
, rnbitpos
;
5048 int ll_unsignedp
, lr_unsignedp
, rl_unsignedp
, rr_unsignedp
;
5049 enum machine_mode ll_mode
, lr_mode
, rl_mode
, rr_mode
;
5050 enum machine_mode lnmode
, rnmode
;
5051 tree ll_mask
, lr_mask
, rl_mask
, rr_mask
;
5052 tree ll_and_mask
, lr_and_mask
, rl_and_mask
, rr_and_mask
;
5053 tree l_const
, r_const
;
5054 tree lntype
, rntype
, result
;
5055 HOST_WIDE_INT first_bit
, end_bit
;
5057 tree orig_lhs
= lhs
, orig_rhs
= rhs
;
5058 enum tree_code orig_code
= code
;
5060 /* Start by getting the comparison codes. Fail if anything is volatile.
5061 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5062 it were surrounded with a NE_EXPR. */
5064 if (TREE_SIDE_EFFECTS (lhs
) || TREE_SIDE_EFFECTS (rhs
))
5067 lcode
= TREE_CODE (lhs
);
5068 rcode
= TREE_CODE (rhs
);
5070 if (lcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (lhs
, 1)))
5072 lhs
= build2 (NE_EXPR
, truth_type
, lhs
,
5073 build_int_cst (TREE_TYPE (lhs
), 0));
5077 if (rcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (rhs
, 1)))
5079 rhs
= build2 (NE_EXPR
, truth_type
, rhs
,
5080 build_int_cst (TREE_TYPE (rhs
), 0));
5084 if (TREE_CODE_CLASS (lcode
) != tcc_comparison
5085 || TREE_CODE_CLASS (rcode
) != tcc_comparison
)
5088 ll_arg
= TREE_OPERAND (lhs
, 0);
5089 lr_arg
= TREE_OPERAND (lhs
, 1);
5090 rl_arg
= TREE_OPERAND (rhs
, 0);
5091 rr_arg
= TREE_OPERAND (rhs
, 1);
5093 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5094 if (simple_operand_p (ll_arg
)
5095 && simple_operand_p (lr_arg
))
5097 if (operand_equal_p (ll_arg
, rl_arg
, 0)
5098 && operand_equal_p (lr_arg
, rr_arg
, 0))
5100 result
= combine_comparisons (loc
, code
, lcode
, rcode
,
5101 truth_type
, ll_arg
, lr_arg
);
5105 else if (operand_equal_p (ll_arg
, rr_arg
, 0)
5106 && operand_equal_p (lr_arg
, rl_arg
, 0))
5108 result
= combine_comparisons (loc
, code
, lcode
,
5109 swap_tree_comparison (rcode
),
5110 truth_type
, ll_arg
, lr_arg
);
5116 code
= ((code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
)
5117 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
);
5119 /* If the RHS can be evaluated unconditionally and its operands are
5120 simple, it wins to evaluate the RHS unconditionally on machines
5121 with expensive branches. In this case, this isn't a comparison
5122 that can be merged. Avoid doing this if the RHS is a floating-point
5123 comparison since those can trap. */
5125 if (BRANCH_COST (optimize_function_for_speed_p (cfun
),
5127 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg
))
5128 && simple_operand_p (rl_arg
)
5129 && simple_operand_p (rr_arg
))
5131 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5132 if (code
== TRUTH_OR_EXPR
5133 && lcode
== NE_EXPR
&& integer_zerop (lr_arg
)
5134 && rcode
== NE_EXPR
&& integer_zerop (rr_arg
)
5135 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
5136 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
5137 return build2_loc (loc
, NE_EXPR
, truth_type
,
5138 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
5140 build_int_cst (TREE_TYPE (ll_arg
), 0));
5142 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5143 if (code
== TRUTH_AND_EXPR
5144 && lcode
== EQ_EXPR
&& integer_zerop (lr_arg
)
5145 && rcode
== EQ_EXPR
&& integer_zerop (rr_arg
)
5146 && TREE_TYPE (ll_arg
) == TREE_TYPE (rl_arg
)
5147 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg
)))
5148 return build2_loc (loc
, EQ_EXPR
, truth_type
,
5149 build2 (BIT_IOR_EXPR
, TREE_TYPE (ll_arg
),
5151 build_int_cst (TREE_TYPE (ll_arg
), 0));
5153 if (LOGICAL_OP_NON_SHORT_CIRCUIT
)
5155 if (code
!= orig_code
|| lhs
!= orig_lhs
|| rhs
!= orig_rhs
)
5156 return build2_loc (loc
, code
, truth_type
, lhs
, rhs
);
5161 /* See if the comparisons can be merged. Then get all the parameters for
5164 if ((lcode
!= EQ_EXPR
&& lcode
!= NE_EXPR
)
5165 || (rcode
!= EQ_EXPR
&& rcode
!= NE_EXPR
))
5169 ll_inner
= decode_field_reference (loc
, ll_arg
,
5170 &ll_bitsize
, &ll_bitpos
, &ll_mode
,
5171 &ll_unsignedp
, &volatilep
, &ll_mask
,
5173 lr_inner
= decode_field_reference (loc
, lr_arg
,
5174 &lr_bitsize
, &lr_bitpos
, &lr_mode
,
5175 &lr_unsignedp
, &volatilep
, &lr_mask
,
5177 rl_inner
= decode_field_reference (loc
, rl_arg
,
5178 &rl_bitsize
, &rl_bitpos
, &rl_mode
,
5179 &rl_unsignedp
, &volatilep
, &rl_mask
,
5181 rr_inner
= decode_field_reference (loc
, rr_arg
,
5182 &rr_bitsize
, &rr_bitpos
, &rr_mode
,
5183 &rr_unsignedp
, &volatilep
, &rr_mask
,
5186 /* It must be true that the inner operation on the lhs of each
5187 comparison must be the same if we are to be able to do anything.
5188 Then see if we have constants. If not, the same must be true for
5190 if (volatilep
|| ll_inner
== 0 || rl_inner
== 0
5191 || ! operand_equal_p (ll_inner
, rl_inner
, 0))
5194 if (TREE_CODE (lr_arg
) == INTEGER_CST
5195 && TREE_CODE (rr_arg
) == INTEGER_CST
)
5196 l_const
= lr_arg
, r_const
= rr_arg
;
5197 else if (lr_inner
== 0 || rr_inner
== 0
5198 || ! operand_equal_p (lr_inner
, rr_inner
, 0))
5201 l_const
= r_const
= 0;
5203 /* If either comparison code is not correct for our logical operation,
5204 fail. However, we can convert a one-bit comparison against zero into
5205 the opposite comparison against that bit being set in the field. */
5207 wanted_code
= (code
== TRUTH_AND_EXPR
? EQ_EXPR
: NE_EXPR
);
5208 if (lcode
!= wanted_code
)
5210 if (l_const
&& integer_zerop (l_const
) && integer_pow2p (ll_mask
))
5212 /* Make the left operand unsigned, since we are only interested
5213 in the value of one bit. Otherwise we are doing the wrong
5222 /* This is analogous to the code for l_const above. */
5223 if (rcode
!= wanted_code
)
5225 if (r_const
&& integer_zerop (r_const
) && integer_pow2p (rl_mask
))
5234 /* See if we can find a mode that contains both fields being compared on
5235 the left. If we can't, fail. Otherwise, update all constants and masks
5236 to be relative to a field of that size. */
5237 first_bit
= MIN (ll_bitpos
, rl_bitpos
);
5238 end_bit
= MAX (ll_bitpos
+ ll_bitsize
, rl_bitpos
+ rl_bitsize
);
5239 lnmode
= get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
5240 TYPE_ALIGN (TREE_TYPE (ll_inner
)), word_mode
,
5242 if (lnmode
== VOIDmode
)
5245 lnbitsize
= GET_MODE_BITSIZE (lnmode
);
5246 lnbitpos
= first_bit
& ~ (lnbitsize
- 1);
5247 lntype
= lang_hooks
.types
.type_for_size (lnbitsize
, 1);
5248 xll_bitpos
= ll_bitpos
- lnbitpos
, xrl_bitpos
= rl_bitpos
- lnbitpos
;
5250 if (BYTES_BIG_ENDIAN
)
5252 xll_bitpos
= lnbitsize
- xll_bitpos
- ll_bitsize
;
5253 xrl_bitpos
= lnbitsize
- xrl_bitpos
- rl_bitsize
;
5256 ll_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, ll_mask
),
5257 size_int (xll_bitpos
));
5258 rl_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
, lntype
, rl_mask
),
5259 size_int (xrl_bitpos
));
5263 l_const
= fold_convert_loc (loc
, lntype
, l_const
);
5264 l_const
= unextend (l_const
, ll_bitsize
, ll_unsignedp
, ll_and_mask
);
5265 l_const
= const_binop (LSHIFT_EXPR
, l_const
, size_int (xll_bitpos
));
5266 if (! integer_zerop (const_binop (BIT_AND_EXPR
, l_const
,
5267 fold_build1_loc (loc
, BIT_NOT_EXPR
,
5270 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5272 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5277 r_const
= fold_convert_loc (loc
, lntype
, r_const
);
5278 r_const
= unextend (r_const
, rl_bitsize
, rl_unsignedp
, rl_and_mask
);
5279 r_const
= const_binop (LSHIFT_EXPR
, r_const
, size_int (xrl_bitpos
));
5280 if (! integer_zerop (const_binop (BIT_AND_EXPR
, r_const
,
5281 fold_build1_loc (loc
, BIT_NOT_EXPR
,
5284 warning (0, "comparison is always %d", wanted_code
== NE_EXPR
);
5286 return constant_boolean_node (wanted_code
== NE_EXPR
, truth_type
);
5290 /* If the right sides are not constant, do the same for it. Also,
5291 disallow this optimization if a size or signedness mismatch occurs
5292 between the left and right sides. */
5295 if (ll_bitsize
!= lr_bitsize
|| rl_bitsize
!= rr_bitsize
5296 || ll_unsignedp
!= lr_unsignedp
|| rl_unsignedp
!= rr_unsignedp
5297 /* Make sure the two fields on the right
5298 correspond to the left without being swapped. */
5299 || ll_bitpos
- rl_bitpos
!= lr_bitpos
- rr_bitpos
)
5302 first_bit
= MIN (lr_bitpos
, rr_bitpos
);
5303 end_bit
= MAX (lr_bitpos
+ lr_bitsize
, rr_bitpos
+ rr_bitsize
);
5304 rnmode
= get_best_mode (end_bit
- first_bit
, first_bit
, 0, 0,
5305 TYPE_ALIGN (TREE_TYPE (lr_inner
)), word_mode
,
5307 if (rnmode
== VOIDmode
)
5310 rnbitsize
= GET_MODE_BITSIZE (rnmode
);
5311 rnbitpos
= first_bit
& ~ (rnbitsize
- 1);
5312 rntype
= lang_hooks
.types
.type_for_size (rnbitsize
, 1);
5313 xlr_bitpos
= lr_bitpos
- rnbitpos
, xrr_bitpos
= rr_bitpos
- rnbitpos
;
5315 if (BYTES_BIG_ENDIAN
)
5317 xlr_bitpos
= rnbitsize
- xlr_bitpos
- lr_bitsize
;
5318 xrr_bitpos
= rnbitsize
- xrr_bitpos
- rr_bitsize
;
5321 lr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
5323 size_int (xlr_bitpos
));
5324 rr_mask
= const_binop (LSHIFT_EXPR
, fold_convert_loc (loc
,
5326 size_int (xrr_bitpos
));
5328 /* Make a mask that corresponds to both fields being compared.
5329 Do this for both items being compared. If the operands are the
5330 same size and the bits being compared are in the same position
5331 then we can do this by masking both and comparing the masked
5333 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
5334 lr_mask
= const_binop (BIT_IOR_EXPR
, lr_mask
, rr_mask
);
5335 if (lnbitsize
== rnbitsize
&& xll_bitpos
== xlr_bitpos
)
5337 lhs
= make_bit_field_ref (loc
, ll_inner
, lntype
, lnbitsize
, lnbitpos
,
5338 ll_unsignedp
|| rl_unsignedp
);
5339 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5340 lhs
= build2 (BIT_AND_EXPR
, lntype
, lhs
, ll_mask
);
5342 rhs
= make_bit_field_ref (loc
, lr_inner
, rntype
, rnbitsize
, rnbitpos
,
5343 lr_unsignedp
|| rr_unsignedp
);
5344 if (! all_ones_mask_p (lr_mask
, rnbitsize
))
5345 rhs
= build2 (BIT_AND_EXPR
, rntype
, rhs
, lr_mask
);
5347 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
5350 /* There is still another way we can do something: If both pairs of
5351 fields being compared are adjacent, we may be able to make a wider
5352 field containing them both.
5354 Note that we still must mask the lhs/rhs expressions. Furthermore,
5355 the mask must be shifted to account for the shift done by
5356 make_bit_field_ref. */
5357 if ((ll_bitsize
+ ll_bitpos
== rl_bitpos
5358 && lr_bitsize
+ lr_bitpos
== rr_bitpos
)
5359 || (ll_bitpos
== rl_bitpos
+ rl_bitsize
5360 && lr_bitpos
== rr_bitpos
+ rr_bitsize
))
5364 lhs
= make_bit_field_ref (loc
, ll_inner
, lntype
,
5365 ll_bitsize
+ rl_bitsize
,
5366 MIN (ll_bitpos
, rl_bitpos
), ll_unsignedp
);
5367 rhs
= make_bit_field_ref (loc
, lr_inner
, rntype
,
5368 lr_bitsize
+ rr_bitsize
,
5369 MIN (lr_bitpos
, rr_bitpos
), lr_unsignedp
);
5371 ll_mask
= const_binop (RSHIFT_EXPR
, ll_mask
,
5372 size_int (MIN (xll_bitpos
, xrl_bitpos
)));
5373 lr_mask
= const_binop (RSHIFT_EXPR
, lr_mask
,
5374 size_int (MIN (xlr_bitpos
, xrr_bitpos
)));
5376 /* Convert to the smaller type before masking out unwanted bits. */
5378 if (lntype
!= rntype
)
5380 if (lnbitsize
> rnbitsize
)
5382 lhs
= fold_convert_loc (loc
, rntype
, lhs
);
5383 ll_mask
= fold_convert_loc (loc
, rntype
, ll_mask
);
5386 else if (lnbitsize
< rnbitsize
)
5388 rhs
= fold_convert_loc (loc
, lntype
, rhs
);
5389 lr_mask
= fold_convert_loc (loc
, lntype
, lr_mask
);
5394 if (! all_ones_mask_p (ll_mask
, ll_bitsize
+ rl_bitsize
))
5395 lhs
= build2 (BIT_AND_EXPR
, type
, lhs
, ll_mask
);
5397 if (! all_ones_mask_p (lr_mask
, lr_bitsize
+ rr_bitsize
))
5398 rhs
= build2 (BIT_AND_EXPR
, type
, rhs
, lr_mask
);
5400 return build2_loc (loc
, wanted_code
, truth_type
, lhs
, rhs
);
5406 /* Handle the case of comparisons with constants. If there is something in
5407 common between the masks, those bits of the constants must be the same.
5408 If not, the condition is always false. Test for this to avoid generating
5409 incorrect code below. */
5410 result
= const_binop (BIT_AND_EXPR
, ll_mask
, rl_mask
);
5411 if (! integer_zerop (result
)
5412 && simple_cst_equal (const_binop (BIT_AND_EXPR
, result
, l_const
),
5413 const_binop (BIT_AND_EXPR
, result
, r_const
)) != 1)
5415 if (wanted_code
== NE_EXPR
)
5417 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5418 return constant_boolean_node (true, truth_type
);
5422 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5423 return constant_boolean_node (false, truth_type
);
5427 /* Construct the expression we will return. First get the component
5428 reference we will make. Unless the mask is all ones the width of
5429 that field, perform the mask operation. Then compare with the
5431 result
= make_bit_field_ref (loc
, ll_inner
, lntype
, lnbitsize
, lnbitpos
,
5432 ll_unsignedp
|| rl_unsignedp
);
5434 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
);
5435 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
5436 result
= build2_loc (loc
, BIT_AND_EXPR
, lntype
, result
, ll_mask
);
5438 return build2_loc (loc
, wanted_code
, truth_type
, result
,
5439 const_binop (BIT_IOR_EXPR
, l_const
, r_const
));
5442 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5446 optimize_minmax_comparison (location_t loc
, enum tree_code code
, tree type
,
5450 enum tree_code op_code
;
5453 int consts_equal
, consts_lt
;
5456 STRIP_SIGN_NOPS (arg0
);
5458 op_code
= TREE_CODE (arg0
);
5459 minmax_const
= TREE_OPERAND (arg0
, 1);
5460 comp_const
= fold_convert_loc (loc
, TREE_TYPE (arg0
), op1
);
5461 consts_equal
= tree_int_cst_equal (minmax_const
, comp_const
);
5462 consts_lt
= tree_int_cst_lt (minmax_const
, comp_const
);
5463 inner
= TREE_OPERAND (arg0
, 0);
5465 /* If something does not permit us to optimize, return the original tree. */
5466 if ((op_code
!= MIN_EXPR
&& op_code
!= MAX_EXPR
)
5467 || TREE_CODE (comp_const
) != INTEGER_CST
5468 || TREE_OVERFLOW (comp_const
)
5469 || TREE_CODE (minmax_const
) != INTEGER_CST
5470 || TREE_OVERFLOW (minmax_const
))
5473 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5474 and GT_EXPR, doing the rest with recursive calls using logical
5478 case NE_EXPR
: case LT_EXPR
: case LE_EXPR
:
5481 = optimize_minmax_comparison (loc
,
5482 invert_tree_comparison (code
, false),
5485 return invert_truthvalue_loc (loc
, tem
);
5491 fold_build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
5492 optimize_minmax_comparison
5493 (loc
, EQ_EXPR
, type
, arg0
, comp_const
),
5494 optimize_minmax_comparison
5495 (loc
, GT_EXPR
, type
, arg0
, comp_const
));
5498 if (op_code
== MAX_EXPR
&& consts_equal
)
5499 /* MAX (X, 0) == 0 -> X <= 0 */
5500 return fold_build2_loc (loc
, LE_EXPR
, type
, inner
, comp_const
);
5502 else if (op_code
== MAX_EXPR
&& consts_lt
)
5503 /* MAX (X, 0) == 5 -> X == 5 */
5504 return fold_build2_loc (loc
, EQ_EXPR
, type
, inner
, comp_const
);
5506 else if (op_code
== MAX_EXPR
)
5507 /* MAX (X, 0) == -1 -> false */
5508 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5510 else if (consts_equal
)
5511 /* MIN (X, 0) == 0 -> X >= 0 */
5512 return fold_build2_loc (loc
, GE_EXPR
, type
, inner
, comp_const
);
5515 /* MIN (X, 0) == 5 -> false */
5516 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5519 /* MIN (X, 0) == -1 -> X == -1 */
5520 return fold_build2_loc (loc
, EQ_EXPR
, type
, inner
, comp_const
);
5523 if (op_code
== MAX_EXPR
&& (consts_equal
|| consts_lt
))
5524 /* MAX (X, 0) > 0 -> X > 0
5525 MAX (X, 0) > 5 -> X > 5 */
5526 return fold_build2_loc (loc
, GT_EXPR
, type
, inner
, comp_const
);
5528 else if (op_code
== MAX_EXPR
)
5529 /* MAX (X, 0) > -1 -> true */
5530 return omit_one_operand_loc (loc
, type
, integer_one_node
, inner
);
5532 else if (op_code
== MIN_EXPR
&& (consts_equal
|| consts_lt
))
5533 /* MIN (X, 0) > 0 -> false
5534 MIN (X, 0) > 5 -> false */
5535 return omit_one_operand_loc (loc
, type
, integer_zero_node
, inner
);
5538 /* MIN (X, 0) > -1 -> X > -1 */
5539 return fold_build2_loc (loc
, GT_EXPR
, type
, inner
, comp_const
);
5546 /* T is an integer expression that is being multiplied, divided, or taken a
5547 modulus (CODE says which and what kind of divide or modulus) by a
5548 constant C. See if we can eliminate that operation by folding it with
5549 other operations already in T. WIDE_TYPE, if non-null, is a type that
5550 should be used for the computation if wider than our type.
5552 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5553 (X * 2) + (Y * 4). We must, however, be assured that either the original
5554 expression would not overflow or that overflow is undefined for the type
5555 in the language in question.
5557 If we return a non-null expression, it is an equivalent form of the
5558 original computation, but need not be in the original type.
5560 We set *STRICT_OVERFLOW_P to true if the return values depends on
5561 signed overflow being undefined. Otherwise we do not change
5562 *STRICT_OVERFLOW_P. */
5565 extract_muldiv (tree t
, tree c
, enum tree_code code
, tree wide_type
,
5566 bool *strict_overflow_p
)
5568 /* To avoid exponential search depth, refuse to allow recursion past
5569 three levels. Beyond that (1) it's highly unlikely that we'll find
5570 something interesting and (2) we've probably processed it before
5571 when we built the inner expression. */
5580 ret
= extract_muldiv_1 (t
, c
, code
, wide_type
, strict_overflow_p
);
5587 extract_muldiv_1 (tree t
, tree c
, enum tree_code code
, tree wide_type
,
5588 bool *strict_overflow_p
)
5590 tree type
= TREE_TYPE (t
);
5591 enum tree_code tcode
= TREE_CODE (t
);
5592 tree ctype
= (wide_type
!= 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type
))
5593 > GET_MODE_SIZE (TYPE_MODE (type
)))
5594 ? wide_type
: type
);
5596 int same_p
= tcode
== code
;
5597 tree op0
= NULL_TREE
, op1
= NULL_TREE
;
5598 bool sub_strict_overflow_p
;
5600 /* Don't deal with constants of zero here; they confuse the code below. */
5601 if (integer_zerop (c
))
5604 if (TREE_CODE_CLASS (tcode
) == tcc_unary
)
5605 op0
= TREE_OPERAND (t
, 0);
5607 if (TREE_CODE_CLASS (tcode
) == tcc_binary
)
5608 op0
= TREE_OPERAND (t
, 0), op1
= TREE_OPERAND (t
, 1);
5610 /* Note that we need not handle conditional operations here since fold
5611 already handles those cases. So just do arithmetic here. */
5615 /* For a constant, we can always simplify if we are a multiply
5616 or (for divide and modulus) if it is a multiple of our constant. */
5617 if (code
== MULT_EXPR
5618 || integer_zerop (const_binop (TRUNC_MOD_EXPR
, t
, c
)))
5619 return const_binop (code
, fold_convert (ctype
, t
),
5620 fold_convert (ctype
, c
));
5623 CASE_CONVERT
: case NON_LVALUE_EXPR
:
5624 /* If op0 is an expression ... */
5625 if ((COMPARISON_CLASS_P (op0
)
5626 || UNARY_CLASS_P (op0
)
5627 || BINARY_CLASS_P (op0
)
5628 || VL_EXP_CLASS_P (op0
)
5629 || EXPRESSION_CLASS_P (op0
))
5630 /* ... and has wrapping overflow, and its type is smaller
5631 than ctype, then we cannot pass through as widening. */
5632 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0
))
5633 && ! (TREE_CODE (TREE_TYPE (op0
)) == INTEGER_TYPE
5634 && TYPE_IS_SIZETYPE (TREE_TYPE (op0
)))
5635 && (TYPE_PRECISION (ctype
)
5636 > TYPE_PRECISION (TREE_TYPE (op0
))))
5637 /* ... or this is a truncation (t is narrower than op0),
5638 then we cannot pass through this narrowing. */
5639 || (TYPE_PRECISION (type
)
5640 < TYPE_PRECISION (TREE_TYPE (op0
)))
5641 /* ... or signedness changes for division or modulus,
5642 then we cannot pass through this conversion. */
5643 || (code
!= MULT_EXPR
5644 && (TYPE_UNSIGNED (ctype
)
5645 != TYPE_UNSIGNED (TREE_TYPE (op0
))))
5646 /* ... or has undefined overflow while the converted to
5647 type has not, we cannot do the operation in the inner type
5648 as that would introduce undefined overflow. */
5649 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0
))
5650 && !TYPE_OVERFLOW_UNDEFINED (type
))))
5653 /* Pass the constant down and see if we can make a simplification. If
5654 we can, replace this expression with the inner simplification for
5655 possible later conversion to our or some other type. */
5656 if ((t2
= fold_convert (TREE_TYPE (op0
), c
)) != 0
5657 && TREE_CODE (t2
) == INTEGER_CST
5658 && !TREE_OVERFLOW (t2
)
5659 && (0 != (t1
= extract_muldiv (op0
, t2
, code
,
5661 ? ctype
: NULL_TREE
,
5662 strict_overflow_p
))))
5667 /* If widening the type changes it from signed to unsigned, then we
5668 must avoid building ABS_EXPR itself as unsigned. */
5669 if (TYPE_UNSIGNED (ctype
) && !TYPE_UNSIGNED (type
))
5671 tree cstype
= (*signed_type_for
) (ctype
);
5672 if ((t1
= extract_muldiv (op0
, c
, code
, cstype
, strict_overflow_p
))
5675 t1
= fold_build1 (tcode
, cstype
, fold_convert (cstype
, t1
));
5676 return fold_convert (ctype
, t1
);
5680 /* If the constant is negative, we cannot simplify this. */
5681 if (tree_int_cst_sgn (c
) == -1)
5685 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
, strict_overflow_p
))
5687 return fold_build1 (tcode
, ctype
, fold_convert (ctype
, t1
));
5690 case MIN_EXPR
: case MAX_EXPR
:
5691 /* If widening the type changes the signedness, then we can't perform
5692 this optimization as that changes the result. */
5693 if (TYPE_UNSIGNED (ctype
) != TYPE_UNSIGNED (type
))
5696 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5697 sub_strict_overflow_p
= false;
5698 if ((t1
= extract_muldiv (op0
, c
, code
, wide_type
,
5699 &sub_strict_overflow_p
)) != 0
5700 && (t2
= extract_muldiv (op1
, c
, code
, wide_type
,
5701 &sub_strict_overflow_p
)) != 0)
5703 if (tree_int_cst_sgn (c
) < 0)
5704 tcode
= (tcode
== MIN_EXPR
? MAX_EXPR
: MIN_EXPR
);
5705 if (sub_strict_overflow_p
)
5706 *strict_overflow_p
= true;
5707 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5708 fold_convert (ctype
, t2
));
5712 case LSHIFT_EXPR
: case RSHIFT_EXPR
:
5713 /* If the second operand is constant, this is a multiplication
5714 or floor division, by a power of two, so we can treat it that
5715 way unless the multiplier or divisor overflows. Signed
5716 left-shift overflow is implementation-defined rather than
5717 undefined in C90, so do not convert signed left shift into
5719 if (TREE_CODE (op1
) == INTEGER_CST
5720 && (tcode
== RSHIFT_EXPR
|| TYPE_UNSIGNED (TREE_TYPE (op0
)))
5721 /* const_binop may not detect overflow correctly,
5722 so check for it explicitly here. */
5723 && TYPE_PRECISION (TREE_TYPE (size_one_node
)) > TREE_INT_CST_LOW (op1
)
5724 && TREE_INT_CST_HIGH (op1
) == 0
5725 && 0 != (t1
= fold_convert (ctype
,
5726 const_binop (LSHIFT_EXPR
,
5729 && !TREE_OVERFLOW (t1
))
5730 return extract_muldiv (build2 (tcode
== LSHIFT_EXPR
5731 ? MULT_EXPR
: FLOOR_DIV_EXPR
,
5733 fold_convert (ctype
, op0
),
5735 c
, code
, wide_type
, strict_overflow_p
);
5738 case PLUS_EXPR
: case MINUS_EXPR
:
5739 /* See if we can eliminate the operation on both sides. If we can, we
5740 can return a new PLUS or MINUS. If we can't, the only remaining
5741 cases where we can do anything are if the second operand is a
5743 sub_strict_overflow_p
= false;
5744 t1
= extract_muldiv (op0
, c
, code
, wide_type
, &sub_strict_overflow_p
);
5745 t2
= extract_muldiv (op1
, c
, code
, wide_type
, &sub_strict_overflow_p
);
5746 if (t1
!= 0 && t2
!= 0
5747 && (code
== MULT_EXPR
5748 /* If not multiplication, we can only do this if both operands
5749 are divisible by c. */
5750 || (multiple_of_p (ctype
, op0
, c
)
5751 && multiple_of_p (ctype
, op1
, c
))))
5753 if (sub_strict_overflow_p
)
5754 *strict_overflow_p
= true;
5755 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5756 fold_convert (ctype
, t2
));
5759 /* If this was a subtraction, negate OP1 and set it to be an addition.
5760 This simplifies the logic below. */
5761 if (tcode
== MINUS_EXPR
)
5763 tcode
= PLUS_EXPR
, op1
= negate_expr (op1
);
5764 /* If OP1 was not easily negatable, the constant may be OP0. */
5765 if (TREE_CODE (op0
) == INTEGER_CST
)
5776 if (TREE_CODE (op1
) != INTEGER_CST
)
5779 /* If either OP1 or C are negative, this optimization is not safe for
5780 some of the division and remainder types while for others we need
5781 to change the code. */
5782 if (tree_int_cst_sgn (op1
) < 0 || tree_int_cst_sgn (c
) < 0)
5784 if (code
== CEIL_DIV_EXPR
)
5785 code
= FLOOR_DIV_EXPR
;
5786 else if (code
== FLOOR_DIV_EXPR
)
5787 code
= CEIL_DIV_EXPR
;
5788 else if (code
!= MULT_EXPR
5789 && code
!= CEIL_MOD_EXPR
&& code
!= FLOOR_MOD_EXPR
)
5793 /* If it's a multiply or a division/modulus operation of a multiple
5794 of our constant, do the operation and verify it doesn't overflow. */
5795 if (code
== MULT_EXPR
5796 || integer_zerop (const_binop (TRUNC_MOD_EXPR
, op1
, c
)))
5798 op1
= const_binop (code
, fold_convert (ctype
, op1
),
5799 fold_convert (ctype
, c
));
5800 /* We allow the constant to overflow with wrapping semantics. */
5802 || (TREE_OVERFLOW (op1
) && !TYPE_OVERFLOW_WRAPS (ctype
)))
5808 /* If we have an unsigned type is not a sizetype, we cannot widen
5809 the operation since it will change the result if the original
5810 computation overflowed. */
5811 if (TYPE_UNSIGNED (ctype
)
5812 && ! (TREE_CODE (ctype
) == INTEGER_TYPE
&& TYPE_IS_SIZETYPE (ctype
))
5816 /* If we were able to eliminate our operation from the first side,
5817 apply our operation to the second side and reform the PLUS. */
5818 if (t1
!= 0 && (TREE_CODE (t1
) != code
|| code
== MULT_EXPR
))
5819 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
), op1
);
5821 /* The last case is if we are a multiply. In that case, we can
5822 apply the distributive law to commute the multiply and addition
5823 if the multiplication of the constants doesn't overflow. */
5824 if (code
== MULT_EXPR
)
5825 return fold_build2 (tcode
, ctype
,
5826 fold_build2 (code
, ctype
,
5827 fold_convert (ctype
, op0
),
5828 fold_convert (ctype
, c
)),
5834 /* We have a special case here if we are doing something like
5835 (C * 8) % 4 since we know that's zero. */
5836 if ((code
== TRUNC_MOD_EXPR
|| code
== CEIL_MOD_EXPR
5837 || code
== FLOOR_MOD_EXPR
|| code
== ROUND_MOD_EXPR
)
5838 /* If the multiplication can overflow we cannot optimize this.
5839 ??? Until we can properly mark individual operations as
5840 not overflowing we need to treat sizetype special here as
5841 stor-layout relies on this opimization to make
5842 DECL_FIELD_BIT_OFFSET always a constant. */
5843 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t
))
5844 || (TREE_CODE (TREE_TYPE (t
)) == INTEGER_TYPE
5845 && TYPE_IS_SIZETYPE (TREE_TYPE (t
))))
5846 && TREE_CODE (TREE_OPERAND (t
, 1)) == INTEGER_CST
5847 && integer_zerop (const_binop (TRUNC_MOD_EXPR
, op1
, c
)))
5849 *strict_overflow_p
= true;
5850 return omit_one_operand (type
, integer_zero_node
, op0
);
5853 /* ... fall through ... */
5855 case TRUNC_DIV_EXPR
: case CEIL_DIV_EXPR
: case FLOOR_DIV_EXPR
:
5856 case ROUND_DIV_EXPR
: case EXACT_DIV_EXPR
:
5857 /* If we can extract our operation from the LHS, do so and return a
5858 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5859 do something only if the second operand is a constant. */
5861 && (t1
= extract_muldiv (op0
, c
, code
, wide_type
,
5862 strict_overflow_p
)) != 0)
5863 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, t1
),
5864 fold_convert (ctype
, op1
));
5865 else if (tcode
== MULT_EXPR
&& code
== MULT_EXPR
5866 && (t1
= extract_muldiv (op1
, c
, code
, wide_type
,
5867 strict_overflow_p
)) != 0)
5868 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
5869 fold_convert (ctype
, t1
));
5870 else if (TREE_CODE (op1
) != INTEGER_CST
)
5873 /* If these are the same operation types, we can associate them
5874 assuming no overflow. */
5879 mul
= double_int_mul_with_sign
5881 (tree_to_double_int (op1
),
5882 TYPE_PRECISION (ctype
), TYPE_UNSIGNED (ctype
)),
5884 (tree_to_double_int (c
),
5885 TYPE_PRECISION (ctype
), TYPE_UNSIGNED (ctype
)),
5886 false, &overflow_p
);
5887 overflow_p
= (((!TYPE_UNSIGNED (ctype
)
5888 || (TREE_CODE (ctype
) == INTEGER_TYPE
5889 && TYPE_IS_SIZETYPE (ctype
)))
5891 | TREE_OVERFLOW (c
) | TREE_OVERFLOW (op1
));
5892 if (!double_int_fits_to_tree_p (ctype
, mul
)
5893 && ((TYPE_UNSIGNED (ctype
) && tcode
!= MULT_EXPR
)
5894 || !TYPE_UNSIGNED (ctype
)
5895 || (TREE_CODE (ctype
) == INTEGER_TYPE
5896 && TYPE_IS_SIZETYPE (ctype
))))
5899 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
5900 double_int_to_tree (ctype
, mul
));
5903 /* If these operations "cancel" each other, we have the main
5904 optimizations of this pass, which occur when either constant is a
5905 multiple of the other, in which case we replace this with either an
5906 operation or CODE or TCODE.
5908 If we have an unsigned type that is not a sizetype, we cannot do
5909 this since it will change the result if the original computation
5911 if ((TYPE_OVERFLOW_UNDEFINED (ctype
)
5912 || (TREE_CODE (ctype
) == INTEGER_TYPE
&& TYPE_IS_SIZETYPE (ctype
)))
5913 && ((code
== MULT_EXPR
&& tcode
== EXACT_DIV_EXPR
)
5914 || (tcode
== MULT_EXPR
5915 && code
!= TRUNC_MOD_EXPR
&& code
!= CEIL_MOD_EXPR
5916 && code
!= FLOOR_MOD_EXPR
&& code
!= ROUND_MOD_EXPR
5917 && code
!= MULT_EXPR
)))
5919 if (integer_zerop (const_binop (TRUNC_MOD_EXPR
, op1
, c
)))
5921 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
5922 *strict_overflow_p
= true;
5923 return fold_build2 (tcode
, ctype
, fold_convert (ctype
, op0
),
5924 fold_convert (ctype
,
5925 const_binop (TRUNC_DIV_EXPR
,
5928 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR
, c
, op1
)))
5930 if (TYPE_OVERFLOW_UNDEFINED (ctype
))
5931 *strict_overflow_p
= true;
5932 return fold_build2 (code
, ctype
, fold_convert (ctype
, op0
),
5933 fold_convert (ctype
,
5934 const_binop (TRUNC_DIV_EXPR
,
5947 /* Return a node which has the indicated constant VALUE (either 0 or
5948 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
5949 and is of the indicated TYPE. */
5952 constant_boolean_node (bool value
, tree type
)
5954 if (type
== integer_type_node
)
5955 return value
? integer_one_node
: integer_zero_node
;
5956 else if (type
== boolean_type_node
)
5957 return value
? boolean_true_node
: boolean_false_node
;
5958 else if (TREE_CODE (type
) == VECTOR_TYPE
)
5959 return build_vector_from_val (type
,
5960 build_int_cst (TREE_TYPE (type
),
5963 return fold_convert (type
, value
? integer_one_node
: integer_zero_node
);
5967 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5968 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5969 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5970 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5971 COND is the first argument to CODE; otherwise (as in the example
5972 given here), it is the second argument. TYPE is the type of the
5973 original expression. Return NULL_TREE if no simplification is
5977 fold_binary_op_with_conditional_arg (location_t loc
,
5978 enum tree_code code
,
5979 tree type
, tree op0
, tree op1
,
5980 tree cond
, tree arg
, int cond_first_p
)
5982 tree cond_type
= cond_first_p
? TREE_TYPE (op0
) : TREE_TYPE (op1
);
5983 tree arg_type
= cond_first_p
? TREE_TYPE (op1
) : TREE_TYPE (op0
);
5984 tree test
, true_value
, false_value
;
5985 tree lhs
= NULL_TREE
;
5986 tree rhs
= NULL_TREE
;
5988 if (TREE_CODE (cond
) == COND_EXPR
)
5990 test
= TREE_OPERAND (cond
, 0);
5991 true_value
= TREE_OPERAND (cond
, 1);
5992 false_value
= TREE_OPERAND (cond
, 2);
5993 /* If this operand throws an expression, then it does not make
5994 sense to try to perform a logical or arithmetic operation
5996 if (VOID_TYPE_P (TREE_TYPE (true_value
)))
5998 if (VOID_TYPE_P (TREE_TYPE (false_value
)))
6003 tree testtype
= TREE_TYPE (cond
);
6005 true_value
= constant_boolean_node (true, testtype
);
6006 false_value
= constant_boolean_node (false, testtype
);
6009 /* This transformation is only worthwhile if we don't have to wrap ARG
6010 in a SAVE_EXPR and the operation can be simplified on at least one
6011 of the branches once its pushed inside the COND_EXPR. */
6012 if (!TREE_CONSTANT (arg
)
6013 && (TREE_SIDE_EFFECTS (arg
)
6014 || TREE_CONSTANT (true_value
) || TREE_CONSTANT (false_value
)))
6017 arg
= fold_convert_loc (loc
, arg_type
, arg
);
6020 true_value
= fold_convert_loc (loc
, cond_type
, true_value
);
6022 lhs
= fold_build2_loc (loc
, code
, type
, true_value
, arg
);
6024 lhs
= fold_build2_loc (loc
, code
, type
, arg
, true_value
);
6028 false_value
= fold_convert_loc (loc
, cond_type
, false_value
);
6030 rhs
= fold_build2_loc (loc
, code
, type
, false_value
, arg
);
6032 rhs
= fold_build2_loc (loc
, code
, type
, arg
, false_value
);
6035 /* Check that we have simplified at least one of the branches. */
6036 if (!TREE_CONSTANT (arg
) && !TREE_CONSTANT (lhs
) && !TREE_CONSTANT (rhs
))
6039 return fold_build3_loc (loc
, COND_EXPR
, type
, test
, lhs
, rhs
);
6043 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6045 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6046 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6047 ADDEND is the same as X.
6049 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6050 and finite. The problematic cases are when X is zero, and its mode
6051 has signed zeros. In the case of rounding towards -infinity,
6052 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6053 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6056 fold_real_zero_addition_p (const_tree type
, const_tree addend
, int negate
)
6058 if (!real_zerop (addend
))
6061 /* Don't allow the fold with -fsignaling-nans. */
6062 if (HONOR_SNANS (TYPE_MODE (type
)))
6065 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6066 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
)))
6069 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6070 if (TREE_CODE (addend
) == REAL_CST
6071 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend
)))
6074 /* The mode has signed zeros, and we have to honor their sign.
6075 In this situation, there is only one case we can return true for.
6076 X - 0 is the same as X unless rounding towards -infinity is
6078 return negate
&& !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type
));
6081 /* Subroutine of fold() that checks comparisons of built-in math
6082 functions against real constants.
6084 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6085 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6086 is the type of the result and ARG0 and ARG1 are the operands of the
6087 comparison. ARG1 must be a TREE_REAL_CST.
6089 The function returns the constant folded tree if a simplification
6090 can be made, and NULL_TREE otherwise. */
6093 fold_mathfn_compare (location_t loc
,
6094 enum built_in_function fcode
, enum tree_code code
,
6095 tree type
, tree arg0
, tree arg1
)
6099 if (BUILTIN_SQRT_P (fcode
))
6101 tree arg
= CALL_EXPR_ARG (arg0
, 0);
6102 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (arg0
));
6104 c
= TREE_REAL_CST (arg1
);
6105 if (REAL_VALUE_NEGATIVE (c
))
6107 /* sqrt(x) < y is always false, if y is negative. */
6108 if (code
== EQ_EXPR
|| code
== LT_EXPR
|| code
== LE_EXPR
)
6109 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
6111 /* sqrt(x) > y is always true, if y is negative and we
6112 don't care about NaNs, i.e. negative values of x. */
6113 if (code
== NE_EXPR
|| !HONOR_NANS (mode
))
6114 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
6116 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6117 return fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6118 build_real (TREE_TYPE (arg
), dconst0
));
6120 else if (code
== GT_EXPR
|| code
== GE_EXPR
)
6124 REAL_ARITHMETIC (c2
, MULT_EXPR
, c
, c
);
6125 real_convert (&c2
, mode
, &c2
);
6127 if (REAL_VALUE_ISINF (c2
))
6129 /* sqrt(x) > y is x == +Inf, when y is very large. */
6130 if (HONOR_INFINITIES (mode
))
6131 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg
,
6132 build_real (TREE_TYPE (arg
), c2
));
6134 /* sqrt(x) > y is always false, when y is very large
6135 and we don't care about infinities. */
6136 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
6139 /* sqrt(x) > c is the same as x > c*c. */
6140 return fold_build2_loc (loc
, code
, type
, arg
,
6141 build_real (TREE_TYPE (arg
), c2
));
6143 else if (code
== LT_EXPR
|| code
== LE_EXPR
)
6147 REAL_ARITHMETIC (c2
, MULT_EXPR
, c
, c
);
6148 real_convert (&c2
, mode
, &c2
);
6150 if (REAL_VALUE_ISINF (c2
))
6152 /* sqrt(x) < y is always true, when y is a very large
6153 value and we don't care about NaNs or Infinities. */
6154 if (! HONOR_NANS (mode
) && ! HONOR_INFINITIES (mode
))
6155 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
6157 /* sqrt(x) < y is x != +Inf when y is very large and we
6158 don't care about NaNs. */
6159 if (! HONOR_NANS (mode
))
6160 return fold_build2_loc (loc
, NE_EXPR
, type
, arg
,
6161 build_real (TREE_TYPE (arg
), c2
));
6163 /* sqrt(x) < y is x >= 0 when y is very large and we
6164 don't care about Infinities. */
6165 if (! HONOR_INFINITIES (mode
))
6166 return fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6167 build_real (TREE_TYPE (arg
), dconst0
));
6169 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6170 arg
= save_expr (arg
);
6171 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
6172 fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6173 build_real (TREE_TYPE (arg
),
6175 fold_build2_loc (loc
, NE_EXPR
, type
, arg
,
6176 build_real (TREE_TYPE (arg
),
6180 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6181 if (! HONOR_NANS (mode
))
6182 return fold_build2_loc (loc
, code
, type
, arg
,
6183 build_real (TREE_TYPE (arg
), c2
));
6185 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6186 arg
= save_expr (arg
);
6187 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
6188 fold_build2_loc (loc
, GE_EXPR
, type
, arg
,
6189 build_real (TREE_TYPE (arg
),
6191 fold_build2_loc (loc
, code
, type
, arg
,
6192 build_real (TREE_TYPE (arg
),
6200 /* Subroutine of fold() that optimizes comparisons against Infinities,
6201 either +Inf or -Inf.
6203 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6204 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6205 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6207 The function returns the constant folded tree if a simplification
6208 can be made, and NULL_TREE otherwise. */
6211 fold_inf_compare (location_t loc
, enum tree_code code
, tree type
,
6212 tree arg0
, tree arg1
)
6214 enum machine_mode mode
;
6215 REAL_VALUE_TYPE max
;
6219 mode
= TYPE_MODE (TREE_TYPE (arg0
));
6221 /* For negative infinity swap the sense of the comparison. */
6222 neg
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
));
6224 code
= swap_tree_comparison (code
);
6229 /* x > +Inf is always false, if with ignore sNANs. */
6230 if (HONOR_SNANS (mode
))
6232 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6235 /* x <= +Inf is always true, if we don't case about NaNs. */
6236 if (! HONOR_NANS (mode
))
6237 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6239 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6240 arg0
= save_expr (arg0
);
6241 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
, arg0
);
6245 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6246 real_maxval (&max
, neg
, mode
);
6247 return fold_build2_loc (loc
, neg
? LT_EXPR
: GT_EXPR
, type
,
6248 arg0
, build_real (TREE_TYPE (arg0
), max
));
6251 /* x < +Inf is always equal to x <= DBL_MAX. */
6252 real_maxval (&max
, neg
, mode
);
6253 return fold_build2_loc (loc
, neg
? GE_EXPR
: LE_EXPR
, type
,
6254 arg0
, build_real (TREE_TYPE (arg0
), max
));
6257 /* x != +Inf is always equal to !(x > DBL_MAX). */
6258 real_maxval (&max
, neg
, mode
);
6259 if (! HONOR_NANS (mode
))
6260 return fold_build2_loc (loc
, neg
? GE_EXPR
: LE_EXPR
, type
,
6261 arg0
, build_real (TREE_TYPE (arg0
), max
));
6263 temp
= fold_build2_loc (loc
, neg
? LT_EXPR
: GT_EXPR
, type
,
6264 arg0
, build_real (TREE_TYPE (arg0
), max
));
6265 return fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, temp
);
6274 /* Subroutine of fold() that optimizes comparisons of a division by
6275 a nonzero integer constant against an integer constant, i.e.
6278 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6279 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6280 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6282 The function returns the constant folded tree if a simplification
6283 can be made, and NULL_TREE otherwise. */
6286 fold_div_compare (location_t loc
,
6287 enum tree_code code
, tree type
, tree arg0
, tree arg1
)
6289 tree prod
, tmp
, hi
, lo
;
6290 tree arg00
= TREE_OPERAND (arg0
, 0);
6291 tree arg01
= TREE_OPERAND (arg0
, 1);
6293 bool unsigned_p
= TYPE_UNSIGNED (TREE_TYPE (arg0
));
6297 /* We have to do this the hard way to detect unsigned overflow.
6298 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6299 overflow
= mul_double_with_sign (TREE_INT_CST_LOW (arg01
),
6300 TREE_INT_CST_HIGH (arg01
),
6301 TREE_INT_CST_LOW (arg1
),
6302 TREE_INT_CST_HIGH (arg1
),
6303 &val
.low
, &val
.high
, unsigned_p
);
6304 prod
= force_fit_type_double (TREE_TYPE (arg00
), val
, -1, overflow
);
6305 neg_overflow
= false;
6309 tmp
= int_const_binop (MINUS_EXPR
, arg01
,
6310 build_int_cst (TREE_TYPE (arg01
), 1));
6313 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6314 overflow
= add_double_with_sign (TREE_INT_CST_LOW (prod
),
6315 TREE_INT_CST_HIGH (prod
),
6316 TREE_INT_CST_LOW (tmp
),
6317 TREE_INT_CST_HIGH (tmp
),
6318 &val
.low
, &val
.high
, unsigned_p
);
6319 hi
= force_fit_type_double (TREE_TYPE (arg00
), val
,
6320 -1, overflow
| TREE_OVERFLOW (prod
));
6322 else if (tree_int_cst_sgn (arg01
) >= 0)
6324 tmp
= int_const_binop (MINUS_EXPR
, arg01
,
6325 build_int_cst (TREE_TYPE (arg01
), 1));
6326 switch (tree_int_cst_sgn (arg1
))
6329 neg_overflow
= true;
6330 lo
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
6335 lo
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
6340 hi
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
6350 /* A negative divisor reverses the relational operators. */
6351 code
= swap_tree_comparison (code
);
6353 tmp
= int_const_binop (PLUS_EXPR
, arg01
,
6354 build_int_cst (TREE_TYPE (arg01
), 1));
6355 switch (tree_int_cst_sgn (arg1
))
6358 hi
= int_const_binop (MINUS_EXPR
, prod
, tmp
);
6363 hi
= fold_negate_const (tmp
, TREE_TYPE (arg0
));
6368 neg_overflow
= true;
6369 lo
= int_const_binop (PLUS_EXPR
, prod
, tmp
);
6381 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6382 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg00
);
6383 if (TREE_OVERFLOW (hi
))
6384 return fold_build2_loc (loc
, GE_EXPR
, type
, arg00
, lo
);
6385 if (TREE_OVERFLOW (lo
))
6386 return fold_build2_loc (loc
, LE_EXPR
, type
, arg00
, hi
);
6387 return build_range_check (loc
, type
, arg00
, 1, lo
, hi
);
6390 if (TREE_OVERFLOW (lo
) && TREE_OVERFLOW (hi
))
6391 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg00
);
6392 if (TREE_OVERFLOW (hi
))
6393 return fold_build2_loc (loc
, LT_EXPR
, type
, arg00
, lo
);
6394 if (TREE_OVERFLOW (lo
))
6395 return fold_build2_loc (loc
, GT_EXPR
, type
, arg00
, hi
);
6396 return build_range_check (loc
, type
, arg00
, 0, lo
, hi
);
6399 if (TREE_OVERFLOW (lo
))
6401 tmp
= neg_overflow
? integer_zero_node
: integer_one_node
;
6402 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6404 return fold_build2_loc (loc
, LT_EXPR
, type
, arg00
, lo
);
6407 if (TREE_OVERFLOW (hi
))
6409 tmp
= neg_overflow
? integer_zero_node
: integer_one_node
;
6410 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6412 return fold_build2_loc (loc
, LE_EXPR
, type
, arg00
, hi
);
6415 if (TREE_OVERFLOW (hi
))
6417 tmp
= neg_overflow
? integer_one_node
: integer_zero_node
;
6418 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6420 return fold_build2_loc (loc
, GT_EXPR
, type
, arg00
, hi
);
6423 if (TREE_OVERFLOW (lo
))
6425 tmp
= neg_overflow
? integer_one_node
: integer_zero_node
;
6426 return omit_one_operand_loc (loc
, type
, tmp
, arg00
);
6428 return fold_build2_loc (loc
, GE_EXPR
, type
, arg00
, lo
);
6438 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6439 equality/inequality test, then return a simplified form of the test
6440 using a sign testing. Otherwise return NULL. TYPE is the desired
6444 fold_single_bit_test_into_sign_test (location_t loc
,
6445 enum tree_code code
, tree arg0
, tree arg1
,
6448 /* If this is testing a single bit, we can optimize the test. */
6449 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6450 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6451 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6453 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6454 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6455 tree arg00
= sign_bit_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg0
, 1));
6457 if (arg00
!= NULL_TREE
6458 /* This is only a win if casting to a signed type is cheap,
6459 i.e. when arg00's type is not a partial mode. */
6460 && TYPE_PRECISION (TREE_TYPE (arg00
))
6461 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00
))))
6463 tree stype
= signed_type_for (TREE_TYPE (arg00
));
6464 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
6466 fold_convert_loc (loc
, stype
, arg00
),
6467 build_int_cst (stype
, 0));
6474 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6475 equality/inequality test, then return a simplified form of
6476 the test using shifts and logical operations. Otherwise return
6477 NULL. TYPE is the desired result type. */
6480 fold_single_bit_test (location_t loc
, enum tree_code code
,
6481 tree arg0
, tree arg1
, tree result_type
)
6483 /* If this is testing a single bit, we can optimize the test. */
6484 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
6485 && TREE_CODE (arg0
) == BIT_AND_EXPR
&& integer_zerop (arg1
)
6486 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
6488 tree inner
= TREE_OPERAND (arg0
, 0);
6489 tree type
= TREE_TYPE (arg0
);
6490 int bitnum
= tree_log2 (TREE_OPERAND (arg0
, 1));
6491 enum machine_mode operand_mode
= TYPE_MODE (type
);
6493 tree signed_type
, unsigned_type
, intermediate_type
;
6496 /* First, see if we can fold the single bit test into a sign-bit
6498 tem
= fold_single_bit_test_into_sign_test (loc
, code
, arg0
, arg1
,
6503 /* Otherwise we have (A & C) != 0 where C is a single bit,
6504 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6505 Similarly for (A & C) == 0. */
6507 /* If INNER is a right shift of a constant and it plus BITNUM does
6508 not overflow, adjust BITNUM and INNER. */
6509 if (TREE_CODE (inner
) == RSHIFT_EXPR
6510 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
6511 && TREE_INT_CST_HIGH (TREE_OPERAND (inner
, 1)) == 0
6512 && bitnum
< TYPE_PRECISION (type
)
6513 && 0 > compare_tree_int (TREE_OPERAND (inner
, 1),
6514 bitnum
- TYPE_PRECISION (type
)))
6516 bitnum
+= TREE_INT_CST_LOW (TREE_OPERAND (inner
, 1));
6517 inner
= TREE_OPERAND (inner
, 0);
6520 /* If we are going to be able to omit the AND below, we must do our
6521 operations as unsigned. If we must use the AND, we have a choice.
6522 Normally unsigned is faster, but for some machines signed is. */
6523 #ifdef LOAD_EXTEND_OP
6524 ops_unsigned
= (LOAD_EXTEND_OP (operand_mode
) == SIGN_EXTEND
6525 && !flag_syntax_only
) ? 0 : 1;
6530 signed_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 0);
6531 unsigned_type
= lang_hooks
.types
.type_for_mode (operand_mode
, 1);
6532 intermediate_type
= ops_unsigned
? unsigned_type
: signed_type
;
6533 inner
= fold_convert_loc (loc
, intermediate_type
, inner
);
6536 inner
= build2 (RSHIFT_EXPR
, intermediate_type
,
6537 inner
, size_int (bitnum
));
6539 one
= build_int_cst (intermediate_type
, 1);
6541 if (code
== EQ_EXPR
)
6542 inner
= fold_build2_loc (loc
, BIT_XOR_EXPR
, intermediate_type
, inner
, one
);
6544 /* Put the AND last so it can combine with more things. */
6545 inner
= build2 (BIT_AND_EXPR
, intermediate_type
, inner
, one
);
6547 /* Make sure to return the proper type. */
6548 inner
= fold_convert_loc (loc
, result_type
, inner
);
6555 /* Check whether we are allowed to reorder operands arg0 and arg1,
6556 such that the evaluation of arg1 occurs before arg0. */
6559 reorder_operands_p (const_tree arg0
, const_tree arg1
)
6561 if (! flag_evaluation_order
)
6563 if (TREE_CONSTANT (arg0
) || TREE_CONSTANT (arg1
))
6565 return ! TREE_SIDE_EFFECTS (arg0
)
6566 && ! TREE_SIDE_EFFECTS (arg1
);
6569 /* Test whether it is preferable two swap two operands, ARG0 and
6570 ARG1, for example because ARG0 is an integer constant and ARG1
6571 isn't. If REORDER is true, only recommend swapping if we can
6572 evaluate the operands in reverse order. */
6575 tree_swap_operands_p (const_tree arg0
, const_tree arg1
, bool reorder
)
6577 STRIP_SIGN_NOPS (arg0
);
6578 STRIP_SIGN_NOPS (arg1
);
6580 if (TREE_CODE (arg1
) == INTEGER_CST
)
6582 if (TREE_CODE (arg0
) == INTEGER_CST
)
6585 if (TREE_CODE (arg1
) == REAL_CST
)
6587 if (TREE_CODE (arg0
) == REAL_CST
)
6590 if (TREE_CODE (arg1
) == FIXED_CST
)
6592 if (TREE_CODE (arg0
) == FIXED_CST
)
6595 if (TREE_CODE (arg1
) == COMPLEX_CST
)
6597 if (TREE_CODE (arg0
) == COMPLEX_CST
)
6600 if (TREE_CONSTANT (arg1
))
6602 if (TREE_CONSTANT (arg0
))
6605 if (optimize_function_for_size_p (cfun
))
6608 if (reorder
&& flag_evaluation_order
6609 && (TREE_SIDE_EFFECTS (arg0
) || TREE_SIDE_EFFECTS (arg1
)))
6612 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6613 for commutative and comparison operators. Ensuring a canonical
6614 form allows the optimizers to find additional redundancies without
6615 having to explicitly check for both orderings. */
6616 if (TREE_CODE (arg0
) == SSA_NAME
6617 && TREE_CODE (arg1
) == SSA_NAME
6618 && SSA_NAME_VERSION (arg0
) > SSA_NAME_VERSION (arg1
))
6621 /* Put SSA_NAMEs last. */
6622 if (TREE_CODE (arg1
) == SSA_NAME
)
6624 if (TREE_CODE (arg0
) == SSA_NAME
)
6627 /* Put variables last. */
6636 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6637 ARG0 is extended to a wider type. */
6640 fold_widened_comparison (location_t loc
, enum tree_code code
,
6641 tree type
, tree arg0
, tree arg1
)
6643 tree arg0_unw
= get_unwidened (arg0
, NULL_TREE
);
6645 tree shorter_type
, outer_type
;
6649 if (arg0_unw
== arg0
)
6651 shorter_type
= TREE_TYPE (arg0_unw
);
6653 #ifdef HAVE_canonicalize_funcptr_for_compare
6654 /* Disable this optimization if we're casting a function pointer
6655 type on targets that require function pointer canonicalization. */
6656 if (HAVE_canonicalize_funcptr_for_compare
6657 && TREE_CODE (shorter_type
) == POINTER_TYPE
6658 && TREE_CODE (TREE_TYPE (shorter_type
)) == FUNCTION_TYPE
)
6662 if (TYPE_PRECISION (TREE_TYPE (arg0
)) <= TYPE_PRECISION (shorter_type
))
6665 arg1_unw
= get_unwidened (arg1
, NULL_TREE
);
6667 /* If possible, express the comparison in the shorter mode. */
6668 if ((code
== EQ_EXPR
|| code
== NE_EXPR
6669 || TYPE_UNSIGNED (TREE_TYPE (arg0
)) == TYPE_UNSIGNED (shorter_type
))
6670 && (TREE_TYPE (arg1_unw
) == shorter_type
6671 || ((TYPE_PRECISION (shorter_type
)
6672 >= TYPE_PRECISION (TREE_TYPE (arg1_unw
)))
6673 && (TYPE_UNSIGNED (shorter_type
)
6674 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw
))))
6675 || (TREE_CODE (arg1_unw
) == INTEGER_CST
6676 && (TREE_CODE (shorter_type
) == INTEGER_TYPE
6677 || TREE_CODE (shorter_type
) == BOOLEAN_TYPE
)
6678 && int_fits_type_p (arg1_unw
, shorter_type
))))
6679 return fold_build2_loc (loc
, code
, type
, arg0_unw
,
6680 fold_convert_loc (loc
, shorter_type
, arg1_unw
));
6682 if (TREE_CODE (arg1_unw
) != INTEGER_CST
6683 || TREE_CODE (shorter_type
) != INTEGER_TYPE
6684 || !int_fits_type_p (arg1_unw
, shorter_type
))
6687 /* If we are comparing with the integer that does not fit into the range
6688 of the shorter type, the result is known. */
6689 outer_type
= TREE_TYPE (arg1_unw
);
6690 min
= lower_bound_in_type (outer_type
, shorter_type
);
6691 max
= upper_bound_in_type (outer_type
, shorter_type
);
6693 above
= integer_nonzerop (fold_relational_const (LT_EXPR
, type
,
6695 below
= integer_nonzerop (fold_relational_const (LT_EXPR
, type
,
6702 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6707 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6713 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6715 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6720 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
6722 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
6731 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6732 ARG0 just the signedness is changed. */
6735 fold_sign_changed_comparison (location_t loc
, enum tree_code code
, tree type
,
6736 tree arg0
, tree arg1
)
6739 tree inner_type
, outer_type
;
6741 if (!CONVERT_EXPR_P (arg0
))
6744 outer_type
= TREE_TYPE (arg0
);
6745 arg0_inner
= TREE_OPERAND (arg0
, 0);
6746 inner_type
= TREE_TYPE (arg0_inner
);
6748 #ifdef HAVE_canonicalize_funcptr_for_compare
6749 /* Disable this optimization if we're casting a function pointer
6750 type on targets that require function pointer canonicalization. */
6751 if (HAVE_canonicalize_funcptr_for_compare
6752 && TREE_CODE (inner_type
) == POINTER_TYPE
6753 && TREE_CODE (TREE_TYPE (inner_type
)) == FUNCTION_TYPE
)
6757 if (TYPE_PRECISION (inner_type
) != TYPE_PRECISION (outer_type
))
6760 if (TREE_CODE (arg1
) != INTEGER_CST
6761 && !(CONVERT_EXPR_P (arg1
)
6762 && TREE_TYPE (TREE_OPERAND (arg1
, 0)) == inner_type
))
6765 if ((TYPE_UNSIGNED (inner_type
) != TYPE_UNSIGNED (outer_type
)
6766 || POINTER_TYPE_P (inner_type
) != POINTER_TYPE_P (outer_type
))
6771 if (TREE_CODE (arg1
) == INTEGER_CST
)
6772 arg1
= force_fit_type_double (inner_type
, tree_to_double_int (arg1
),
6773 0, TREE_OVERFLOW (arg1
));
6775 arg1
= fold_convert_loc (loc
, inner_type
, arg1
);
6777 return fold_build2_loc (loc
, code
, type
, arg0_inner
, arg1
);
6780 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6781 step of the array. Reconstructs s and delta in the case of s *
6782 delta being an integer constant (and thus already folded). ADDR is
6783 the address. MULT is the multiplicative expression. If the
6784 function succeeds, the new address expression is returned.
6785 Otherwise NULL_TREE is returned. LOC is the location of the
6786 resulting expression. */
6789 try_move_mult_to_index (location_t loc
, tree addr
, tree op1
)
6791 tree s
, delta
, step
;
6792 tree ref
= TREE_OPERAND (addr
, 0), pref
;
6797 /* Strip the nops that might be added when converting op1 to sizetype. */
6800 /* Canonicalize op1 into a possibly non-constant delta
6801 and an INTEGER_CST s. */
6802 if (TREE_CODE (op1
) == MULT_EXPR
)
6804 tree arg0
= TREE_OPERAND (op1
, 0), arg1
= TREE_OPERAND (op1
, 1);
6809 if (TREE_CODE (arg0
) == INTEGER_CST
)
6814 else if (TREE_CODE (arg1
) == INTEGER_CST
)
6822 else if (TREE_CODE (op1
) == INTEGER_CST
)
6829 /* Simulate we are delta * 1. */
6831 s
= integer_one_node
;
6834 for (;; ref
= TREE_OPERAND (ref
, 0))
6836 if (TREE_CODE (ref
) == ARRAY_REF
)
6840 /* Remember if this was a multi-dimensional array. */
6841 if (TREE_CODE (TREE_OPERAND (ref
, 0)) == ARRAY_REF
)
6844 domain
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref
, 0)));
6847 itype
= TREE_TYPE (domain
);
6849 step
= array_ref_element_size (ref
);
6850 if (TREE_CODE (step
) != INTEGER_CST
)
6855 if (! tree_int_cst_equal (step
, s
))
6860 /* Try if delta is a multiple of step. */
6861 tree tmp
= div_if_zero_remainder (EXACT_DIV_EXPR
, op1
, step
);
6867 /* Only fold here if we can verify we do not overflow one
6868 dimension of a multi-dimensional array. */
6873 if (TREE_CODE (TREE_OPERAND (ref
, 1)) != INTEGER_CST
6874 || !TYPE_MAX_VALUE (domain
)
6875 || TREE_CODE (TYPE_MAX_VALUE (domain
)) != INTEGER_CST
)
6878 tmp
= fold_binary_loc (loc
, PLUS_EXPR
, itype
,
6879 fold_convert_loc (loc
, itype
,
6880 TREE_OPERAND (ref
, 1)),
6881 fold_convert_loc (loc
, itype
, delta
));
6883 || TREE_CODE (tmp
) != INTEGER_CST
6884 || tree_int_cst_lt (TYPE_MAX_VALUE (domain
), tmp
))
6890 else if (TREE_CODE (ref
) == COMPONENT_REF
6891 && TREE_CODE (TREE_TYPE (ref
)) == ARRAY_TYPE
)
6895 /* Remember if this was a multi-dimensional array. */
6896 if (TREE_CODE (TREE_OPERAND (ref
, 0)) == ARRAY_REF
)
6899 domain
= TYPE_DOMAIN (TREE_TYPE (ref
));
6902 itype
= TREE_TYPE (domain
);
6904 step
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref
)));
6905 if (TREE_CODE (step
) != INTEGER_CST
)
6910 if (! tree_int_cst_equal (step
, s
))
6915 /* Try if delta is a multiple of step. */
6916 tree tmp
= div_if_zero_remainder (EXACT_DIV_EXPR
, op1
, step
);
6922 /* Only fold here if we can verify we do not overflow one
6923 dimension of a multi-dimensional array. */
6928 if (!TYPE_MIN_VALUE (domain
)
6929 || !TYPE_MAX_VALUE (domain
)
6930 || TREE_CODE (TYPE_MAX_VALUE (domain
)) != INTEGER_CST
)
6933 tmp
= fold_binary_loc (loc
, PLUS_EXPR
, itype
,
6934 fold_convert_loc (loc
, itype
,
6935 TYPE_MIN_VALUE (domain
)),
6936 fold_convert_loc (loc
, itype
, delta
));
6937 if (TREE_CODE (tmp
) != INTEGER_CST
6938 || tree_int_cst_lt (TYPE_MAX_VALUE (domain
), tmp
))
6947 if (!handled_component_p (ref
))
6951 /* We found the suitable array reference. So copy everything up to it,
6952 and replace the index. */
6954 pref
= TREE_OPERAND (addr
, 0);
6955 ret
= copy_node (pref
);
6956 SET_EXPR_LOCATION (ret
, loc
);
6961 pref
= TREE_OPERAND (pref
, 0);
6962 TREE_OPERAND (pos
, 0) = copy_node (pref
);
6963 pos
= TREE_OPERAND (pos
, 0);
6966 if (TREE_CODE (ref
) == ARRAY_REF
)
6968 TREE_OPERAND (pos
, 1)
6969 = fold_build2_loc (loc
, PLUS_EXPR
, itype
,
6970 fold_convert_loc (loc
, itype
, TREE_OPERAND (pos
, 1)),
6971 fold_convert_loc (loc
, itype
, delta
));
6972 return fold_build1_loc (loc
, ADDR_EXPR
, TREE_TYPE (addr
), ret
);
6974 else if (TREE_CODE (ref
) == COMPONENT_REF
)
6976 gcc_assert (ret
== pos
);
6977 ret
= build4_loc (loc
, ARRAY_REF
, TREE_TYPE (TREE_TYPE (ref
)), ret
,
6979 (loc
, PLUS_EXPR
, itype
,
6980 fold_convert_loc (loc
, itype
,
6982 (TYPE_DOMAIN (TREE_TYPE (ref
)))),
6983 fold_convert_loc (loc
, itype
, delta
)),
6984 NULL_TREE
, NULL_TREE
);
6985 return build_fold_addr_expr_loc (loc
, ret
);
6992 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6993 means A >= Y && A != MAX, but in this case we know that
6994 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6997 fold_to_nonsharp_ineq_using_bound (location_t loc
, tree ineq
, tree bound
)
6999 tree a
, typea
, type
= TREE_TYPE (ineq
), a1
, diff
, y
;
7001 if (TREE_CODE (bound
) == LT_EXPR
)
7002 a
= TREE_OPERAND (bound
, 0);
7003 else if (TREE_CODE (bound
) == GT_EXPR
)
7004 a
= TREE_OPERAND (bound
, 1);
7008 typea
= TREE_TYPE (a
);
7009 if (!INTEGRAL_TYPE_P (typea
)
7010 && !POINTER_TYPE_P (typea
))
7013 if (TREE_CODE (ineq
) == LT_EXPR
)
7015 a1
= TREE_OPERAND (ineq
, 1);
7016 y
= TREE_OPERAND (ineq
, 0);
7018 else if (TREE_CODE (ineq
) == GT_EXPR
)
7020 a1
= TREE_OPERAND (ineq
, 0);
7021 y
= TREE_OPERAND (ineq
, 1);
7026 if (TREE_TYPE (a1
) != typea
)
7029 if (POINTER_TYPE_P (typea
))
7031 /* Convert the pointer types into integer before taking the difference. */
7032 tree ta
= fold_convert_loc (loc
, ssizetype
, a
);
7033 tree ta1
= fold_convert_loc (loc
, ssizetype
, a1
);
7034 diff
= fold_binary_loc (loc
, MINUS_EXPR
, ssizetype
, ta1
, ta
);
7037 diff
= fold_binary_loc (loc
, MINUS_EXPR
, typea
, a1
, a
);
7039 if (!diff
|| !integer_onep (diff
))
7042 return fold_build2_loc (loc
, GE_EXPR
, type
, a
, y
);
7045 /* Fold a sum or difference of at least one multiplication.
7046 Returns the folded tree or NULL if no simplification could be made. */
7049 fold_plusminus_mult_expr (location_t loc
, enum tree_code code
, tree type
,
7050 tree arg0
, tree arg1
)
7052 tree arg00
, arg01
, arg10
, arg11
;
7053 tree alt0
= NULL_TREE
, alt1
= NULL_TREE
, same
;
7055 /* (A * C) +- (B * C) -> (A+-B) * C.
7056 (A * C) +- A -> A * (C+-1).
7057 We are most concerned about the case where C is a constant,
7058 but other combinations show up during loop reduction. Since
7059 it is not difficult, try all four possibilities. */
7061 if (TREE_CODE (arg0
) == MULT_EXPR
)
7063 arg00
= TREE_OPERAND (arg0
, 0);
7064 arg01
= TREE_OPERAND (arg0
, 1);
7066 else if (TREE_CODE (arg0
) == INTEGER_CST
)
7068 arg00
= build_one_cst (type
);
7073 /* We cannot generate constant 1 for fract. */
7074 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
7077 arg01
= build_one_cst (type
);
7079 if (TREE_CODE (arg1
) == MULT_EXPR
)
7081 arg10
= TREE_OPERAND (arg1
, 0);
7082 arg11
= TREE_OPERAND (arg1
, 1);
7084 else if (TREE_CODE (arg1
) == INTEGER_CST
)
7086 arg10
= build_one_cst (type
);
7087 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7088 the purpose of this canonicalization. */
7089 if (TREE_INT_CST_HIGH (arg1
) == -1
7090 && negate_expr_p (arg1
)
7091 && code
== PLUS_EXPR
)
7093 arg11
= negate_expr (arg1
);
7101 /* We cannot generate constant 1 for fract. */
7102 if (ALL_FRACT_MODE_P (TYPE_MODE (type
)))
7105 arg11
= build_one_cst (type
);
7109 if (operand_equal_p (arg01
, arg11
, 0))
7110 same
= arg01
, alt0
= arg00
, alt1
= arg10
;
7111 else if (operand_equal_p (arg00
, arg10
, 0))
7112 same
= arg00
, alt0
= arg01
, alt1
= arg11
;
7113 else if (operand_equal_p (arg00
, arg11
, 0))
7114 same
= arg00
, alt0
= arg01
, alt1
= arg10
;
7115 else if (operand_equal_p (arg01
, arg10
, 0))
7116 same
= arg01
, alt0
= arg00
, alt1
= arg11
;
7118 /* No identical multiplicands; see if we can find a common
7119 power-of-two factor in non-power-of-two multiplies. This
7120 can help in multi-dimensional array access. */
7121 else if (host_integerp (arg01
, 0)
7122 && host_integerp (arg11
, 0))
7124 HOST_WIDE_INT int01
, int11
, tmp
;
7127 int01
= TREE_INT_CST_LOW (arg01
);
7128 int11
= TREE_INT_CST_LOW (arg11
);
7130 /* Move min of absolute values to int11. */
7131 if (absu_hwi (int01
) < absu_hwi (int11
))
7133 tmp
= int01
, int01
= int11
, int11
= tmp
;
7134 alt0
= arg00
, arg00
= arg10
, arg10
= alt0
;
7141 if (exact_log2 (absu_hwi (int11
)) > 0 && int01
% int11
== 0
7142 /* The remainder should not be a constant, otherwise we
7143 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7144 increased the number of multiplications necessary. */
7145 && TREE_CODE (arg10
) != INTEGER_CST
)
7147 alt0
= fold_build2_loc (loc
, MULT_EXPR
, TREE_TYPE (arg00
), arg00
,
7148 build_int_cst (TREE_TYPE (arg00
),
7153 maybe_same
= alt0
, alt0
= alt1
, alt1
= maybe_same
;
7158 return fold_build2_loc (loc
, MULT_EXPR
, type
,
7159 fold_build2_loc (loc
, code
, type
,
7160 fold_convert_loc (loc
, type
, alt0
),
7161 fold_convert_loc (loc
, type
, alt1
)),
7162 fold_convert_loc (loc
, type
, same
));
7167 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7168 specified by EXPR into the buffer PTR of length LEN bytes.
7169 Return the number of bytes placed in the buffer, or zero
7173 native_encode_int (const_tree expr
, unsigned char *ptr
, int len
)
7175 tree type
= TREE_TYPE (expr
);
7176 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7177 int byte
, offset
, word
, words
;
7178 unsigned char value
;
7180 if (total_bytes
> len
)
7182 words
= total_bytes
/ UNITS_PER_WORD
;
7184 for (byte
= 0; byte
< total_bytes
; byte
++)
7186 int bitpos
= byte
* BITS_PER_UNIT
;
7187 if (bitpos
< HOST_BITS_PER_WIDE_INT
)
7188 value
= (unsigned char) (TREE_INT_CST_LOW (expr
) >> bitpos
);
7190 value
= (unsigned char) (TREE_INT_CST_HIGH (expr
)
7191 >> (bitpos
- HOST_BITS_PER_WIDE_INT
));
7193 if (total_bytes
> UNITS_PER_WORD
)
7195 word
= byte
/ UNITS_PER_WORD
;
7196 if (WORDS_BIG_ENDIAN
)
7197 word
= (words
- 1) - word
;
7198 offset
= word
* UNITS_PER_WORD
;
7199 if (BYTES_BIG_ENDIAN
)
7200 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7202 offset
+= byte
% UNITS_PER_WORD
;
7205 offset
= BYTES_BIG_ENDIAN
? (total_bytes
- 1) - byte
: byte
;
7206 ptr
[offset
] = value
;
7212 /* Subroutine of native_encode_expr. Encode the REAL_CST
7213 specified by EXPR into the buffer PTR of length LEN bytes.
7214 Return the number of bytes placed in the buffer, or zero
7218 native_encode_real (const_tree expr
, unsigned char *ptr
, int len
)
7220 tree type
= TREE_TYPE (expr
);
7221 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7222 int byte
, offset
, word
, words
, bitpos
;
7223 unsigned char value
;
7225 /* There are always 32 bits in each long, no matter the size of
7226 the hosts long. We handle floating point representations with
7230 if (total_bytes
> len
)
7232 words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
7234 real_to_target (tmp
, TREE_REAL_CST_PTR (expr
), TYPE_MODE (type
));
7236 for (bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7237 bitpos
+= BITS_PER_UNIT
)
7239 byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7240 value
= (unsigned char) (tmp
[bitpos
/ 32] >> (bitpos
& 31));
7242 if (UNITS_PER_WORD
< 4)
7244 word
= byte
/ UNITS_PER_WORD
;
7245 if (WORDS_BIG_ENDIAN
)
7246 word
= (words
- 1) - word
;
7247 offset
= word
* UNITS_PER_WORD
;
7248 if (BYTES_BIG_ENDIAN
)
7249 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7251 offset
+= byte
% UNITS_PER_WORD
;
7254 offset
= BYTES_BIG_ENDIAN
? 3 - byte
: byte
;
7255 ptr
[offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3)] = value
;
7260 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7261 specified by EXPR into the buffer PTR of length LEN bytes.
7262 Return the number of bytes placed in the buffer, or zero
7266 native_encode_complex (const_tree expr
, unsigned char *ptr
, int len
)
7271 part
= TREE_REALPART (expr
);
7272 rsize
= native_encode_expr (part
, ptr
, len
);
7275 part
= TREE_IMAGPART (expr
);
7276 isize
= native_encode_expr (part
, ptr
+rsize
, len
-rsize
);
7279 return rsize
+ isize
;
7283 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7284 specified by EXPR into the buffer PTR of length LEN bytes.
7285 Return the number of bytes placed in the buffer, or zero
7289 native_encode_vector (const_tree expr
, unsigned char *ptr
, int len
)
7291 int i
, size
, offset
, count
;
7292 tree itype
, elem
, elements
;
7295 elements
= TREE_VECTOR_CST_ELTS (expr
);
7296 count
= TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr
));
7297 itype
= TREE_TYPE (TREE_TYPE (expr
));
7298 size
= GET_MODE_SIZE (TYPE_MODE (itype
));
7299 for (i
= 0; i
< count
; i
++)
7303 elem
= TREE_VALUE (elements
);
7304 elements
= TREE_CHAIN (elements
);
7311 if (native_encode_expr (elem
, ptr
+offset
, len
-offset
) != size
)
7316 if (offset
+ size
> len
)
7318 memset (ptr
+offset
, 0, size
);
7326 /* Subroutine of native_encode_expr. Encode the STRING_CST
7327 specified by EXPR into the buffer PTR of length LEN bytes.
7328 Return the number of bytes placed in the buffer, or zero
7332 native_encode_string (const_tree expr
, unsigned char *ptr
, int len
)
7334 tree type
= TREE_TYPE (expr
);
7335 HOST_WIDE_INT total_bytes
;
7337 if (TREE_CODE (type
) != ARRAY_TYPE
7338 || TREE_CODE (TREE_TYPE (type
)) != INTEGER_TYPE
7339 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))) != BITS_PER_UNIT
7340 || !host_integerp (TYPE_SIZE_UNIT (type
), 0))
7342 total_bytes
= tree_low_cst (TYPE_SIZE_UNIT (type
), 0);
7343 if (total_bytes
> len
)
7345 if (TREE_STRING_LENGTH (expr
) < total_bytes
)
7347 memcpy (ptr
, TREE_STRING_POINTER (expr
), TREE_STRING_LENGTH (expr
));
7348 memset (ptr
+ TREE_STRING_LENGTH (expr
), 0,
7349 total_bytes
- TREE_STRING_LENGTH (expr
));
7352 memcpy (ptr
, TREE_STRING_POINTER (expr
), total_bytes
);
7357 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7358 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7359 buffer PTR of length LEN bytes. Return the number of bytes
7360 placed in the buffer, or zero upon failure. */
7363 native_encode_expr (const_tree expr
, unsigned char *ptr
, int len
)
7365 switch (TREE_CODE (expr
))
7368 return native_encode_int (expr
, ptr
, len
);
7371 return native_encode_real (expr
, ptr
, len
);
7374 return native_encode_complex (expr
, ptr
, len
);
7377 return native_encode_vector (expr
, ptr
, len
);
7380 return native_encode_string (expr
, ptr
, len
);
7388 /* Subroutine of native_interpret_expr. Interpret the contents of
7389 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7390 If the buffer cannot be interpreted, return NULL_TREE. */
7393 native_interpret_int (tree type
, const unsigned char *ptr
, int len
)
7395 int total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7396 int byte
, offset
, word
, words
;
7397 unsigned char value
;
7400 if (total_bytes
> len
)
7402 if (total_bytes
* BITS_PER_UNIT
> 2 * HOST_BITS_PER_WIDE_INT
)
7405 result
= double_int_zero
;
7406 words
= total_bytes
/ UNITS_PER_WORD
;
7408 for (byte
= 0; byte
< total_bytes
; byte
++)
7410 int bitpos
= byte
* BITS_PER_UNIT
;
7411 if (total_bytes
> UNITS_PER_WORD
)
7413 word
= byte
/ UNITS_PER_WORD
;
7414 if (WORDS_BIG_ENDIAN
)
7415 word
= (words
- 1) - word
;
7416 offset
= word
* UNITS_PER_WORD
;
7417 if (BYTES_BIG_ENDIAN
)
7418 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7420 offset
+= byte
% UNITS_PER_WORD
;
7423 offset
= BYTES_BIG_ENDIAN
? (total_bytes
- 1) - byte
: byte
;
7424 value
= ptr
[offset
];
7426 if (bitpos
< HOST_BITS_PER_WIDE_INT
)
7427 result
.low
|= (unsigned HOST_WIDE_INT
) value
<< bitpos
;
7429 result
.high
|= (unsigned HOST_WIDE_INT
) value
7430 << (bitpos
- HOST_BITS_PER_WIDE_INT
);
7433 return double_int_to_tree (type
, result
);
7437 /* Subroutine of native_interpret_expr. Interpret the contents of
7438 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7439 If the buffer cannot be interpreted, return NULL_TREE. */
7442 native_interpret_real (tree type
, const unsigned char *ptr
, int len
)
7444 enum machine_mode mode
= TYPE_MODE (type
);
7445 int total_bytes
= GET_MODE_SIZE (mode
);
7446 int byte
, offset
, word
, words
, bitpos
;
7447 unsigned char value
;
7448 /* There are always 32 bits in each long, no matter the size of
7449 the hosts long. We handle floating point representations with
7454 total_bytes
= GET_MODE_SIZE (TYPE_MODE (type
));
7455 if (total_bytes
> len
|| total_bytes
> 24)
7457 words
= (32 / BITS_PER_UNIT
) / UNITS_PER_WORD
;
7459 memset (tmp
, 0, sizeof (tmp
));
7460 for (bitpos
= 0; bitpos
< total_bytes
* BITS_PER_UNIT
;
7461 bitpos
+= BITS_PER_UNIT
)
7463 byte
= (bitpos
/ BITS_PER_UNIT
) & 3;
7464 if (UNITS_PER_WORD
< 4)
7466 word
= byte
/ UNITS_PER_WORD
;
7467 if (WORDS_BIG_ENDIAN
)
7468 word
= (words
- 1) - word
;
7469 offset
= word
* UNITS_PER_WORD
;
7470 if (BYTES_BIG_ENDIAN
)
7471 offset
+= (UNITS_PER_WORD
- 1) - (byte
% UNITS_PER_WORD
);
7473 offset
+= byte
% UNITS_PER_WORD
;
7476 offset
= BYTES_BIG_ENDIAN
? 3 - byte
: byte
;
7477 value
= ptr
[offset
+ ((bitpos
/ BITS_PER_UNIT
) & ~3)];
7479 tmp
[bitpos
/ 32] |= (unsigned long)value
<< (bitpos
& 31);
7482 real_from_target (&r
, tmp
, mode
);
7483 return build_real (type
, r
);
7487 /* Subroutine of native_interpret_expr. Interpret the contents of
7488 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7489 If the buffer cannot be interpreted, return NULL_TREE. */
7492 native_interpret_complex (tree type
, const unsigned char *ptr
, int len
)
7494 tree etype
, rpart
, ipart
;
7497 etype
= TREE_TYPE (type
);
7498 size
= GET_MODE_SIZE (TYPE_MODE (etype
));
7501 rpart
= native_interpret_expr (etype
, ptr
, size
);
7504 ipart
= native_interpret_expr (etype
, ptr
+size
, size
);
7507 return build_complex (type
, rpart
, ipart
);
7511 /* Subroutine of native_interpret_expr. Interpret the contents of
7512 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7513 If the buffer cannot be interpreted, return NULL_TREE. */
7516 native_interpret_vector (tree type
, const unsigned char *ptr
, int len
)
7518 tree etype
, elem
, elements
;
7521 etype
= TREE_TYPE (type
);
7522 size
= GET_MODE_SIZE (TYPE_MODE (etype
));
7523 count
= TYPE_VECTOR_SUBPARTS (type
);
7524 if (size
* count
> len
)
7527 elements
= NULL_TREE
;
7528 for (i
= count
- 1; i
>= 0; i
--)
7530 elem
= native_interpret_expr (etype
, ptr
+(i
*size
), size
);
7533 elements
= tree_cons (NULL_TREE
, elem
, elements
);
7535 return build_vector (type
, elements
);
7539 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7540 the buffer PTR of length LEN as a constant of type TYPE. For
7541 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7542 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7543 return NULL_TREE. */
7546 native_interpret_expr (tree type
, const unsigned char *ptr
, int len
)
7548 switch (TREE_CODE (type
))
7553 return native_interpret_int (type
, ptr
, len
);
7556 return native_interpret_real (type
, ptr
, len
);
7559 return native_interpret_complex (type
, ptr
, len
);
7562 return native_interpret_vector (type
, ptr
, len
);
7570 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7571 TYPE at compile-time. If we're unable to perform the conversion
7572 return NULL_TREE. */
7575 fold_view_convert_expr (tree type
, tree expr
)
7577 /* We support up to 512-bit values (for V8DFmode). */
7578 unsigned char buffer
[64];
7581 /* Check that the host and target are sane. */
7582 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8)
7585 len
= native_encode_expr (expr
, buffer
, sizeof (buffer
));
7589 return native_interpret_expr (type
, buffer
, len
);
7592 /* Build an expression for the address of T. Folds away INDIRECT_REF
7593 to avoid confusing the gimplify process. */
7596 build_fold_addr_expr_with_type_loc (location_t loc
, tree t
, tree ptrtype
)
7598 /* The size of the object is not relevant when talking about its address. */
7599 if (TREE_CODE (t
) == WITH_SIZE_EXPR
)
7600 t
= TREE_OPERAND (t
, 0);
7602 if (TREE_CODE (t
) == INDIRECT_REF
)
7604 t
= TREE_OPERAND (t
, 0);
7606 if (TREE_TYPE (t
) != ptrtype
)
7607 t
= build1_loc (loc
, NOP_EXPR
, ptrtype
, t
);
7609 else if (TREE_CODE (t
) == MEM_REF
7610 && integer_zerop (TREE_OPERAND (t
, 1)))
7611 return TREE_OPERAND (t
, 0);
7612 else if (TREE_CODE (t
) == VIEW_CONVERT_EXPR
)
7614 t
= build_fold_addr_expr_loc (loc
, TREE_OPERAND (t
, 0));
7616 if (TREE_TYPE (t
) != ptrtype
)
7617 t
= fold_convert_loc (loc
, ptrtype
, t
);
7620 t
= build1_loc (loc
, ADDR_EXPR
, ptrtype
, t
);
7625 /* Build an expression for the address of T. */
7628 build_fold_addr_expr_loc (location_t loc
, tree t
)
7630 tree ptrtype
= build_pointer_type (TREE_TYPE (t
));
7632 return build_fold_addr_expr_with_type_loc (loc
, t
, ptrtype
);
7635 /* Fold a unary expression of code CODE and type TYPE with operand
7636 OP0. Return the folded expression if folding is successful.
7637 Otherwise, return NULL_TREE. */
7640 fold_unary_loc (location_t loc
, enum tree_code code
, tree type
, tree op0
)
7644 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
7646 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
7647 && TREE_CODE_LENGTH (code
) == 1);
7652 if (CONVERT_EXPR_CODE_P (code
)
7653 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
|| code
== NEGATE_EXPR
)
7655 /* Don't use STRIP_NOPS, because signedness of argument type
7657 STRIP_SIGN_NOPS (arg0
);
7661 /* Strip any conversions that don't change the mode. This
7662 is safe for every expression, except for a comparison
7663 expression because its signedness is derived from its
7666 Note that this is done as an internal manipulation within
7667 the constant folder, in order to find the simplest
7668 representation of the arguments so that their form can be
7669 studied. In any cases, the appropriate type conversions
7670 should be put back in the tree that will get out of the
7676 if (TREE_CODE_CLASS (code
) == tcc_unary
)
7678 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
7679 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7680 fold_build1_loc (loc
, code
, type
,
7681 fold_convert_loc (loc
, TREE_TYPE (op0
),
7682 TREE_OPERAND (arg0
, 1))));
7683 else if (TREE_CODE (arg0
) == COND_EXPR
)
7685 tree arg01
= TREE_OPERAND (arg0
, 1);
7686 tree arg02
= TREE_OPERAND (arg0
, 2);
7687 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
7688 arg01
= fold_build1_loc (loc
, code
, type
,
7689 fold_convert_loc (loc
,
7690 TREE_TYPE (op0
), arg01
));
7691 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
7692 arg02
= fold_build1_loc (loc
, code
, type
,
7693 fold_convert_loc (loc
,
7694 TREE_TYPE (op0
), arg02
));
7695 tem
= fold_build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7698 /* If this was a conversion, and all we did was to move into
7699 inside the COND_EXPR, bring it back out. But leave it if
7700 it is a conversion from integer to integer and the
7701 result precision is no wider than a word since such a
7702 conversion is cheap and may be optimized away by combine,
7703 while it couldn't if it were outside the COND_EXPR. Then return
7704 so we don't get into an infinite recursion loop taking the
7705 conversion out and then back in. */
7707 if ((CONVERT_EXPR_CODE_P (code
)
7708 || code
== NON_LVALUE_EXPR
)
7709 && TREE_CODE (tem
) == COND_EXPR
7710 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
7711 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
7712 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
7713 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
7714 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
7715 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
7716 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7718 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
7719 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
7720 || flag_syntax_only
))
7721 tem
= build1_loc (loc
, code
, type
,
7723 TREE_TYPE (TREE_OPERAND
7724 (TREE_OPERAND (tem
, 1), 0)),
7725 TREE_OPERAND (tem
, 0),
7726 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
7727 TREE_OPERAND (TREE_OPERAND (tem
, 2),
7736 /* Re-association barriers around constants and other re-association
7737 barriers can be removed. */
7738 if (CONSTANT_CLASS_P (op0
)
7739 || TREE_CODE (op0
) == PAREN_EXPR
)
7740 return fold_convert_loc (loc
, type
, op0
);
7745 case FIX_TRUNC_EXPR
:
7746 if (TREE_TYPE (op0
) == type
)
7749 if (COMPARISON_CLASS_P (op0
))
7751 /* If we have (type) (a CMP b) and type is an integral type, return
7752 new expression involving the new type. Canonicalize
7753 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7755 Do not fold the result as that would not simplify further, also
7756 folding again results in recursions. */
7757 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
7758 return build2_loc (loc
, TREE_CODE (op0
), type
,
7759 TREE_OPERAND (op0
, 0),
7760 TREE_OPERAND (op0
, 1));
7761 else if (!INTEGRAL_TYPE_P (type
))
7762 return build3_loc (loc
, COND_EXPR
, type
, op0
,
7763 constant_boolean_node (true, type
),
7764 constant_boolean_node (false, type
));
7767 /* Handle cases of two conversions in a row. */
7768 if (CONVERT_EXPR_P (op0
))
7770 tree inside_type
= TREE_TYPE (TREE_OPERAND (op0
, 0));
7771 tree inter_type
= TREE_TYPE (op0
);
7772 int inside_int
= INTEGRAL_TYPE_P (inside_type
);
7773 int inside_ptr
= POINTER_TYPE_P (inside_type
);
7774 int inside_float
= FLOAT_TYPE_P (inside_type
);
7775 int inside_vec
= TREE_CODE (inside_type
) == VECTOR_TYPE
;
7776 unsigned int inside_prec
= TYPE_PRECISION (inside_type
);
7777 int inside_unsignedp
= TYPE_UNSIGNED (inside_type
);
7778 int inter_int
= INTEGRAL_TYPE_P (inter_type
);
7779 int inter_ptr
= POINTER_TYPE_P (inter_type
);
7780 int inter_float
= FLOAT_TYPE_P (inter_type
);
7781 int inter_vec
= TREE_CODE (inter_type
) == VECTOR_TYPE
;
7782 unsigned int inter_prec
= TYPE_PRECISION (inter_type
);
7783 int inter_unsignedp
= TYPE_UNSIGNED (inter_type
);
7784 int final_int
= INTEGRAL_TYPE_P (type
);
7785 int final_ptr
= POINTER_TYPE_P (type
);
7786 int final_float
= FLOAT_TYPE_P (type
);
7787 int final_vec
= TREE_CODE (type
) == VECTOR_TYPE
;
7788 unsigned int final_prec
= TYPE_PRECISION (type
);
7789 int final_unsignedp
= TYPE_UNSIGNED (type
);
7791 /* In addition to the cases of two conversions in a row
7792 handled below, if we are converting something to its own
7793 type via an object of identical or wider precision, neither
7794 conversion is needed. */
7795 if (TYPE_MAIN_VARIANT (inside_type
) == TYPE_MAIN_VARIANT (type
)
7796 && (((inter_int
|| inter_ptr
) && final_int
)
7797 || (inter_float
&& final_float
))
7798 && inter_prec
>= final_prec
)
7799 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7801 /* Likewise, if the intermediate and initial types are either both
7802 float or both integer, we don't need the middle conversion if the
7803 former is wider than the latter and doesn't change the signedness
7804 (for integers). Avoid this if the final type is a pointer since
7805 then we sometimes need the middle conversion. Likewise if the
7806 final type has a precision not equal to the size of its mode. */
7807 if (((inter_int
&& inside_int
)
7808 || (inter_float
&& inside_float
)
7809 || (inter_vec
&& inside_vec
))
7810 && inter_prec
>= inside_prec
7811 && (inter_float
|| inter_vec
7812 || inter_unsignedp
== inside_unsignedp
)
7813 && ! (final_prec
!= GET_MODE_BITSIZE (TYPE_MODE (type
))
7814 && TYPE_MODE (type
) == TYPE_MODE (inter_type
))
7816 && (! final_vec
|| inter_prec
== inside_prec
))
7817 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7819 /* If we have a sign-extension of a zero-extended value, we can
7820 replace that by a single zero-extension. */
7821 if (inside_int
&& inter_int
&& final_int
7822 && inside_prec
< inter_prec
&& inter_prec
< final_prec
7823 && inside_unsignedp
&& !inter_unsignedp
)
7824 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7826 /* Two conversions in a row are not needed unless:
7827 - some conversion is floating-point (overstrict for now), or
7828 - some conversion is a vector (overstrict for now), or
7829 - the intermediate type is narrower than both initial and
7831 - the intermediate type and innermost type differ in signedness,
7832 and the outermost type is wider than the intermediate, or
7833 - the initial type is a pointer type and the precisions of the
7834 intermediate and final types differ, or
7835 - the final type is a pointer type and the precisions of the
7836 initial and intermediate types differ. */
7837 if (! inside_float
&& ! inter_float
&& ! final_float
7838 && ! inside_vec
&& ! inter_vec
&& ! final_vec
7839 && (inter_prec
>= inside_prec
|| inter_prec
>= final_prec
)
7840 && ! (inside_int
&& inter_int
7841 && inter_unsignedp
!= inside_unsignedp
7842 && inter_prec
< final_prec
)
7843 && ((inter_unsignedp
&& inter_prec
> inside_prec
)
7844 == (final_unsignedp
&& final_prec
> inter_prec
))
7845 && ! (inside_ptr
&& inter_prec
!= final_prec
)
7846 && ! (final_ptr
&& inside_prec
!= inter_prec
)
7847 && ! (final_prec
!= GET_MODE_BITSIZE (TYPE_MODE (type
))
7848 && TYPE_MODE (type
) == TYPE_MODE (inter_type
)))
7849 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7852 /* Handle (T *)&A.B.C for A being of type T and B and C
7853 living at offset zero. This occurs frequently in
7854 C++ upcasting and then accessing the base. */
7855 if (TREE_CODE (op0
) == ADDR_EXPR
7856 && POINTER_TYPE_P (type
)
7857 && handled_component_p (TREE_OPERAND (op0
, 0)))
7859 HOST_WIDE_INT bitsize
, bitpos
;
7861 enum machine_mode mode
;
7862 int unsignedp
, volatilep
;
7863 tree base
= TREE_OPERAND (op0
, 0);
7864 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
,
7865 &mode
, &unsignedp
, &volatilep
, false);
7866 /* If the reference was to a (constant) zero offset, we can use
7867 the address of the base if it has the same base type
7868 as the result type and the pointer type is unqualified. */
7869 if (! offset
&& bitpos
== 0
7870 && (TYPE_MAIN_VARIANT (TREE_TYPE (type
))
7871 == TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
7872 && TYPE_QUALS (type
) == TYPE_UNQUALIFIED
)
7873 return fold_convert_loc (loc
, type
,
7874 build_fold_addr_expr_loc (loc
, base
));
7877 if (TREE_CODE (op0
) == MODIFY_EXPR
7878 && TREE_CONSTANT (TREE_OPERAND (op0
, 1))
7879 /* Detect assigning a bitfield. */
7880 && !(TREE_CODE (TREE_OPERAND (op0
, 0)) == COMPONENT_REF
7882 (TREE_OPERAND (TREE_OPERAND (op0
, 0), 1))))
7884 /* Don't leave an assignment inside a conversion
7885 unless assigning a bitfield. */
7886 tem
= fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 1));
7887 /* First do the assignment, then return converted constant. */
7888 tem
= build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (tem
), op0
, tem
);
7889 TREE_NO_WARNING (tem
) = 1;
7890 TREE_USED (tem
) = 1;
7894 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7895 constants (if x has signed type, the sign bit cannot be set
7896 in c). This folds extension into the BIT_AND_EXPR.
7897 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7898 very likely don't have maximal range for their precision and this
7899 transformation effectively doesn't preserve non-maximal ranges. */
7900 if (TREE_CODE (type
) == INTEGER_TYPE
7901 && TREE_CODE (op0
) == BIT_AND_EXPR
7902 && TREE_CODE (TREE_OPERAND (op0
, 1)) == INTEGER_CST
)
7904 tree and_expr
= op0
;
7905 tree and0
= TREE_OPERAND (and_expr
, 0);
7906 tree and1
= TREE_OPERAND (and_expr
, 1);
7909 if (TYPE_UNSIGNED (TREE_TYPE (and_expr
))
7910 || (TYPE_PRECISION (type
)
7911 <= TYPE_PRECISION (TREE_TYPE (and_expr
))))
7913 else if (TYPE_PRECISION (TREE_TYPE (and1
))
7914 <= HOST_BITS_PER_WIDE_INT
7915 && host_integerp (and1
, 1))
7917 unsigned HOST_WIDE_INT cst
;
7919 cst
= tree_low_cst (and1
, 1);
7920 cst
&= (HOST_WIDE_INT
) -1
7921 << (TYPE_PRECISION (TREE_TYPE (and1
)) - 1);
7922 change
= (cst
== 0);
7923 #ifdef LOAD_EXTEND_OP
7925 && !flag_syntax_only
7926 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0
)))
7929 tree uns
= unsigned_type_for (TREE_TYPE (and0
));
7930 and0
= fold_convert_loc (loc
, uns
, and0
);
7931 and1
= fold_convert_loc (loc
, uns
, and1
);
7937 tem
= force_fit_type_double (type
, tree_to_double_int (and1
),
7938 0, TREE_OVERFLOW (and1
));
7939 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
7940 fold_convert_loc (loc
, type
, and0
), tem
);
7944 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7945 when one of the new casts will fold away. Conservatively we assume
7946 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7947 if (POINTER_TYPE_P (type
)
7948 && TREE_CODE (arg0
) == POINTER_PLUS_EXPR
7949 && (!TYPE_RESTRICT (type
) || TYPE_RESTRICT (TREE_TYPE (arg0
)))
7950 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
7951 || TREE_CODE (TREE_OPERAND (arg0
, 0)) == NOP_EXPR
7952 || TREE_CODE (TREE_OPERAND (arg0
, 1)) == NOP_EXPR
))
7954 tree arg00
= TREE_OPERAND (arg0
, 0);
7955 tree arg01
= TREE_OPERAND (arg0
, 1);
7957 return fold_build_pointer_plus_loc
7958 (loc
, fold_convert_loc (loc
, type
, arg00
), arg01
);
7961 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7962 of the same precision, and X is an integer type not narrower than
7963 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7964 if (INTEGRAL_TYPE_P (type
)
7965 && TREE_CODE (op0
) == BIT_NOT_EXPR
7966 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7967 && CONVERT_EXPR_P (TREE_OPERAND (op0
, 0))
7968 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (op0
)))
7970 tem
= TREE_OPERAND (TREE_OPERAND (op0
, 0), 0);
7971 if (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7972 && TYPE_PRECISION (type
) <= TYPE_PRECISION (TREE_TYPE (tem
)))
7973 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
7974 fold_convert_loc (loc
, type
, tem
));
7977 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7978 type of X and Y (integer types only). */
7979 if (INTEGRAL_TYPE_P (type
)
7980 && TREE_CODE (op0
) == MULT_EXPR
7981 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7982 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
7984 /* Be careful not to introduce new overflows. */
7986 if (TYPE_OVERFLOW_WRAPS (type
))
7989 mult_type
= unsigned_type_for (type
);
7991 if (TYPE_PRECISION (mult_type
) < TYPE_PRECISION (TREE_TYPE (op0
)))
7993 tem
= fold_build2_loc (loc
, MULT_EXPR
, mult_type
,
7994 fold_convert_loc (loc
, mult_type
,
7995 TREE_OPERAND (op0
, 0)),
7996 fold_convert_loc (loc
, mult_type
,
7997 TREE_OPERAND (op0
, 1)));
7998 return fold_convert_loc (loc
, type
, tem
);
8002 tem
= fold_convert_const (code
, type
, op0
);
8003 return tem
? tem
: NULL_TREE
;
8005 case ADDR_SPACE_CONVERT_EXPR
:
8006 if (integer_zerop (arg0
))
8007 return fold_convert_const (code
, type
, arg0
);
8010 case FIXED_CONVERT_EXPR
:
8011 tem
= fold_convert_const (code
, type
, arg0
);
8012 return tem
? tem
: NULL_TREE
;
8014 case VIEW_CONVERT_EXPR
:
8015 if (TREE_TYPE (op0
) == type
)
8017 if (TREE_CODE (op0
) == VIEW_CONVERT_EXPR
)
8018 return fold_build1_loc (loc
, VIEW_CONVERT_EXPR
,
8019 type
, TREE_OPERAND (op0
, 0));
8020 if (TREE_CODE (op0
) == MEM_REF
)
8021 return fold_build2_loc (loc
, MEM_REF
, type
,
8022 TREE_OPERAND (op0
, 0), TREE_OPERAND (op0
, 1));
8024 /* For integral conversions with the same precision or pointer
8025 conversions use a NOP_EXPR instead. */
8026 if ((INTEGRAL_TYPE_P (type
)
8027 || POINTER_TYPE_P (type
))
8028 && (INTEGRAL_TYPE_P (TREE_TYPE (op0
))
8029 || POINTER_TYPE_P (TREE_TYPE (op0
)))
8030 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (op0
)))
8031 return fold_convert_loc (loc
, type
, op0
);
8033 /* Strip inner integral conversions that do not change the precision. */
8034 if (CONVERT_EXPR_P (op0
)
8035 && (INTEGRAL_TYPE_P (TREE_TYPE (op0
))
8036 || POINTER_TYPE_P (TREE_TYPE (op0
)))
8037 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0
, 0)))
8038 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0
, 0))))
8039 && (TYPE_PRECISION (TREE_TYPE (op0
))
8040 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0
, 0)))))
8041 return fold_build1_loc (loc
, VIEW_CONVERT_EXPR
,
8042 type
, TREE_OPERAND (op0
, 0));
8044 return fold_view_convert_expr (type
, op0
);
8047 tem
= fold_negate_expr (loc
, arg0
);
8049 return fold_convert_loc (loc
, type
, tem
);
8053 if (TREE_CODE (arg0
) == INTEGER_CST
|| TREE_CODE (arg0
) == REAL_CST
)
8054 return fold_abs_const (arg0
, type
);
8055 else if (TREE_CODE (arg0
) == NEGATE_EXPR
)
8056 return fold_build1_loc (loc
, ABS_EXPR
, type
, TREE_OPERAND (arg0
, 0));
8057 /* Convert fabs((double)float) into (double)fabsf(float). */
8058 else if (TREE_CODE (arg0
) == NOP_EXPR
8059 && TREE_CODE (type
) == REAL_TYPE
)
8061 tree targ0
= strip_float_extensions (arg0
);
8063 return fold_convert_loc (loc
, type
,
8064 fold_build1_loc (loc
, ABS_EXPR
,
8068 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8069 else if (TREE_CODE (arg0
) == ABS_EXPR
)
8071 else if (tree_expr_nonnegative_p (arg0
))
8074 /* Strip sign ops from argument. */
8075 if (TREE_CODE (type
) == REAL_TYPE
)
8077 tem
= fold_strip_sign_ops (arg0
);
8079 return fold_build1_loc (loc
, ABS_EXPR
, type
,
8080 fold_convert_loc (loc
, type
, tem
));
8085 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
8086 return fold_convert_loc (loc
, type
, arg0
);
8087 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
8089 tree itype
= TREE_TYPE (type
);
8090 tree rpart
= fold_convert_loc (loc
, itype
, TREE_OPERAND (arg0
, 0));
8091 tree ipart
= fold_convert_loc (loc
, itype
, TREE_OPERAND (arg0
, 1));
8092 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rpart
,
8093 negate_expr (ipart
));
8095 if (TREE_CODE (arg0
) == COMPLEX_CST
)
8097 tree itype
= TREE_TYPE (type
);
8098 tree rpart
= fold_convert_loc (loc
, itype
, TREE_REALPART (arg0
));
8099 tree ipart
= fold_convert_loc (loc
, itype
, TREE_IMAGPART (arg0
));
8100 return build_complex (type
, rpart
, negate_expr (ipart
));
8102 if (TREE_CODE (arg0
) == CONJ_EXPR
)
8103 return fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
8107 if (TREE_CODE (arg0
) == INTEGER_CST
)
8108 return fold_not_const (arg0
, type
);
8109 else if (TREE_CODE (arg0
) == BIT_NOT_EXPR
)
8110 return fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
8111 /* Convert ~ (-A) to A - 1. */
8112 else if (INTEGRAL_TYPE_P (type
) && TREE_CODE (arg0
) == NEGATE_EXPR
)
8113 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
8114 fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0)),
8115 build_int_cst (type
, 1));
8116 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8117 else if (INTEGRAL_TYPE_P (type
)
8118 && ((TREE_CODE (arg0
) == MINUS_EXPR
8119 && integer_onep (TREE_OPERAND (arg0
, 1)))
8120 || (TREE_CODE (arg0
) == PLUS_EXPR
8121 && integer_all_onesp (TREE_OPERAND (arg0
, 1)))))
8122 return fold_build1_loc (loc
, NEGATE_EXPR
, type
,
8123 fold_convert_loc (loc
, type
,
8124 TREE_OPERAND (arg0
, 0)));
8125 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8126 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
8127 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
8128 fold_convert_loc (loc
, type
,
8129 TREE_OPERAND (arg0
, 0)))))
8130 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
, tem
,
8131 fold_convert_loc (loc
, type
,
8132 TREE_OPERAND (arg0
, 1)));
8133 else if (TREE_CODE (arg0
) == BIT_XOR_EXPR
8134 && (tem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
,
8135 fold_convert_loc (loc
, type
,
8136 TREE_OPERAND (arg0
, 1)))))
8137 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
,
8138 fold_convert_loc (loc
, type
,
8139 TREE_OPERAND (arg0
, 0)), tem
);
8140 /* Perform BIT_NOT_EXPR on each element individually. */
8141 else if (TREE_CODE (arg0
) == VECTOR_CST
)
8143 tree elements
= TREE_VECTOR_CST_ELTS (arg0
), elem
, list
= NULL_TREE
;
8144 int count
= TYPE_VECTOR_SUBPARTS (type
), i
;
8146 for (i
= 0; i
< count
; i
++)
8150 elem
= TREE_VALUE (elements
);
8151 elem
= fold_unary_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (type
), elem
);
8152 if (elem
== NULL_TREE
)
8154 elements
= TREE_CHAIN (elements
);
8157 elem
= build_int_cst (TREE_TYPE (type
), -1);
8158 list
= tree_cons (NULL_TREE
, elem
, list
);
8161 return build_vector (type
, nreverse (list
));
8166 case TRUTH_NOT_EXPR
:
8167 /* The argument to invert_truthvalue must have Boolean type. */
8168 if (TREE_CODE (TREE_TYPE (arg0
)) != BOOLEAN_TYPE
)
8169 arg0
= fold_convert_loc (loc
, boolean_type_node
, arg0
);
8171 /* Note that the operand of this must be an int
8172 and its values must be 0 or 1.
8173 ("true" is a fixed value perhaps depending on the language,
8174 but we don't handle values other than 1 correctly yet.) */
8175 tem
= fold_truth_not_expr (loc
, arg0
);
8178 return fold_convert_loc (loc
, type
, tem
);
8181 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
8182 return fold_convert_loc (loc
, type
, arg0
);
8183 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
8184 return omit_one_operand_loc (loc
, type
, TREE_OPERAND (arg0
, 0),
8185 TREE_OPERAND (arg0
, 1));
8186 if (TREE_CODE (arg0
) == COMPLEX_CST
)
8187 return fold_convert_loc (loc
, type
, TREE_REALPART (arg0
));
8188 if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8190 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8191 tem
= fold_build2_loc (loc
, TREE_CODE (arg0
), itype
,
8192 fold_build1_loc (loc
, REALPART_EXPR
, itype
,
8193 TREE_OPERAND (arg0
, 0)),
8194 fold_build1_loc (loc
, REALPART_EXPR
, itype
,
8195 TREE_OPERAND (arg0
, 1)));
8196 return fold_convert_loc (loc
, type
, tem
);
8198 if (TREE_CODE (arg0
) == CONJ_EXPR
)
8200 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8201 tem
= fold_build1_loc (loc
, REALPART_EXPR
, itype
,
8202 TREE_OPERAND (arg0
, 0));
8203 return fold_convert_loc (loc
, type
, tem
);
8205 if (TREE_CODE (arg0
) == CALL_EXPR
)
8207 tree fn
= get_callee_fndecl (arg0
);
8208 if (fn
&& DECL_BUILT_IN_CLASS (fn
) == BUILT_IN_NORMAL
)
8209 switch (DECL_FUNCTION_CODE (fn
))
8211 CASE_FLT_FN (BUILT_IN_CEXPI
):
8212 fn
= mathfn_built_in (type
, BUILT_IN_COS
);
8214 return build_call_expr_loc (loc
, fn
, 1, CALL_EXPR_ARG (arg0
, 0));
8224 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
8225 return build_zero_cst (type
);
8226 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
8227 return omit_one_operand_loc (loc
, type
, TREE_OPERAND (arg0
, 1),
8228 TREE_OPERAND (arg0
, 0));
8229 if (TREE_CODE (arg0
) == COMPLEX_CST
)
8230 return fold_convert_loc (loc
, type
, TREE_IMAGPART (arg0
));
8231 if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8233 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8234 tem
= fold_build2_loc (loc
, TREE_CODE (arg0
), itype
,
8235 fold_build1_loc (loc
, IMAGPART_EXPR
, itype
,
8236 TREE_OPERAND (arg0
, 0)),
8237 fold_build1_loc (loc
, IMAGPART_EXPR
, itype
,
8238 TREE_OPERAND (arg0
, 1)));
8239 return fold_convert_loc (loc
, type
, tem
);
8241 if (TREE_CODE (arg0
) == CONJ_EXPR
)
8243 tree itype
= TREE_TYPE (TREE_TYPE (arg0
));
8244 tem
= fold_build1_loc (loc
, IMAGPART_EXPR
, itype
, TREE_OPERAND (arg0
, 0));
8245 return fold_convert_loc (loc
, type
, negate_expr (tem
));
8247 if (TREE_CODE (arg0
) == CALL_EXPR
)
8249 tree fn
= get_callee_fndecl (arg0
);
8250 if (fn
&& DECL_BUILT_IN_CLASS (fn
) == BUILT_IN_NORMAL
)
8251 switch (DECL_FUNCTION_CODE (fn
))
8253 CASE_FLT_FN (BUILT_IN_CEXPI
):
8254 fn
= mathfn_built_in (type
, BUILT_IN_SIN
);
8256 return build_call_expr_loc (loc
, fn
, 1, CALL_EXPR_ARG (arg0
, 0));
8266 /* Fold *&X to X if X is an lvalue. */
8267 if (TREE_CODE (op0
) == ADDR_EXPR
)
8269 tree op00
= TREE_OPERAND (op0
, 0);
8270 if ((TREE_CODE (op00
) == VAR_DECL
8271 || TREE_CODE (op00
) == PARM_DECL
8272 || TREE_CODE (op00
) == RESULT_DECL
)
8273 && !TREE_READONLY (op00
))
8280 } /* switch (code) */
8284 /* If the operation was a conversion do _not_ mark a resulting constant
8285 with TREE_OVERFLOW if the original constant was not. These conversions
8286 have implementation defined behavior and retaining the TREE_OVERFLOW
8287 flag here would confuse later passes such as VRP. */
8289 fold_unary_ignore_overflow_loc (location_t loc
, enum tree_code code
,
8290 tree type
, tree op0
)
8292 tree res
= fold_unary_loc (loc
, code
, type
, op0
);
8294 && TREE_CODE (res
) == INTEGER_CST
8295 && TREE_CODE (op0
) == INTEGER_CST
8296 && CONVERT_EXPR_CODE_P (code
))
8297 TREE_OVERFLOW (res
) = TREE_OVERFLOW (op0
);
8302 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8303 operands OP0 and OP1. LOC is the location of the resulting expression.
8304 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8305 Return the folded expression if folding is successful. Otherwise,
8306 return NULL_TREE. */
8308 fold_truth_andor (location_t loc
, enum tree_code code
, tree type
,
8309 tree arg0
, tree arg1
, tree op0
, tree op1
)
8313 /* We only do these simplifications if we are optimizing. */
8317 /* Check for things like (A || B) && (A || C). We can convert this
8318 to A || (B && C). Note that either operator can be any of the four
8319 truth and/or operations and the transformation will still be
8320 valid. Also note that we only care about order for the
8321 ANDIF and ORIF operators. If B contains side effects, this
8322 might change the truth-value of A. */
8323 if (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8324 && (TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
8325 || TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
8326 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
8327 || TREE_CODE (arg0
) == TRUTH_OR_EXPR
)
8328 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0
, 1)))
8330 tree a00
= TREE_OPERAND (arg0
, 0);
8331 tree a01
= TREE_OPERAND (arg0
, 1);
8332 tree a10
= TREE_OPERAND (arg1
, 0);
8333 tree a11
= TREE_OPERAND (arg1
, 1);
8334 int commutative
= ((TREE_CODE (arg0
) == TRUTH_OR_EXPR
8335 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
)
8336 && (code
== TRUTH_AND_EXPR
8337 || code
== TRUTH_OR_EXPR
));
8339 if (operand_equal_p (a00
, a10
, 0))
8340 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
8341 fold_build2_loc (loc
, code
, type
, a01
, a11
));
8342 else if (commutative
&& operand_equal_p (a00
, a11
, 0))
8343 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a00
,
8344 fold_build2_loc (loc
, code
, type
, a01
, a10
));
8345 else if (commutative
&& operand_equal_p (a01
, a10
, 0))
8346 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
, a01
,
8347 fold_build2_loc (loc
, code
, type
, a00
, a11
));
8349 /* This case if tricky because we must either have commutative
8350 operators or else A10 must not have side-effects. */
8352 else if ((commutative
|| ! TREE_SIDE_EFFECTS (a10
))
8353 && operand_equal_p (a01
, a11
, 0))
8354 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
8355 fold_build2_loc (loc
, code
, type
, a00
, a10
),
8359 /* See if we can build a range comparison. */
8360 if (0 != (tem
= fold_range_test (loc
, code
, type
, op0
, op1
)))
8363 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
)
8364 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
))
8366 tem
= merge_truthop_with_opposite_arm (loc
, arg0
, arg1
, true);
8368 return fold_build2_loc (loc
, code
, type
, tem
, arg1
);
8371 if ((code
== TRUTH_ANDIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ORIF_EXPR
)
8372 || (code
== TRUTH_ORIF_EXPR
&& TREE_CODE (arg1
) == TRUTH_ANDIF_EXPR
))
8374 tem
= merge_truthop_with_opposite_arm (loc
, arg1
, arg0
, false);
8376 return fold_build2_loc (loc
, code
, type
, arg0
, tem
);
8379 /* Check for the possibility of merging component references. If our
8380 lhs is another similar operation, try to merge its rhs with our
8381 rhs. Then try to merge our lhs and rhs. */
8382 if (TREE_CODE (arg0
) == code
8383 && 0 != (tem
= fold_truthop (loc
, code
, type
,
8384 TREE_OPERAND (arg0
, 1), arg1
)))
8385 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
8387 if ((tem
= fold_truthop (loc
, code
, type
, arg0
, arg1
)) != 0)
8393 /* Fold a binary expression of code CODE and type TYPE with operands
8394 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8395 Return the folded expression if folding is successful. Otherwise,
8396 return NULL_TREE. */
8399 fold_minmax (location_t loc
, enum tree_code code
, tree type
, tree op0
, tree op1
)
8401 enum tree_code compl_code
;
8403 if (code
== MIN_EXPR
)
8404 compl_code
= MAX_EXPR
;
8405 else if (code
== MAX_EXPR
)
8406 compl_code
= MIN_EXPR
;
8410 /* MIN (MAX (a, b), b) == b. */
8411 if (TREE_CODE (op0
) == compl_code
8412 && operand_equal_p (TREE_OPERAND (op0
, 1), op1
, 0))
8413 return omit_one_operand_loc (loc
, type
, op1
, TREE_OPERAND (op0
, 0));
8415 /* MIN (MAX (b, a), b) == b. */
8416 if (TREE_CODE (op0
) == compl_code
8417 && operand_equal_p (TREE_OPERAND (op0
, 0), op1
, 0)
8418 && reorder_operands_p (TREE_OPERAND (op0
, 1), op1
))
8419 return omit_one_operand_loc (loc
, type
, op1
, TREE_OPERAND (op0
, 1));
8421 /* MIN (a, MAX (a, b)) == a. */
8422 if (TREE_CODE (op1
) == compl_code
8423 && operand_equal_p (op0
, TREE_OPERAND (op1
, 0), 0)
8424 && reorder_operands_p (op0
, TREE_OPERAND (op1
, 1)))
8425 return omit_one_operand_loc (loc
, type
, op0
, TREE_OPERAND (op1
, 1));
8427 /* MIN (a, MAX (b, a)) == a. */
8428 if (TREE_CODE (op1
) == compl_code
8429 && operand_equal_p (op0
, TREE_OPERAND (op1
, 1), 0)
8430 && reorder_operands_p (op0
, TREE_OPERAND (op1
, 0)))
8431 return omit_one_operand_loc (loc
, type
, op0
, TREE_OPERAND (op1
, 0));
8436 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8437 by changing CODE to reduce the magnitude of constants involved in
8438 ARG0 of the comparison.
8439 Returns a canonicalized comparison tree if a simplification was
8440 possible, otherwise returns NULL_TREE.
8441 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8442 valid if signed overflow is undefined. */
8445 maybe_canonicalize_comparison_1 (location_t loc
, enum tree_code code
, tree type
,
8446 tree arg0
, tree arg1
,
8447 bool *strict_overflow_p
)
8449 enum tree_code code0
= TREE_CODE (arg0
);
8450 tree t
, cst0
= NULL_TREE
;
8454 /* Match A +- CST code arg1 and CST code arg1. We can change the
8455 first form only if overflow is undefined. */
8456 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
8457 /* In principle pointers also have undefined overflow behavior,
8458 but that causes problems elsewhere. */
8459 && !POINTER_TYPE_P (TREE_TYPE (arg0
))
8460 && (code0
== MINUS_EXPR
8461 || code0
== PLUS_EXPR
)
8462 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
8463 || code0
== INTEGER_CST
))
8466 /* Identify the constant in arg0 and its sign. */
8467 if (code0
== INTEGER_CST
)
8470 cst0
= TREE_OPERAND (arg0
, 1);
8471 sgn0
= tree_int_cst_sgn (cst0
);
8473 /* Overflowed constants and zero will cause problems. */
8474 if (integer_zerop (cst0
)
8475 || TREE_OVERFLOW (cst0
))
8478 /* See if we can reduce the magnitude of the constant in
8479 arg0 by changing the comparison code. */
8480 if (code0
== INTEGER_CST
)
8482 /* CST <= arg1 -> CST-1 < arg1. */
8483 if (code
== LE_EXPR
&& sgn0
== 1)
8485 /* -CST < arg1 -> -CST-1 <= arg1. */
8486 else if (code
== LT_EXPR
&& sgn0
== -1)
8488 /* CST > arg1 -> CST-1 >= arg1. */
8489 else if (code
== GT_EXPR
&& sgn0
== 1)
8491 /* -CST >= arg1 -> -CST-1 > arg1. */
8492 else if (code
== GE_EXPR
&& sgn0
== -1)
8496 /* arg1 code' CST' might be more canonical. */
8501 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8503 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
8505 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8506 else if (code
== GT_EXPR
8507 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
8509 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8510 else if (code
== LE_EXPR
8511 && code0
== ((sgn0
== -1) ? MINUS_EXPR
: PLUS_EXPR
))
8513 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8514 else if (code
== GE_EXPR
8515 && code0
== ((sgn0
== -1) ? PLUS_EXPR
: MINUS_EXPR
))
8519 *strict_overflow_p
= true;
8522 /* Now build the constant reduced in magnitude. But not if that
8523 would produce one outside of its types range. */
8524 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0
))
8526 && TYPE_MIN_VALUE (TREE_TYPE (cst0
))
8527 && tree_int_cst_equal (cst0
, TYPE_MIN_VALUE (TREE_TYPE (cst0
))))
8529 && TYPE_MAX_VALUE (TREE_TYPE (cst0
))
8530 && tree_int_cst_equal (cst0
, TYPE_MAX_VALUE (TREE_TYPE (cst0
))))))
8531 /* We cannot swap the comparison here as that would cause us to
8532 endlessly recurse. */
8535 t
= int_const_binop (sgn0
== -1 ? PLUS_EXPR
: MINUS_EXPR
,
8536 cst0
, build_int_cst (TREE_TYPE (cst0
), 1));
8537 if (code0
!= INTEGER_CST
)
8538 t
= fold_build2_loc (loc
, code0
, TREE_TYPE (arg0
), TREE_OPERAND (arg0
, 0), t
);
8539 t
= fold_convert (TREE_TYPE (arg1
), t
);
8541 /* If swapping might yield to a more canonical form, do so. */
8543 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
, arg1
, t
);
8545 return fold_build2_loc (loc
, code
, type
, t
, arg1
);
8548 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8549 overflow further. Try to decrease the magnitude of constants involved
8550 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8551 and put sole constants at the second argument position.
8552 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8555 maybe_canonicalize_comparison (location_t loc
, enum tree_code code
, tree type
,
8556 tree arg0
, tree arg1
)
8559 bool strict_overflow_p
;
8560 const char * const warnmsg
= G_("assuming signed overflow does not occur "
8561 "when reducing constant in comparison");
8563 /* Try canonicalization by simplifying arg0. */
8564 strict_overflow_p
= false;
8565 t
= maybe_canonicalize_comparison_1 (loc
, code
, type
, arg0
, arg1
,
8566 &strict_overflow_p
);
8569 if (strict_overflow_p
)
8570 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MAGNITUDE
);
8574 /* Try canonicalization by simplifying arg1 using the swapped
8576 code
= swap_tree_comparison (code
);
8577 strict_overflow_p
= false;
8578 t
= maybe_canonicalize_comparison_1 (loc
, code
, type
, arg1
, arg0
,
8579 &strict_overflow_p
);
8580 if (t
&& strict_overflow_p
)
8581 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_MAGNITUDE
);
8585 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8586 space. This is used to avoid issuing overflow warnings for
8587 expressions like &p->x which can not wrap. */
8590 pointer_may_wrap_p (tree base
, tree offset
, HOST_WIDE_INT bitpos
)
8592 unsigned HOST_WIDE_INT offset_low
, total_low
;
8593 HOST_WIDE_INT size
, offset_high
, total_high
;
8595 if (!POINTER_TYPE_P (TREE_TYPE (base
)))
8601 if (offset
== NULL_TREE
)
8606 else if (TREE_CODE (offset
) != INTEGER_CST
|| TREE_OVERFLOW (offset
))
8610 offset_low
= TREE_INT_CST_LOW (offset
);
8611 offset_high
= TREE_INT_CST_HIGH (offset
);
8614 if (add_double_with_sign (offset_low
, offset_high
,
8615 bitpos
/ BITS_PER_UNIT
, 0,
8616 &total_low
, &total_high
,
8620 if (total_high
!= 0)
8623 size
= int_size_in_bytes (TREE_TYPE (TREE_TYPE (base
)));
8627 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8629 if (TREE_CODE (base
) == ADDR_EXPR
)
8631 HOST_WIDE_INT base_size
;
8633 base_size
= int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base
, 0)));
8634 if (base_size
> 0 && size
< base_size
)
8638 return total_low
> (unsigned HOST_WIDE_INT
) size
;
8641 /* Subroutine of fold_binary. This routine performs all of the
8642 transformations that are common to the equality/inequality
8643 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8644 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8645 fold_binary should call fold_binary. Fold a comparison with
8646 tree code CODE and type TYPE with operands OP0 and OP1. Return
8647 the folded comparison or NULL_TREE. */
8650 fold_comparison (location_t loc
, enum tree_code code
, tree type
,
8653 tree arg0
, arg1
, tem
;
8658 STRIP_SIGN_NOPS (arg0
);
8659 STRIP_SIGN_NOPS (arg1
);
8661 tem
= fold_relational_const (code
, type
, arg0
, arg1
);
8662 if (tem
!= NULL_TREE
)
8665 /* If one arg is a real or integer constant, put it last. */
8666 if (tree_swap_operands_p (arg0
, arg1
, true))
8667 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
, op1
, op0
);
8669 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8670 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8671 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8672 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
8673 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
8674 && (TREE_CODE (arg1
) == INTEGER_CST
8675 && !TREE_OVERFLOW (arg1
)))
8677 tree const1
= TREE_OPERAND (arg0
, 1);
8679 tree variable
= TREE_OPERAND (arg0
, 0);
8682 lhs_add
= TREE_CODE (arg0
) != PLUS_EXPR
;
8684 lhs
= fold_build2_loc (loc
, lhs_add
? PLUS_EXPR
: MINUS_EXPR
,
8685 TREE_TYPE (arg1
), const2
, const1
);
8687 /* If the constant operation overflowed this can be
8688 simplified as a comparison against INT_MAX/INT_MIN. */
8689 if (TREE_CODE (lhs
) == INTEGER_CST
8690 && TREE_OVERFLOW (lhs
))
8692 int const1_sgn
= tree_int_cst_sgn (const1
);
8693 enum tree_code code2
= code
;
8695 /* Get the sign of the constant on the lhs if the
8696 operation were VARIABLE + CONST1. */
8697 if (TREE_CODE (arg0
) == MINUS_EXPR
)
8698 const1_sgn
= -const1_sgn
;
8700 /* The sign of the constant determines if we overflowed
8701 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8702 Canonicalize to the INT_MIN overflow by swapping the comparison
8704 if (const1_sgn
== -1)
8705 code2
= swap_tree_comparison (code
);
8707 /* We now can look at the canonicalized case
8708 VARIABLE + 1 CODE2 INT_MIN
8709 and decide on the result. */
8710 if (code2
== LT_EXPR
8712 || code2
== EQ_EXPR
)
8713 return omit_one_operand_loc (loc
, type
, boolean_false_node
, variable
);
8714 else if (code2
== NE_EXPR
8716 || code2
== GT_EXPR
)
8717 return omit_one_operand_loc (loc
, type
, boolean_true_node
, variable
);
8720 if (TREE_CODE (lhs
) == TREE_CODE (arg1
)
8721 && (TREE_CODE (lhs
) != INTEGER_CST
8722 || !TREE_OVERFLOW (lhs
)))
8724 if (code
!= EQ_EXPR
&& code
!= NE_EXPR
)
8725 fold_overflow_warning ("assuming signed overflow does not occur "
8726 "when changing X +- C1 cmp C2 to "
8728 WARN_STRICT_OVERFLOW_COMPARISON
);
8729 return fold_build2_loc (loc
, code
, type
, variable
, lhs
);
8733 /* For comparisons of pointers we can decompose it to a compile time
8734 comparison of the base objects and the offsets into the object.
8735 This requires at least one operand being an ADDR_EXPR or a
8736 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8737 if (POINTER_TYPE_P (TREE_TYPE (arg0
))
8738 && (TREE_CODE (arg0
) == ADDR_EXPR
8739 || TREE_CODE (arg1
) == ADDR_EXPR
8740 || TREE_CODE (arg0
) == POINTER_PLUS_EXPR
8741 || TREE_CODE (arg1
) == POINTER_PLUS_EXPR
))
8743 tree base0
, base1
, offset0
= NULL_TREE
, offset1
= NULL_TREE
;
8744 HOST_WIDE_INT bitsize
, bitpos0
= 0, bitpos1
= 0;
8745 enum machine_mode mode
;
8746 int volatilep
, unsignedp
;
8747 bool indirect_base0
= false, indirect_base1
= false;
8749 /* Get base and offset for the access. Strip ADDR_EXPR for
8750 get_inner_reference, but put it back by stripping INDIRECT_REF
8751 off the base object if possible. indirect_baseN will be true
8752 if baseN is not an address but refers to the object itself. */
8754 if (TREE_CODE (arg0
) == ADDR_EXPR
)
8756 base0
= get_inner_reference (TREE_OPERAND (arg0
, 0),
8757 &bitsize
, &bitpos0
, &offset0
, &mode
,
8758 &unsignedp
, &volatilep
, false);
8759 if (TREE_CODE (base0
) == INDIRECT_REF
)
8760 base0
= TREE_OPERAND (base0
, 0);
8762 indirect_base0
= true;
8764 else if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
8766 base0
= TREE_OPERAND (arg0
, 0);
8767 STRIP_SIGN_NOPS (base0
);
8768 if (TREE_CODE (base0
) == ADDR_EXPR
)
8770 base0
= TREE_OPERAND (base0
, 0);
8771 indirect_base0
= true;
8773 offset0
= TREE_OPERAND (arg0
, 1);
8777 if (TREE_CODE (arg1
) == ADDR_EXPR
)
8779 base1
= get_inner_reference (TREE_OPERAND (arg1
, 0),
8780 &bitsize
, &bitpos1
, &offset1
, &mode
,
8781 &unsignedp
, &volatilep
, false);
8782 if (TREE_CODE (base1
) == INDIRECT_REF
)
8783 base1
= TREE_OPERAND (base1
, 0);
8785 indirect_base1
= true;
8787 else if (TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
8789 base1
= TREE_OPERAND (arg1
, 0);
8790 STRIP_SIGN_NOPS (base1
);
8791 if (TREE_CODE (base1
) == ADDR_EXPR
)
8793 base1
= TREE_OPERAND (base1
, 0);
8794 indirect_base1
= true;
8796 offset1
= TREE_OPERAND (arg1
, 1);
8799 /* A local variable can never be pointed to by
8800 the default SSA name of an incoming parameter. */
8801 if ((TREE_CODE (arg0
) == ADDR_EXPR
8803 && TREE_CODE (base0
) == VAR_DECL
8804 && auto_var_in_fn_p (base0
, current_function_decl
)
8806 && TREE_CODE (base1
) == SSA_NAME
8807 && TREE_CODE (SSA_NAME_VAR (base1
)) == PARM_DECL
8808 && SSA_NAME_IS_DEFAULT_DEF (base1
))
8809 || (TREE_CODE (arg1
) == ADDR_EXPR
8811 && TREE_CODE (base1
) == VAR_DECL
8812 && auto_var_in_fn_p (base1
, current_function_decl
)
8814 && TREE_CODE (base0
) == SSA_NAME
8815 && TREE_CODE (SSA_NAME_VAR (base0
)) == PARM_DECL
8816 && SSA_NAME_IS_DEFAULT_DEF (base0
)))
8818 if (code
== NE_EXPR
)
8819 return constant_boolean_node (1, type
);
8820 else if (code
== EQ_EXPR
)
8821 return constant_boolean_node (0, type
);
8823 /* If we have equivalent bases we might be able to simplify. */
8824 else if (indirect_base0
== indirect_base1
8825 && operand_equal_p (base0
, base1
, 0))
8827 /* We can fold this expression to a constant if the non-constant
8828 offset parts are equal. */
8829 if ((offset0
== offset1
8830 || (offset0
&& offset1
8831 && operand_equal_p (offset0
, offset1
, 0)))
8834 || (indirect_base0
&& DECL_P (base0
))
8835 || POINTER_TYPE_OVERFLOW_UNDEFINED
))
8840 && bitpos0
!= bitpos1
8841 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
8842 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
8843 fold_overflow_warning (("assuming pointer wraparound does not "
8844 "occur when comparing P +- C1 with "
8846 WARN_STRICT_OVERFLOW_CONDITIONAL
);
8851 return constant_boolean_node (bitpos0
== bitpos1
, type
);
8853 return constant_boolean_node (bitpos0
!= bitpos1
, type
);
8855 return constant_boolean_node (bitpos0
< bitpos1
, type
);
8857 return constant_boolean_node (bitpos0
<= bitpos1
, type
);
8859 return constant_boolean_node (bitpos0
>= bitpos1
, type
);
8861 return constant_boolean_node (bitpos0
> bitpos1
, type
);
8865 /* We can simplify the comparison to a comparison of the variable
8866 offset parts if the constant offset parts are equal.
8867 Be careful to use signed size type here because otherwise we
8868 mess with array offsets in the wrong way. This is possible
8869 because pointer arithmetic is restricted to retain within an
8870 object and overflow on pointer differences is undefined as of
8871 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8872 else if (bitpos0
== bitpos1
8873 && ((code
== EQ_EXPR
|| code
== NE_EXPR
)
8874 || (indirect_base0
&& DECL_P (base0
))
8875 || POINTER_TYPE_OVERFLOW_UNDEFINED
))
8877 /* By converting to signed size type we cover middle-end pointer
8878 arithmetic which operates on unsigned pointer types of size
8879 type size and ARRAY_REF offsets which are properly sign or
8880 zero extended from their type in case it is narrower than
8882 if (offset0
== NULL_TREE
)
8883 offset0
= build_int_cst (ssizetype
, 0);
8885 offset0
= fold_convert_loc (loc
, ssizetype
, offset0
);
8886 if (offset1
== NULL_TREE
)
8887 offset1
= build_int_cst (ssizetype
, 0);
8889 offset1
= fold_convert_loc (loc
, ssizetype
, offset1
);
8893 && (pointer_may_wrap_p (base0
, offset0
, bitpos0
)
8894 || pointer_may_wrap_p (base1
, offset1
, bitpos1
)))
8895 fold_overflow_warning (("assuming pointer wraparound does not "
8896 "occur when comparing P +- C1 with "
8898 WARN_STRICT_OVERFLOW_COMPARISON
);
8900 return fold_build2_loc (loc
, code
, type
, offset0
, offset1
);
8903 /* For non-equal bases we can simplify if they are addresses
8904 of local binding decls or constants. */
8905 else if (indirect_base0
&& indirect_base1
8906 /* We know that !operand_equal_p (base0, base1, 0)
8907 because the if condition was false. But make
8908 sure two decls are not the same. */
8910 && TREE_CODE (arg0
) == ADDR_EXPR
8911 && TREE_CODE (arg1
) == ADDR_EXPR
8912 && (((TREE_CODE (base0
) == VAR_DECL
8913 || TREE_CODE (base0
) == PARM_DECL
)
8914 && (targetm
.binds_local_p (base0
)
8915 || CONSTANT_CLASS_P (base1
)))
8916 || CONSTANT_CLASS_P (base0
))
8917 && (((TREE_CODE (base1
) == VAR_DECL
8918 || TREE_CODE (base1
) == PARM_DECL
)
8919 && (targetm
.binds_local_p (base1
)
8920 || CONSTANT_CLASS_P (base0
)))
8921 || CONSTANT_CLASS_P (base1
)))
8923 if (code
== EQ_EXPR
)
8924 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
8926 else if (code
== NE_EXPR
)
8927 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
8930 /* For equal offsets we can simplify to a comparison of the
8932 else if (bitpos0
== bitpos1
8934 ? base0
!= TREE_OPERAND (arg0
, 0) : base0
!= arg0
)
8936 ? base1
!= TREE_OPERAND (arg1
, 0) : base1
!= arg1
)
8937 && ((offset0
== offset1
)
8938 || (offset0
&& offset1
8939 && operand_equal_p (offset0
, offset1
, 0))))
8942 base0
= build_fold_addr_expr_loc (loc
, base0
);
8944 base1
= build_fold_addr_expr_loc (loc
, base1
);
8945 return fold_build2_loc (loc
, code
, type
, base0
, base1
);
8949 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8950 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8951 the resulting offset is smaller in absolute value than the
8953 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
8954 && (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
8955 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
8956 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1)))
8957 && (TREE_CODE (arg1
) == PLUS_EXPR
|| TREE_CODE (arg1
) == MINUS_EXPR
)
8958 && (TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
8959 && !TREE_OVERFLOW (TREE_OPERAND (arg1
, 1))))
8961 tree const1
= TREE_OPERAND (arg0
, 1);
8962 tree const2
= TREE_OPERAND (arg1
, 1);
8963 tree variable1
= TREE_OPERAND (arg0
, 0);
8964 tree variable2
= TREE_OPERAND (arg1
, 0);
8966 const char * const warnmsg
= G_("assuming signed overflow does not "
8967 "occur when combining constants around "
8970 /* Put the constant on the side where it doesn't overflow and is
8971 of lower absolute value than before. */
8972 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8973 ? MINUS_EXPR
: PLUS_EXPR
,
8975 if (!TREE_OVERFLOW (cst
)
8976 && tree_int_cst_compare (const2
, cst
) == tree_int_cst_sgn (const2
))
8978 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
8979 return fold_build2_loc (loc
, code
, type
,
8981 fold_build2_loc (loc
,
8982 TREE_CODE (arg1
), TREE_TYPE (arg1
),
8986 cst
= int_const_binop (TREE_CODE (arg0
) == TREE_CODE (arg1
)
8987 ? MINUS_EXPR
: PLUS_EXPR
,
8989 if (!TREE_OVERFLOW (cst
)
8990 && tree_int_cst_compare (const1
, cst
) == tree_int_cst_sgn (const1
))
8992 fold_overflow_warning (warnmsg
, WARN_STRICT_OVERFLOW_COMPARISON
);
8993 return fold_build2_loc (loc
, code
, type
,
8994 fold_build2_loc (loc
, TREE_CODE (arg0
), TREE_TYPE (arg0
),
9000 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9001 signed arithmetic case. That form is created by the compiler
9002 often enough for folding it to be of value. One example is in
9003 computing loop trip counts after Operator Strength Reduction. */
9004 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0
))
9005 && TREE_CODE (arg0
) == MULT_EXPR
9006 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9007 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1)))
9008 && integer_zerop (arg1
))
9010 tree const1
= TREE_OPERAND (arg0
, 1);
9011 tree const2
= arg1
; /* zero */
9012 tree variable1
= TREE_OPERAND (arg0
, 0);
9013 enum tree_code cmp_code
= code
;
9015 /* Handle unfolded multiplication by zero. */
9016 if (integer_zerop (const1
))
9017 return fold_build2_loc (loc
, cmp_code
, type
, const1
, const2
);
9019 fold_overflow_warning (("assuming signed overflow does not occur when "
9020 "eliminating multiplication in comparison "
9022 WARN_STRICT_OVERFLOW_COMPARISON
);
9024 /* If const1 is negative we swap the sense of the comparison. */
9025 if (tree_int_cst_sgn (const1
) < 0)
9026 cmp_code
= swap_tree_comparison (cmp_code
);
9028 return fold_build2_loc (loc
, cmp_code
, type
, variable1
, const2
);
9031 tem
= maybe_canonicalize_comparison (loc
, code
, type
, arg0
, arg1
);
9035 if (FLOAT_TYPE_P (TREE_TYPE (arg0
)))
9037 tree targ0
= strip_float_extensions (arg0
);
9038 tree targ1
= strip_float_extensions (arg1
);
9039 tree newtype
= TREE_TYPE (targ0
);
9041 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
9042 newtype
= TREE_TYPE (targ1
);
9044 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9045 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
9046 return fold_build2_loc (loc
, code
, type
,
9047 fold_convert_loc (loc
, newtype
, targ0
),
9048 fold_convert_loc (loc
, newtype
, targ1
));
9050 /* (-a) CMP (-b) -> b CMP a */
9051 if (TREE_CODE (arg0
) == NEGATE_EXPR
9052 && TREE_CODE (arg1
) == NEGATE_EXPR
)
9053 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg1
, 0),
9054 TREE_OPERAND (arg0
, 0));
9056 if (TREE_CODE (arg1
) == REAL_CST
)
9058 REAL_VALUE_TYPE cst
;
9059 cst
= TREE_REAL_CST (arg1
);
9061 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9062 if (TREE_CODE (arg0
) == NEGATE_EXPR
)
9063 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
,
9064 TREE_OPERAND (arg0
, 0),
9065 build_real (TREE_TYPE (arg1
),
9066 real_value_negate (&cst
)));
9068 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9069 /* a CMP (-0) -> a CMP 0 */
9070 if (REAL_VALUE_MINUS_ZERO (cst
))
9071 return fold_build2_loc (loc
, code
, type
, arg0
,
9072 build_real (TREE_TYPE (arg1
), dconst0
));
9074 /* x != NaN is always true, other ops are always false. */
9075 if (REAL_VALUE_ISNAN (cst
)
9076 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1
))))
9078 tem
= (code
== NE_EXPR
) ? integer_one_node
: integer_zero_node
;
9079 return omit_one_operand_loc (loc
, type
, tem
, arg0
);
9082 /* Fold comparisons against infinity. */
9083 if (REAL_VALUE_ISINF (cst
)
9084 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1
))))
9086 tem
= fold_inf_compare (loc
, code
, type
, arg0
, arg1
);
9087 if (tem
!= NULL_TREE
)
9092 /* If this is a comparison of a real constant with a PLUS_EXPR
9093 or a MINUS_EXPR of a real constant, we can convert it into a
9094 comparison with a revised real constant as long as no overflow
9095 occurs when unsafe_math_optimizations are enabled. */
9096 if (flag_unsafe_math_optimizations
9097 && TREE_CODE (arg1
) == REAL_CST
9098 && (TREE_CODE (arg0
) == PLUS_EXPR
9099 || TREE_CODE (arg0
) == MINUS_EXPR
)
9100 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
9101 && 0 != (tem
= const_binop (TREE_CODE (arg0
) == PLUS_EXPR
9102 ? MINUS_EXPR
: PLUS_EXPR
,
9103 arg1
, TREE_OPERAND (arg0
, 1)))
9104 && !TREE_OVERFLOW (tem
))
9105 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
9107 /* Likewise, we can simplify a comparison of a real constant with
9108 a MINUS_EXPR whose first operand is also a real constant, i.e.
9109 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9110 floating-point types only if -fassociative-math is set. */
9111 if (flag_associative_math
9112 && TREE_CODE (arg1
) == REAL_CST
9113 && TREE_CODE (arg0
) == MINUS_EXPR
9114 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == REAL_CST
9115 && 0 != (tem
= const_binop (MINUS_EXPR
, TREE_OPERAND (arg0
, 0),
9117 && !TREE_OVERFLOW (tem
))
9118 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
,
9119 TREE_OPERAND (arg0
, 1), tem
);
9121 /* Fold comparisons against built-in math functions. */
9122 if (TREE_CODE (arg1
) == REAL_CST
9123 && flag_unsafe_math_optimizations
9124 && ! flag_errno_math
)
9126 enum built_in_function fcode
= builtin_mathfn_code (arg0
);
9128 if (fcode
!= END_BUILTINS
)
9130 tem
= fold_mathfn_compare (loc
, fcode
, code
, type
, arg0
, arg1
);
9131 if (tem
!= NULL_TREE
)
9137 if (TREE_CODE (TREE_TYPE (arg0
)) == INTEGER_TYPE
9138 && CONVERT_EXPR_P (arg0
))
9140 /* If we are widening one operand of an integer comparison,
9141 see if the other operand is similarly being widened. Perhaps we
9142 can do the comparison in the narrower type. */
9143 tem
= fold_widened_comparison (loc
, code
, type
, arg0
, arg1
);
9147 /* Or if we are changing signedness. */
9148 tem
= fold_sign_changed_comparison (loc
, code
, type
, arg0
, arg1
);
9153 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9154 constant, we can simplify it. */
9155 if (TREE_CODE (arg1
) == INTEGER_CST
9156 && (TREE_CODE (arg0
) == MIN_EXPR
9157 || TREE_CODE (arg0
) == MAX_EXPR
)
9158 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
9160 tem
= optimize_minmax_comparison (loc
, code
, type
, op0
, op1
);
9165 /* Simplify comparison of something with itself. (For IEEE
9166 floating-point, we can only do some of these simplifications.) */
9167 if (operand_equal_p (arg0
, arg1
, 0))
9172 if (! FLOAT_TYPE_P (TREE_TYPE (arg0
))
9173 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
9174 return constant_boolean_node (1, type
);
9179 if (! FLOAT_TYPE_P (TREE_TYPE (arg0
))
9180 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
9181 return constant_boolean_node (1, type
);
9182 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
, arg1
);
9185 /* For NE, we can only do this simplification if integer
9186 or we don't honor IEEE floating point NaNs. */
9187 if (FLOAT_TYPE_P (TREE_TYPE (arg0
))
9188 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
9190 /* ... fall through ... */
9193 return constant_boolean_node (0, type
);
9199 /* If we are comparing an expression that just has comparisons
9200 of two integer values, arithmetic expressions of those comparisons,
9201 and constants, we can simplify it. There are only three cases
9202 to check: the two values can either be equal, the first can be
9203 greater, or the second can be greater. Fold the expression for
9204 those three values. Since each value must be 0 or 1, we have
9205 eight possibilities, each of which corresponds to the constant 0
9206 or 1 or one of the six possible comparisons.
9208 This handles common cases like (a > b) == 0 but also handles
9209 expressions like ((x > y) - (y > x)) > 0, which supposedly
9210 occur in macroized code. */
9212 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) != INTEGER_CST
)
9214 tree cval1
= 0, cval2
= 0;
9217 if (twoval_comparison_p (arg0
, &cval1
, &cval2
, &save_p
)
9218 /* Don't handle degenerate cases here; they should already
9219 have been handled anyway. */
9220 && cval1
!= 0 && cval2
!= 0
9221 && ! (TREE_CONSTANT (cval1
) && TREE_CONSTANT (cval2
))
9222 && TREE_TYPE (cval1
) == TREE_TYPE (cval2
)
9223 && INTEGRAL_TYPE_P (TREE_TYPE (cval1
))
9224 && TYPE_MAX_VALUE (TREE_TYPE (cval1
))
9225 && TYPE_MAX_VALUE (TREE_TYPE (cval2
))
9226 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1
)),
9227 TYPE_MAX_VALUE (TREE_TYPE (cval2
)), 0))
9229 tree maxval
= TYPE_MAX_VALUE (TREE_TYPE (cval1
));
9230 tree minval
= TYPE_MIN_VALUE (TREE_TYPE (cval1
));
9232 /* We can't just pass T to eval_subst in case cval1 or cval2
9233 was the same as ARG1. */
9236 = fold_build2_loc (loc
, code
, type
,
9237 eval_subst (loc
, arg0
, cval1
, maxval
,
9241 = fold_build2_loc (loc
, code
, type
,
9242 eval_subst (loc
, arg0
, cval1
, maxval
,
9246 = fold_build2_loc (loc
, code
, type
,
9247 eval_subst (loc
, arg0
, cval1
, minval
,
9251 /* All three of these results should be 0 or 1. Confirm they are.
9252 Then use those values to select the proper code to use. */
9254 if (TREE_CODE (high_result
) == INTEGER_CST
9255 && TREE_CODE (equal_result
) == INTEGER_CST
9256 && TREE_CODE (low_result
) == INTEGER_CST
)
9258 /* Make a 3-bit mask with the high-order bit being the
9259 value for `>', the next for '=', and the low for '<'. */
9260 switch ((integer_onep (high_result
) * 4)
9261 + (integer_onep (equal_result
) * 2)
9262 + integer_onep (low_result
))
9266 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
9287 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
9292 tem
= save_expr (build2 (code
, type
, cval1
, cval2
));
9293 SET_EXPR_LOCATION (tem
, loc
);
9296 return fold_build2_loc (loc
, code
, type
, cval1
, cval2
);
9301 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9302 into a single range test. */
9303 if ((TREE_CODE (arg0
) == TRUNC_DIV_EXPR
9304 || TREE_CODE (arg0
) == EXACT_DIV_EXPR
)
9305 && TREE_CODE (arg1
) == INTEGER_CST
9306 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9307 && !integer_zerop (TREE_OPERAND (arg0
, 1))
9308 && !TREE_OVERFLOW (TREE_OPERAND (arg0
, 1))
9309 && !TREE_OVERFLOW (arg1
))
9311 tem
= fold_div_compare (loc
, code
, type
, arg0
, arg1
);
9312 if (tem
!= NULL_TREE
)
9316 /* Fold ~X op ~Y as Y op X. */
9317 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9318 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
9320 tree cmp_type
= TREE_TYPE (TREE_OPERAND (arg0
, 0));
9321 return fold_build2_loc (loc
, code
, type
,
9322 fold_convert_loc (loc
, cmp_type
,
9323 TREE_OPERAND (arg1
, 0)),
9324 TREE_OPERAND (arg0
, 0));
9327 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9328 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9329 && TREE_CODE (arg1
) == INTEGER_CST
)
9331 tree cmp_type
= TREE_TYPE (TREE_OPERAND (arg0
, 0));
9332 return fold_build2_loc (loc
, swap_tree_comparison (code
), type
,
9333 TREE_OPERAND (arg0
, 0),
9334 fold_build1_loc (loc
, BIT_NOT_EXPR
, cmp_type
,
9335 fold_convert_loc (loc
, cmp_type
, arg1
)));
9342 /* Subroutine of fold_binary. Optimize complex multiplications of the
9343 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9344 argument EXPR represents the expression "z" of type TYPE. */
9347 fold_mult_zconjz (location_t loc
, tree type
, tree expr
)
9349 tree itype
= TREE_TYPE (type
);
9350 tree rpart
, ipart
, tem
;
9352 if (TREE_CODE (expr
) == COMPLEX_EXPR
)
9354 rpart
= TREE_OPERAND (expr
, 0);
9355 ipart
= TREE_OPERAND (expr
, 1);
9357 else if (TREE_CODE (expr
) == COMPLEX_CST
)
9359 rpart
= TREE_REALPART (expr
);
9360 ipart
= TREE_IMAGPART (expr
);
9364 expr
= save_expr (expr
);
9365 rpart
= fold_build1_loc (loc
, REALPART_EXPR
, itype
, expr
);
9366 ipart
= fold_build1_loc (loc
, IMAGPART_EXPR
, itype
, expr
);
9369 rpart
= save_expr (rpart
);
9370 ipart
= save_expr (ipart
);
9371 tem
= fold_build2_loc (loc
, PLUS_EXPR
, itype
,
9372 fold_build2_loc (loc
, MULT_EXPR
, itype
, rpart
, rpart
),
9373 fold_build2_loc (loc
, MULT_EXPR
, itype
, ipart
, ipart
));
9374 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, tem
,
9375 build_zero_cst (itype
));
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
   power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
   guarantees that P and N have the same least significant log2(M) bits.
   N is not otherwise constrained.  In particular, N is not normalized to
   0 <= N < M as is common.  In general, the precise value of P is unknown.
   M is chosen as large as possible such that constant N can be determined.

   Returns M and sets *RESIDUE to N.

   If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
   account.  This is not always possible due to PR 35705.
   NOTE(review): in the visible code ALLOW_FUNC_ALIGN is only threaded
   through the recursive call, not tested directly here -- presumably it
   is consumed further down the call chain; confirm against the full
   file.  */

static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
				 bool allow_func_align)
{
  enum tree_code code;

  /* Default answer: nothing known, i.e. M == 1 and N == 0.  */
  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      unsigned int bitalign;
      /* The object's alignment gives the modulus and its bit offset,
	 converted from bits to bytes, gives the residue.  */
      bitalign = get_object_alignment_1 (TREE_OPERAND (expr, 0), residue);
      *residue /= BITS_PER_UNIT;
      return bitalign / BITS_PER_UNIT;
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      /* Recurse on the base pointer of the addition.  */
      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue,
						 allow_func_align);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
	{
	  /* Adding a constant shifts the residue; the modulus is
	     unchanged.  */
	  *residue += TREE_INT_CST_LOW (op1);
	  return modulus;
	}
      else if (inner_code == MULT_EXPR)
	{
	  op1 = TREE_OPERAND (op1, 1);
	  if (TREE_CODE (op1) == INTEGER_CST)
	    {
	      unsigned HOST_WIDE_INT align;

	      /* Compute the greatest power-of-2 divisor of op1.  */
	      align = TREE_INT_CST_LOW (op1);
	      align &= -align;

	      /* If align is non-zero and less than *modulus, replace
		 *modulus with align., If align is 0, then either op1 is 0
		 or the greatest power-of-2 divisor of op1 doesn't fit in an
		 unsigned HOST_WIDE_INT.  In either case, no additional
		 constraint is imposed.  */
	      if (align)
		modulus = MIN (modulus, align);

	      return modulus;
	    }
	}
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
}
9457 /* Fold a binary expression of code CODE and type TYPE with operands
9458 OP0 and OP1. LOC is the location of the resulting expression.
9459 Return the folded expression if folding is successful. Otherwise,
9460 return NULL_TREE. */
9463 fold_binary_loc (location_t loc
,
9464 enum tree_code code
, tree type
, tree op0
, tree op1
)
9466 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
9467 tree arg0
, arg1
, tem
;
9468 tree t1
= NULL_TREE
;
9469 bool strict_overflow_p
;
9471 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
9472 && TREE_CODE_LENGTH (code
) == 2
9474 && op1
!= NULL_TREE
);
9479 /* Strip any conversions that don't change the mode. This is
9480 safe for every expression, except for a comparison expression
9481 because its signedness is derived from its operands. So, in
9482 the latter case, only strip conversions that don't change the
9483 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9486 Note that this is done as an internal manipulation within the
9487 constant folder, in order to find the simplest representation
9488 of the arguments so that their form can be studied. In any
9489 cases, the appropriate type conversions should be put back in
9490 the tree that will get out of the constant folder. */
9492 if (kind
== tcc_comparison
|| code
== MIN_EXPR
|| code
== MAX_EXPR
)
9494 STRIP_SIGN_NOPS (arg0
);
9495 STRIP_SIGN_NOPS (arg1
);
9503 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9504 constant but we can't do arithmetic on them. */
9505 if ((TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
9506 || (TREE_CODE (arg0
) == REAL_CST
&& TREE_CODE (arg1
) == REAL_CST
)
9507 || (TREE_CODE (arg0
) == FIXED_CST
&& TREE_CODE (arg1
) == FIXED_CST
)
9508 || (TREE_CODE (arg0
) == FIXED_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
9509 || (TREE_CODE (arg0
) == COMPLEX_CST
&& TREE_CODE (arg1
) == COMPLEX_CST
)
9510 || (TREE_CODE (arg0
) == VECTOR_CST
&& TREE_CODE (arg1
) == VECTOR_CST
))
9512 if (kind
== tcc_binary
)
9514 /* Make sure type and arg0 have the same saturating flag. */
9515 gcc_assert (TYPE_SATURATING (type
)
9516 == TYPE_SATURATING (TREE_TYPE (arg0
)));
9517 tem
= const_binop (code
, arg0
, arg1
);
9519 else if (kind
== tcc_comparison
)
9520 tem
= fold_relational_const (code
, type
, arg0
, arg1
);
9524 if (tem
!= NULL_TREE
)
9526 if (TREE_TYPE (tem
) != type
)
9527 tem
= fold_convert_loc (loc
, type
, tem
);
9532 /* If this is a commutative operation, and ARG0 is a constant, move it
9533 to ARG1 to reduce the number of tests below. */
9534 if (commutative_tree_code (code
)
9535 && tree_swap_operands_p (arg0
, arg1
, true))
9536 return fold_build2_loc (loc
, code
, type
, op1
, op0
);
9538 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9540 First check for cases where an arithmetic operation is applied to a
9541 compound, conditional, or comparison operation. Push the arithmetic
9542 operation inside the compound or conditional to see if any folding
9543 can then be done. Convert comparison to conditional for this purpose.
9544 The also optimizes non-constant cases that used to be done in
9547 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9548 one of the operands is a comparison and the other is a comparison, a
9549 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9550 code below would make the expression more complex. Change it to a
9551 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9552 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9554 if ((code
== BIT_AND_EXPR
|| code
== BIT_IOR_EXPR
9555 || code
== EQ_EXPR
|| code
== NE_EXPR
)
9556 && ((truth_value_p (TREE_CODE (arg0
))
9557 && (truth_value_p (TREE_CODE (arg1
))
9558 || (TREE_CODE (arg1
) == BIT_AND_EXPR
9559 && integer_onep (TREE_OPERAND (arg1
, 1)))))
9560 || (truth_value_p (TREE_CODE (arg1
))
9561 && (truth_value_p (TREE_CODE (arg0
))
9562 || (TREE_CODE (arg0
) == BIT_AND_EXPR
9563 && integer_onep (TREE_OPERAND (arg0
, 1)))))))
9565 tem
= fold_build2_loc (loc
, code
== BIT_AND_EXPR
? TRUTH_AND_EXPR
9566 : code
== BIT_IOR_EXPR
? TRUTH_OR_EXPR
9569 fold_convert_loc (loc
, boolean_type_node
, arg0
),
9570 fold_convert_loc (loc
, boolean_type_node
, arg1
));
9572 if (code
== EQ_EXPR
)
9573 tem
= invert_truthvalue_loc (loc
, tem
);
9575 return fold_convert_loc (loc
, type
, tem
);
9578 if (TREE_CODE_CLASS (code
) == tcc_binary
9579 || TREE_CODE_CLASS (code
) == tcc_comparison
)
9581 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
9583 tem
= fold_build2_loc (loc
, code
, type
,
9584 fold_convert_loc (loc
, TREE_TYPE (op0
),
9585 TREE_OPERAND (arg0
, 1)), op1
);
9586 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
9589 if (TREE_CODE (arg1
) == COMPOUND_EXPR
9590 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
9592 tem
= fold_build2_loc (loc
, code
, type
, op0
,
9593 fold_convert_loc (loc
, TREE_TYPE (op1
),
9594 TREE_OPERAND (arg1
, 1)));
9595 return build2_loc (loc
, COMPOUND_EXPR
, type
, TREE_OPERAND (arg1
, 0),
9599 if (TREE_CODE (arg0
) == COND_EXPR
|| COMPARISON_CLASS_P (arg0
))
9601 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
9603 /*cond_first_p=*/1);
9604 if (tem
!= NULL_TREE
)
9608 if (TREE_CODE (arg1
) == COND_EXPR
|| COMPARISON_CLASS_P (arg1
))
9610 tem
= fold_binary_op_with_conditional_arg (loc
, code
, type
, op0
, op1
,
9612 /*cond_first_p=*/0);
9613 if (tem
!= NULL_TREE
)
9621 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9622 if (TREE_CODE (arg0
) == ADDR_EXPR
9623 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == MEM_REF
)
9625 tree iref
= TREE_OPERAND (arg0
, 0);
9626 return fold_build2 (MEM_REF
, type
,
9627 TREE_OPERAND (iref
, 0),
9628 int_const_binop (PLUS_EXPR
, arg1
,
9629 TREE_OPERAND (iref
, 1)));
9632 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9633 if (TREE_CODE (arg0
) == ADDR_EXPR
9634 && handled_component_p (TREE_OPERAND (arg0
, 0)))
9637 HOST_WIDE_INT coffset
;
9638 base
= get_addr_base_and_unit_offset (TREE_OPERAND (arg0
, 0),
9642 return fold_build2 (MEM_REF
, type
,
9643 build_fold_addr_expr (base
),
9644 int_const_binop (PLUS_EXPR
, arg1
,
9645 size_int (coffset
)));
9650 case POINTER_PLUS_EXPR
:
9651 /* 0 +p index -> (type)index */
9652 if (integer_zerop (arg0
))
9653 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
9655 /* PTR +p 0 -> PTR */
9656 if (integer_zerop (arg1
))
9657 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
9659 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9660 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
9661 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
9662 return fold_convert_loc (loc
, type
,
9663 fold_build2_loc (loc
, PLUS_EXPR
, sizetype
,
9664 fold_convert_loc (loc
, sizetype
,
9666 fold_convert_loc (loc
, sizetype
,
9669 /* (PTR +p B) +p A -> PTR +p (B + A) */
9670 if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
9673 tree arg01
= fold_convert_loc (loc
, sizetype
, TREE_OPERAND (arg0
, 1));
9674 tree arg00
= TREE_OPERAND (arg0
, 0);
9675 inner
= fold_build2_loc (loc
, PLUS_EXPR
, sizetype
,
9676 arg01
, fold_convert_loc (loc
, sizetype
, arg1
));
9677 return fold_convert_loc (loc
, type
,
9678 fold_build_pointer_plus_loc (loc
,
9682 /* PTR_CST +p CST -> CST1 */
9683 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
9684 return fold_build2_loc (loc
, PLUS_EXPR
, type
, arg0
,
9685 fold_convert_loc (loc
, type
, arg1
));
9687 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
9688 of the array. Loop optimizer sometimes produce this type of
9690 if (TREE_CODE (arg0
) == ADDR_EXPR
)
9692 tem
= try_move_mult_to_index (loc
, arg0
,
9693 fold_convert_loc (loc
, sizetype
, arg1
));
9695 return fold_convert_loc (loc
, type
, tem
);
9701 /* A + (-B) -> A - B */
9702 if (TREE_CODE (arg1
) == NEGATE_EXPR
)
9703 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
9704 fold_convert_loc (loc
, type
, arg0
),
9705 fold_convert_loc (loc
, type
,
9706 TREE_OPERAND (arg1
, 0)));
9707 /* (-A) + B -> B - A */
9708 if (TREE_CODE (arg0
) == NEGATE_EXPR
9709 && reorder_operands_p (TREE_OPERAND (arg0
, 0), arg1
))
9710 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
9711 fold_convert_loc (loc
, type
, arg1
),
9712 fold_convert_loc (loc
, type
,
9713 TREE_OPERAND (arg0
, 0)));
9715 if (INTEGRAL_TYPE_P (type
))
9717 /* Convert ~A + 1 to -A. */
9718 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9719 && integer_onep (arg1
))
9720 return fold_build1_loc (loc
, NEGATE_EXPR
, type
,
9721 fold_convert_loc (loc
, type
,
9722 TREE_OPERAND (arg0
, 0)));
9725 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
9726 && !TYPE_OVERFLOW_TRAPS (type
))
9728 tree tem
= TREE_OPERAND (arg0
, 0);
9731 if (operand_equal_p (tem
, arg1
, 0))
9733 t1
= build_int_cst_type (type
, -1);
9734 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
9739 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
9740 && !TYPE_OVERFLOW_TRAPS (type
))
9742 tree tem
= TREE_OPERAND (arg1
, 0);
9745 if (operand_equal_p (arg0
, tem
, 0))
9747 t1
= build_int_cst_type (type
, -1);
9748 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
9752 /* X + (X / CST) * -CST is X % CST. */
9753 if (TREE_CODE (arg1
) == MULT_EXPR
9754 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == TRUNC_DIV_EXPR
9755 && operand_equal_p (arg0
,
9756 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0), 0))
9758 tree cst0
= TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1);
9759 tree cst1
= TREE_OPERAND (arg1
, 1);
9760 tree sum
= fold_binary_loc (loc
, PLUS_EXPR
, TREE_TYPE (cst1
),
9762 if (sum
&& integer_zerop (sum
))
9763 return fold_convert_loc (loc
, type
,
9764 fold_build2_loc (loc
, TRUNC_MOD_EXPR
,
9765 TREE_TYPE (arg0
), arg0
,
9770 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9771 same or one. Make sure type is not saturating.
9772 fold_plusminus_mult_expr will re-associate. */
9773 if ((TREE_CODE (arg0
) == MULT_EXPR
9774 || TREE_CODE (arg1
) == MULT_EXPR
)
9775 && !TYPE_SATURATING (type
)
9776 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
9778 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
9783 if (! FLOAT_TYPE_P (type
))
9785 if (integer_zerop (arg1
))
9786 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
9788 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9789 with a constant, and the two constants have no bits in common,
9790 we should treat this as a BIT_IOR_EXPR since this may produce more
9792 if (TREE_CODE (arg0
) == BIT_AND_EXPR
9793 && TREE_CODE (arg1
) == BIT_AND_EXPR
9794 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
9795 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
9796 && integer_zerop (const_binop (BIT_AND_EXPR
,
9797 TREE_OPERAND (arg0
, 1),
9798 TREE_OPERAND (arg1
, 1))))
9800 code
= BIT_IOR_EXPR
;
9804 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9805 (plus (plus (mult) (mult)) (foo)) so that we can
9806 take advantage of the factoring cases below. */
9807 if (TYPE_OVERFLOW_WRAPS (type
)
9808 && (((TREE_CODE (arg0
) == PLUS_EXPR
9809 || TREE_CODE (arg0
) == MINUS_EXPR
)
9810 && TREE_CODE (arg1
) == MULT_EXPR
)
9811 || ((TREE_CODE (arg1
) == PLUS_EXPR
9812 || TREE_CODE (arg1
) == MINUS_EXPR
)
9813 && TREE_CODE (arg0
) == MULT_EXPR
)))
9815 tree parg0
, parg1
, parg
, marg
;
9816 enum tree_code pcode
;
9818 if (TREE_CODE (arg1
) == MULT_EXPR
)
9819 parg
= arg0
, marg
= arg1
;
9821 parg
= arg1
, marg
= arg0
;
9822 pcode
= TREE_CODE (parg
);
9823 parg0
= TREE_OPERAND (parg
, 0);
9824 parg1
= TREE_OPERAND (parg
, 1);
9828 if (TREE_CODE (parg0
) == MULT_EXPR
9829 && TREE_CODE (parg1
) != MULT_EXPR
)
9830 return fold_build2_loc (loc
, pcode
, type
,
9831 fold_build2_loc (loc
, PLUS_EXPR
, type
,
9832 fold_convert_loc (loc
, type
,
9834 fold_convert_loc (loc
, type
,
9836 fold_convert_loc (loc
, type
, parg1
));
9837 if (TREE_CODE (parg0
) != MULT_EXPR
9838 && TREE_CODE (parg1
) == MULT_EXPR
)
9840 fold_build2_loc (loc
, PLUS_EXPR
, type
,
9841 fold_convert_loc (loc
, type
, parg0
),
9842 fold_build2_loc (loc
, pcode
, type
,
9843 fold_convert_loc (loc
, type
, marg
),
9844 fold_convert_loc (loc
, type
,
9850 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9851 if (fold_real_zero_addition_p (TREE_TYPE (arg0
), arg1
, 0))
9852 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
9854 /* Likewise if the operands are reversed. */
9855 if (fold_real_zero_addition_p (TREE_TYPE (arg1
), arg0
, 0))
9856 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
9858 /* Convert X + -C into X - C. */
9859 if (TREE_CODE (arg1
) == REAL_CST
9860 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
)))
9862 tem
= fold_negate_const (arg1
, type
);
9863 if (!TREE_OVERFLOW (arg1
) || !flag_trapping_math
)
9864 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
9865 fold_convert_loc (loc
, type
, arg0
),
9866 fold_convert_loc (loc
, type
, tem
));
9869 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9870 to __complex__ ( x, y ). This is not the same for SNaNs or
9871 if signed zeros are involved. */
9872 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
9873 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
9874 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
9876 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
9877 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
9878 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
9879 bool arg0rz
= false, arg0iz
= false;
9880 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
9881 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
9883 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
9884 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
9885 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
9887 tree rp
= arg1r
? arg1r
9888 : build1 (REALPART_EXPR
, rtype
, arg1
);
9889 tree ip
= arg0i
? arg0i
9890 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
9891 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
9893 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
9895 tree rp
= arg0r
? arg0r
9896 : build1 (REALPART_EXPR
, rtype
, arg0
);
9897 tree ip
= arg1i
? arg1i
9898 : build1 (IMAGPART_EXPR
, rtype
, arg1
);
9899 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
9904 if (flag_unsafe_math_optimizations
9905 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
9906 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
9907 && (tem
= distribute_real_division (loc
, code
, type
, arg0
, arg1
)))
9910 /* Convert x+x into x*2.0. */
9911 if (operand_equal_p (arg0
, arg1
, 0)
9912 && SCALAR_FLOAT_TYPE_P (type
))
9913 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
,
9914 build_real (type
, dconst2
));
9916 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9917 We associate floats only if the user has specified
9918 -fassociative-math. */
9919 if (flag_associative_math
9920 && TREE_CODE (arg1
) == PLUS_EXPR
9921 && TREE_CODE (arg0
) != MULT_EXPR
)
9923 tree tree10
= TREE_OPERAND (arg1
, 0);
9924 tree tree11
= TREE_OPERAND (arg1
, 1);
9925 if (TREE_CODE (tree11
) == MULT_EXPR
9926 && TREE_CODE (tree10
) == MULT_EXPR
)
9929 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, arg0
, tree10
);
9930 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree0
, tree11
);
9933 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9934 We associate floats only if the user has specified
9935 -fassociative-math. */
9936 if (flag_associative_math
9937 && TREE_CODE (arg0
) == PLUS_EXPR
9938 && TREE_CODE (arg1
) != MULT_EXPR
)
9940 tree tree00
= TREE_OPERAND (arg0
, 0);
9941 tree tree01
= TREE_OPERAND (arg0
, 1);
9942 if (TREE_CODE (tree01
) == MULT_EXPR
9943 && TREE_CODE (tree00
) == MULT_EXPR
)
9946 tree0
= fold_build2_loc (loc
, PLUS_EXPR
, type
, tree01
, arg1
);
9947 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tree00
, tree0
);
9953 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9954 is a rotate of A by C1 bits. */
9955 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9956 is a rotate of A by B bits. */
9958 enum tree_code code0
, code1
;
9960 code0
= TREE_CODE (arg0
);
9961 code1
= TREE_CODE (arg1
);
9962 if (((code0
== RSHIFT_EXPR
&& code1
== LSHIFT_EXPR
)
9963 || (code1
== RSHIFT_EXPR
&& code0
== LSHIFT_EXPR
))
9964 && operand_equal_p (TREE_OPERAND (arg0
, 0),
9965 TREE_OPERAND (arg1
, 0), 0)
9966 && (rtype
= TREE_TYPE (TREE_OPERAND (arg0
, 0)),
9967 TYPE_UNSIGNED (rtype
))
9968 /* Only create rotates in complete modes. Other cases are not
9969 expanded properly. */
9970 && TYPE_PRECISION (rtype
) == GET_MODE_PRECISION (TYPE_MODE (rtype
)))
9972 tree tree01
, tree11
;
9973 enum tree_code code01
, code11
;
9975 tree01
= TREE_OPERAND (arg0
, 1);
9976 tree11
= TREE_OPERAND (arg1
, 1);
9977 STRIP_NOPS (tree01
);
9978 STRIP_NOPS (tree11
);
9979 code01
= TREE_CODE (tree01
);
9980 code11
= TREE_CODE (tree11
);
9981 if (code01
== INTEGER_CST
9982 && code11
== INTEGER_CST
9983 && TREE_INT_CST_HIGH (tree01
) == 0
9984 && TREE_INT_CST_HIGH (tree11
) == 0
9985 && ((TREE_INT_CST_LOW (tree01
) + TREE_INT_CST_LOW (tree11
))
9986 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0
, 0)))))
9988 tem
= build2_loc (loc
, LROTATE_EXPR
,
9989 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
9990 TREE_OPERAND (arg0
, 0),
9991 code0
== LSHIFT_EXPR
? tree01
: tree11
);
9992 return fold_convert_loc (loc
, type
, tem
);
9994 else if (code11
== MINUS_EXPR
)
9996 tree tree110
, tree111
;
9997 tree110
= TREE_OPERAND (tree11
, 0);
9998 tree111
= TREE_OPERAND (tree11
, 1);
9999 STRIP_NOPS (tree110
);
10000 STRIP_NOPS (tree111
);
10001 if (TREE_CODE (tree110
) == INTEGER_CST
10002 && 0 == compare_tree_int (tree110
,
10004 (TREE_TYPE (TREE_OPERAND
10006 && operand_equal_p (tree01
, tree111
, 0))
10008 fold_convert_loc (loc
, type
,
10009 build2 ((code0
== LSHIFT_EXPR
10012 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
10013 TREE_OPERAND (arg0
, 0), tree01
));
10015 else if (code01
== MINUS_EXPR
)
10017 tree tree010
, tree011
;
10018 tree010
= TREE_OPERAND (tree01
, 0);
10019 tree011
= TREE_OPERAND (tree01
, 1);
10020 STRIP_NOPS (tree010
);
10021 STRIP_NOPS (tree011
);
10022 if (TREE_CODE (tree010
) == INTEGER_CST
10023 && 0 == compare_tree_int (tree010
,
10025 (TREE_TYPE (TREE_OPERAND
10027 && operand_equal_p (tree11
, tree011
, 0))
10028 return fold_convert_loc
10030 build2 ((code0
!= LSHIFT_EXPR
10033 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
10034 TREE_OPERAND (arg0
, 0), tree11
));
10040 /* In most languages, can't associate operations on floats through
10041 parentheses. Rather than remember where the parentheses were, we
10042 don't associate floats at all, unless the user has specified
10043 -fassociative-math.
10044 And, we need to make sure type is not saturating. */
10046 if ((! FLOAT_TYPE_P (type
) || flag_associative_math
)
10047 && !TYPE_SATURATING (type
))
10049 tree var0
, con0
, lit0
, minus_lit0
;
10050 tree var1
, con1
, lit1
, minus_lit1
;
10053 /* Split both trees into variables, constants, and literals. Then
10054 associate each group together, the constants with literals,
10055 then the result with variables. This increases the chances of
10056 literals being recombined later and of generating relocatable
10057 expressions for the sum of a constant and literal. */
10058 var0
= split_tree (arg0
, code
, &con0
, &lit0
, &minus_lit0
, 0);
10059 var1
= split_tree (arg1
, code
, &con1
, &lit1
, &minus_lit1
,
10060 code
== MINUS_EXPR
);
10062 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10063 if (code
== MINUS_EXPR
)
10066 /* With undefined overflow we can only associate constants with one
10067 variable, and constants whose association doesn't overflow. */
10068 if ((POINTER_TYPE_P (type
) && POINTER_TYPE_OVERFLOW_UNDEFINED
)
10069 || (INTEGRAL_TYPE_P (type
) && !TYPE_OVERFLOW_WRAPS (type
)))
10076 if (TREE_CODE (tmp0
) == NEGATE_EXPR
)
10077 tmp0
= TREE_OPERAND (tmp0
, 0);
10078 if (TREE_CODE (tmp1
) == NEGATE_EXPR
)
10079 tmp1
= TREE_OPERAND (tmp1
, 0);
10080 /* The only case we can still associate with two variables
10081 is if they are the same, modulo negation. */
10082 if (!operand_equal_p (tmp0
, tmp1
, 0))
10086 if (ok
&& lit0
&& lit1
)
10088 tree tmp0
= fold_convert (type
, lit0
);
10089 tree tmp1
= fold_convert (type
, lit1
);
10091 if (!TREE_OVERFLOW (tmp0
) && !TREE_OVERFLOW (tmp1
)
10092 && TREE_OVERFLOW (fold_build2 (code
, type
, tmp0
, tmp1
)))
10097 /* Only do something if we found more than two objects. Otherwise,
10098 nothing has changed and we risk infinite recursion. */
10100 && (2 < ((var0
!= 0) + (var1
!= 0)
10101 + (con0
!= 0) + (con1
!= 0)
10102 + (lit0
!= 0) + (lit1
!= 0)
10103 + (minus_lit0
!= 0) + (minus_lit1
!= 0))))
10105 var0
= associate_trees (loc
, var0
, var1
, code
, type
);
10106 con0
= associate_trees (loc
, con0
, con1
, code
, type
);
10107 lit0
= associate_trees (loc
, lit0
, lit1
, code
, type
);
10108 minus_lit0
= associate_trees (loc
, minus_lit0
, minus_lit1
, code
, type
);
10110 /* Preserve the MINUS_EXPR if the negative part of the literal is
10111 greater than the positive part. Otherwise, the multiplicative
10112 folding code (i.e extract_muldiv) may be fooled in case
10113 unsigned constants are subtracted, like in the following
10114 example: ((X*2 + 4) - 8U)/2. */
10115 if (minus_lit0
&& lit0
)
10117 if (TREE_CODE (lit0
) == INTEGER_CST
10118 && TREE_CODE (minus_lit0
) == INTEGER_CST
10119 && tree_int_cst_lt (lit0
, minus_lit0
))
10121 minus_lit0
= associate_trees (loc
, minus_lit0
, lit0
,
10127 lit0
= associate_trees (loc
, lit0
, minus_lit0
,
10136 fold_convert_loc (loc
, type
,
10137 associate_trees (loc
, var0
, minus_lit0
,
10138 MINUS_EXPR
, type
));
10141 con0
= associate_trees (loc
, con0
, minus_lit0
,
10144 fold_convert_loc (loc
, type
,
10145 associate_trees (loc
, var0
, con0
,
10150 con0
= associate_trees (loc
, con0
, lit0
, code
, type
);
10152 fold_convert_loc (loc
, type
, associate_trees (loc
, var0
, con0
,
10160 /* Pointer simplifications for subtraction, simple reassociations. */
10161 if (POINTER_TYPE_P (TREE_TYPE (arg1
)) && POINTER_TYPE_P (TREE_TYPE (arg0
)))
10163 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10164 if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
10165 && TREE_CODE (arg1
) == POINTER_PLUS_EXPR
)
10167 tree arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10168 tree arg01
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
10169 tree arg10
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
10170 tree arg11
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
10171 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
10172 fold_build2_loc (loc
, MINUS_EXPR
, type
,
10174 fold_build2_loc (loc
, MINUS_EXPR
, type
,
10177 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10178 else if (TREE_CODE (arg0
) == POINTER_PLUS_EXPR
)
10180 tree arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10181 tree arg01
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
10182 tree tmp
= fold_binary_loc (loc
, MINUS_EXPR
, type
, arg00
,
10183 fold_convert_loc (loc
, type
, arg1
));
10185 return fold_build2_loc (loc
, PLUS_EXPR
, type
, tmp
, arg01
);
10188 /* A - (-B) -> A + B */
10189 if (TREE_CODE (arg1
) == NEGATE_EXPR
)
10190 return fold_build2_loc (loc
, PLUS_EXPR
, type
, op0
,
10191 fold_convert_loc (loc
, type
,
10192 TREE_OPERAND (arg1
, 0)));
10193 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10194 if (TREE_CODE (arg0
) == NEGATE_EXPR
10195 && (FLOAT_TYPE_P (type
)
10196 || INTEGRAL_TYPE_P (type
))
10197 && negate_expr_p (arg1
)
10198 && reorder_operands_p (arg0
, arg1
))
10199 return fold_build2_loc (loc
, MINUS_EXPR
, type
,
10200 fold_convert_loc (loc
, type
,
10201 negate_expr (arg1
)),
10202 fold_convert_loc (loc
, type
,
10203 TREE_OPERAND (arg0
, 0)));
10204 /* Convert -A - 1 to ~A. */
10205 if (INTEGRAL_TYPE_P (type
)
10206 && TREE_CODE (arg0
) == NEGATE_EXPR
10207 && integer_onep (arg1
)
10208 && !TYPE_OVERFLOW_TRAPS (type
))
10209 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
10210 fold_convert_loc (loc
, type
,
10211 TREE_OPERAND (arg0
, 0)));
10213 /* Convert -1 - A to ~A. */
10214 if (INTEGRAL_TYPE_P (type
)
10215 && integer_all_onesp (arg0
))
10216 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, op1
);
10219 /* X - (X / CST) * CST is X % CST. */
10220 if (INTEGRAL_TYPE_P (type
)
10221 && TREE_CODE (arg1
) == MULT_EXPR
10222 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == TRUNC_DIV_EXPR
10223 && operand_equal_p (arg0
,
10224 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0), 0)
10225 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1),
10226 TREE_OPERAND (arg1
, 1), 0))
10228 fold_convert_loc (loc
, type
,
10229 fold_build2_loc (loc
, TRUNC_MOD_EXPR
, TREE_TYPE (arg0
),
10230 arg0
, TREE_OPERAND (arg1
, 1)));
10232 if (! FLOAT_TYPE_P (type
))
10234 if (integer_zerop (arg0
))
10235 return negate_expr (fold_convert_loc (loc
, type
, arg1
));
10236 if (integer_zerop (arg1
))
10237 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10239 /* Fold A - (A & B) into ~B & A. */
10240 if (!TREE_SIDE_EFFECTS (arg0
)
10241 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
10243 if (operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0))
10245 tree arg10
= fold_convert_loc (loc
, type
,
10246 TREE_OPERAND (arg1
, 0));
10247 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10248 fold_build1_loc (loc
, BIT_NOT_EXPR
,
10250 fold_convert_loc (loc
, type
, arg0
));
10252 if (operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10254 tree arg11
= fold_convert_loc (loc
,
10255 type
, TREE_OPERAND (arg1
, 1));
10256 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10257 fold_build1_loc (loc
, BIT_NOT_EXPR
,
10259 fold_convert_loc (loc
, type
, arg0
));
10263 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10264 any power of 2 minus 1. */
10265 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10266 && TREE_CODE (arg1
) == BIT_AND_EXPR
10267 && operand_equal_p (TREE_OPERAND (arg0
, 0),
10268 TREE_OPERAND (arg1
, 0), 0))
10270 tree mask0
= TREE_OPERAND (arg0
, 1);
10271 tree mask1
= TREE_OPERAND (arg1
, 1);
10272 tree tem
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, mask0
);
10274 if (operand_equal_p (tem
, mask1
, 0))
10276 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, type
,
10277 TREE_OPERAND (arg0
, 0), mask1
);
10278 return fold_build2_loc (loc
, MINUS_EXPR
, type
, tem
, mask1
);
10283 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10284 else if (fold_real_zero_addition_p (TREE_TYPE (arg0
), arg1
, 1))
10285 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10287 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10288 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10289 (-ARG1 + ARG0) reduces to -ARG1. */
10290 else if (fold_real_zero_addition_p (TREE_TYPE (arg1
), arg0
, 0))
10291 return negate_expr (fold_convert_loc (loc
, type
, arg1
));
10293 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10294 __complex__ ( x, -y ). This is not the same for SNaNs or if
10295 signed zeros are involved. */
10296 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
10297 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10298 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10300 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10301 tree arg0r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg0
);
10302 tree arg0i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
);
10303 bool arg0rz
= false, arg0iz
= false;
10304 if ((arg0r
&& (arg0rz
= real_zerop (arg0r
)))
10305 || (arg0i
&& (arg0iz
= real_zerop (arg0i
))))
10307 tree arg1r
= fold_unary_loc (loc
, REALPART_EXPR
, rtype
, arg1
);
10308 tree arg1i
= fold_unary_loc (loc
, IMAGPART_EXPR
, rtype
, arg1
);
10309 if (arg0rz
&& arg1i
&& real_zerop (arg1i
))
10311 tree rp
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
10313 : build1 (REALPART_EXPR
, rtype
, arg1
));
10314 tree ip
= arg0i
? arg0i
10315 : build1 (IMAGPART_EXPR
, rtype
, arg0
);
10316 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10318 else if (arg0iz
&& arg1r
&& real_zerop (arg1r
))
10320 tree rp
= arg0r
? arg0r
10321 : build1 (REALPART_EXPR
, rtype
, arg0
);
10322 tree ip
= fold_build1_loc (loc
, NEGATE_EXPR
, rtype
,
10324 : build1 (IMAGPART_EXPR
, rtype
, arg1
));
10325 return fold_build2_loc (loc
, COMPLEX_EXPR
, type
, rp
, ip
);
10330 /* Fold &x - &x. This can happen from &x.foo - &x.
10331 This is unsafe for certain floats even in non-IEEE formats.
10332 In IEEE, it is unsafe because it does wrong for NaNs.
10333 Also note that operand_equal_p is always false if an operand
10336 if ((!FLOAT_TYPE_P (type
) || !HONOR_NANS (TYPE_MODE (type
)))
10337 && operand_equal_p (arg0
, arg1
, 0))
10338 return build_zero_cst (type
);
10340 /* A - B -> A + (-B) if B is easily negatable. */
10341 if (negate_expr_p (arg1
)
10342 && ((FLOAT_TYPE_P (type
)
10343 /* Avoid this transformation if B is a positive REAL_CST. */
10344 && (TREE_CODE (arg1
) != REAL_CST
10345 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1
))))
10346 || INTEGRAL_TYPE_P (type
)))
10347 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
10348 fold_convert_loc (loc
, type
, arg0
),
10349 fold_convert_loc (loc
, type
,
10350 negate_expr (arg1
)));
10352 /* Try folding difference of addresses. */
10354 HOST_WIDE_INT diff
;
10356 if ((TREE_CODE (arg0
) == ADDR_EXPR
10357 || TREE_CODE (arg1
) == ADDR_EXPR
)
10358 && ptr_difference_const (arg0
, arg1
, &diff
))
10359 return build_int_cst_type (type
, diff
);
10362 /* Fold &a[i] - &a[j] to i-j. */
10363 if (TREE_CODE (arg0
) == ADDR_EXPR
10364 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == ARRAY_REF
10365 && TREE_CODE (arg1
) == ADDR_EXPR
10366 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == ARRAY_REF
)
10368 tree aref0
= TREE_OPERAND (arg0
, 0);
10369 tree aref1
= TREE_OPERAND (arg1
, 0);
10370 if (operand_equal_p (TREE_OPERAND (aref0
, 0),
10371 TREE_OPERAND (aref1
, 0), 0))
10373 tree op0
= fold_convert_loc (loc
, type
, TREE_OPERAND (aref0
, 1));
10374 tree op1
= fold_convert_loc (loc
, type
, TREE_OPERAND (aref1
, 1));
10375 tree esz
= array_ref_element_size (aref0
);
10376 tree diff
= build2 (MINUS_EXPR
, type
, op0
, op1
);
10377 return fold_build2_loc (loc
, MULT_EXPR
, type
, diff
,
10378 fold_convert_loc (loc
, type
, esz
));
10383 if (FLOAT_TYPE_P (type
)
10384 && flag_unsafe_math_optimizations
10385 && (TREE_CODE (arg0
) == RDIV_EXPR
|| TREE_CODE (arg0
) == MULT_EXPR
)
10386 && (TREE_CODE (arg1
) == RDIV_EXPR
|| TREE_CODE (arg1
) == MULT_EXPR
)
10387 && (tem
= distribute_real_division (loc
, code
, type
, arg0
, arg1
)))
10390 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10391 same or one. Make sure type is not saturating.
10392 fold_plusminus_mult_expr will re-associate. */
10393 if ((TREE_CODE (arg0
) == MULT_EXPR
10394 || TREE_CODE (arg1
) == MULT_EXPR
)
10395 && !TYPE_SATURATING (type
)
10396 && (!FLOAT_TYPE_P (type
) || flag_associative_math
))
10398 tree tem
= fold_plusminus_mult_expr (loc
, code
, type
, arg0
, arg1
);
10406 /* (-A) * (-B) -> A * B */
10407 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
10408 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10409 fold_convert_loc (loc
, type
,
10410 TREE_OPERAND (arg0
, 0)),
10411 fold_convert_loc (loc
, type
,
10412 negate_expr (arg1
)));
10413 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
10414 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10415 fold_convert_loc (loc
, type
,
10416 negate_expr (arg0
)),
10417 fold_convert_loc (loc
, type
,
10418 TREE_OPERAND (arg1
, 0)));
10420 if (! FLOAT_TYPE_P (type
))
10422 if (integer_zerop (arg1
))
10423 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
10424 if (integer_onep (arg1
))
10425 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10426 /* Transform x * -1 into -x. Make sure to do the negation
10427 on the original operand with conversions not stripped
10428 because we can only strip non-sign-changing conversions. */
10429 if (integer_all_onesp (arg1
))
10430 return fold_convert_loc (loc
, type
, negate_expr (op0
));
10431 /* Transform x * -C into -x * C if x is easily negatable. */
10432 if (TREE_CODE (arg1
) == INTEGER_CST
10433 && tree_int_cst_sgn (arg1
) == -1
10434 && negate_expr_p (arg0
)
10435 && (tem
= negate_expr (arg1
)) != arg1
10436 && !TREE_OVERFLOW (tem
))
10437 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10438 fold_convert_loc (loc
, type
,
10439 negate_expr (arg0
)),
10442 /* (a * (1 << b)) is (a << b) */
10443 if (TREE_CODE (arg1
) == LSHIFT_EXPR
10444 && integer_onep (TREE_OPERAND (arg1
, 0)))
10445 return fold_build2_loc (loc
, LSHIFT_EXPR
, type
, op0
,
10446 TREE_OPERAND (arg1
, 1));
10447 if (TREE_CODE (arg0
) == LSHIFT_EXPR
10448 && integer_onep (TREE_OPERAND (arg0
, 0)))
10449 return fold_build2_loc (loc
, LSHIFT_EXPR
, type
, op1
,
10450 TREE_OPERAND (arg0
, 1));
10452 /* (A + A) * C -> A * 2 * C */
10453 if (TREE_CODE (arg0
) == PLUS_EXPR
10454 && TREE_CODE (arg1
) == INTEGER_CST
10455 && operand_equal_p (TREE_OPERAND (arg0
, 0),
10456 TREE_OPERAND (arg0
, 1), 0))
10457 return fold_build2_loc (loc
, MULT_EXPR
, type
,
10458 omit_one_operand_loc (loc
, type
,
10459 TREE_OPERAND (arg0
, 0),
10460 TREE_OPERAND (arg0
, 1)),
10461 fold_build2_loc (loc
, MULT_EXPR
, type
,
10462 build_int_cst (type
, 2) , arg1
));
10464 strict_overflow_p
= false;
10465 if (TREE_CODE (arg1
) == INTEGER_CST
10466 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
10467 &strict_overflow_p
)))
10469 if (strict_overflow_p
)
10470 fold_overflow_warning (("assuming signed overflow does not "
10471 "occur when simplifying "
10473 WARN_STRICT_OVERFLOW_MISC
);
10474 return fold_convert_loc (loc
, type
, tem
);
10477 /* Optimize z * conj(z) for integer complex numbers. */
10478 if (TREE_CODE (arg0
) == CONJ_EXPR
10479 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10480 return fold_mult_zconjz (loc
, type
, arg1
);
10481 if (TREE_CODE (arg1
) == CONJ_EXPR
10482 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10483 return fold_mult_zconjz (loc
, type
, arg0
);
10487 /* Maybe fold x * 0 to 0. The expressions aren't the same
10488 when x is NaN, since x * 0 is also NaN. Nor are they the
10489 same in modes with signed zeros, since multiplying a
10490 negative value by 0 gives -0, not +0. */
10491 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
10492 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10493 && real_zerop (arg1
))
10494 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
10495 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10496 Likewise for complex arithmetic with signed zeros. */
10497 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
10498 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10499 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10500 && real_onep (arg1
))
10501 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10503 /* Transform x * -1.0 into -x. */
10504 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
10505 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10506 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
)))
10507 && real_minus_onep (arg1
))
10508 return fold_convert_loc (loc
, type
, negate_expr (arg0
));
10510 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10511 the result for floating point types due to rounding so it is applied
10512 only if -fassociative-math was specify. */
10513 if (flag_associative_math
10514 && TREE_CODE (arg0
) == RDIV_EXPR
10515 && TREE_CODE (arg1
) == REAL_CST
10516 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == REAL_CST
)
10518 tree tem
= const_binop (MULT_EXPR
, TREE_OPERAND (arg0
, 0),
10521 return fold_build2_loc (loc
, RDIV_EXPR
, type
, tem
,
10522 TREE_OPERAND (arg0
, 1));
10525 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10526 if (operand_equal_p (arg0
, arg1
, 0))
10528 tree tem
= fold_strip_sign_ops (arg0
);
10529 if (tem
!= NULL_TREE
)
10531 tem
= fold_convert_loc (loc
, type
, tem
);
10532 return fold_build2_loc (loc
, MULT_EXPR
, type
, tem
, tem
);
10536 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10537 This is not the same for NaNs or if signed zeros are
10539 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
10540 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0
)))
10541 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
))
10542 && TREE_CODE (arg1
) == COMPLEX_CST
10543 && real_zerop (TREE_REALPART (arg1
)))
10545 tree rtype
= TREE_TYPE (TREE_TYPE (arg0
));
10546 if (real_onep (TREE_IMAGPART (arg1
)))
10548 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
10549 negate_expr (fold_build1_loc (loc
, IMAGPART_EXPR
,
10551 fold_build1_loc (loc
, REALPART_EXPR
, rtype
, arg0
));
10552 else if (real_minus_onep (TREE_IMAGPART (arg1
)))
10554 fold_build2_loc (loc
, COMPLEX_EXPR
, type
,
10555 fold_build1_loc (loc
, IMAGPART_EXPR
, rtype
, arg0
),
10556 negate_expr (fold_build1_loc (loc
, REALPART_EXPR
,
10560 /* Optimize z * conj(z) for floating point complex numbers.
10561 Guarded by flag_unsafe_math_optimizations as non-finite
10562 imaginary components don't produce scalar results. */
10563 if (flag_unsafe_math_optimizations
10564 && TREE_CODE (arg0
) == CONJ_EXPR
10565 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10566 return fold_mult_zconjz (loc
, type
, arg1
);
10567 if (flag_unsafe_math_optimizations
10568 && TREE_CODE (arg1
) == CONJ_EXPR
10569 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10570 return fold_mult_zconjz (loc
, type
, arg0
);
10572 if (flag_unsafe_math_optimizations
)
10574 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
10575 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
10577 /* Optimizations of root(...)*root(...). */
10578 if (fcode0
== fcode1
&& BUILTIN_ROOT_P (fcode0
))
10581 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10582 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
10584 /* Optimize sqrt(x)*sqrt(x) as x. */
10585 if (BUILTIN_SQRT_P (fcode0
)
10586 && operand_equal_p (arg00
, arg10
, 0)
10587 && ! HONOR_SNANS (TYPE_MODE (type
)))
10590 /* Optimize root(x)*root(y) as root(x*y). */
10591 rootfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10592 arg
= fold_build2_loc (loc
, MULT_EXPR
, type
, arg00
, arg10
);
10593 return build_call_expr_loc (loc
, rootfn
, 1, arg
);
10596 /* Optimize expN(x)*expN(y) as expN(x+y). */
10597 if (fcode0
== fcode1
&& BUILTIN_EXPONENT_P (fcode0
))
10599 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10600 tree arg
= fold_build2_loc (loc
, PLUS_EXPR
, type
,
10601 CALL_EXPR_ARG (arg0
, 0),
10602 CALL_EXPR_ARG (arg1
, 0));
10603 return build_call_expr_loc (loc
, expfn
, 1, arg
);
10606 /* Optimizations of pow(...)*pow(...). */
10607 if ((fcode0
== BUILT_IN_POW
&& fcode1
== BUILT_IN_POW
)
10608 || (fcode0
== BUILT_IN_POWF
&& fcode1
== BUILT_IN_POWF
)
10609 || (fcode0
== BUILT_IN_POWL
&& fcode1
== BUILT_IN_POWL
))
10611 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10612 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
10613 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
10614 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
10616 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10617 if (operand_equal_p (arg01
, arg11
, 0))
10619 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10620 tree arg
= fold_build2_loc (loc
, MULT_EXPR
, type
,
10622 return build_call_expr_loc (loc
, powfn
, 2, arg
, arg01
);
10625 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10626 if (operand_equal_p (arg00
, arg10
, 0))
10628 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10629 tree arg
= fold_build2_loc (loc
, PLUS_EXPR
, type
,
10631 return build_call_expr_loc (loc
, powfn
, 2, arg00
, arg
);
10635 /* Optimize tan(x)*cos(x) as sin(x). */
10636 if (((fcode0
== BUILT_IN_TAN
&& fcode1
== BUILT_IN_COS
)
10637 || (fcode0
== BUILT_IN_TANF
&& fcode1
== BUILT_IN_COSF
)
10638 || (fcode0
== BUILT_IN_TANL
&& fcode1
== BUILT_IN_COSL
)
10639 || (fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_TAN
)
10640 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_TANF
)
10641 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_TANL
))
10642 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
10643 CALL_EXPR_ARG (arg1
, 0), 0))
10645 tree sinfn
= mathfn_built_in (type
, BUILT_IN_SIN
);
10647 if (sinfn
!= NULL_TREE
)
10648 return build_call_expr_loc (loc
, sinfn
, 1,
10649 CALL_EXPR_ARG (arg0
, 0));
10652 /* Optimize x*pow(x,c) as pow(x,c+1). */
10653 if (fcode1
== BUILT_IN_POW
10654 || fcode1
== BUILT_IN_POWF
10655 || fcode1
== BUILT_IN_POWL
)
10657 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
10658 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
10659 if (TREE_CODE (arg11
) == REAL_CST
10660 && !TREE_OVERFLOW (arg11
)
10661 && operand_equal_p (arg0
, arg10
, 0))
10663 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
10667 c
= TREE_REAL_CST (arg11
);
10668 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
10669 arg
= build_real (type
, c
);
10670 return build_call_expr_loc (loc
, powfn
, 2, arg0
, arg
);
10674 /* Optimize pow(x,c)*x as pow(x,c+1). */
10675 if (fcode0
== BUILT_IN_POW
10676 || fcode0
== BUILT_IN_POWF
10677 || fcode0
== BUILT_IN_POWL
)
10679 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
10680 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
10681 if (TREE_CODE (arg01
) == REAL_CST
10682 && !TREE_OVERFLOW (arg01
)
10683 && operand_equal_p (arg1
, arg00
, 0))
10685 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
10689 c
= TREE_REAL_CST (arg01
);
10690 real_arithmetic (&c
, PLUS_EXPR
, &c
, &dconst1
);
10691 arg
= build_real (type
, c
);
10692 return build_call_expr_loc (loc
, powfn
, 2, arg1
, arg
);
10696 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10697 if (!in_gimple_form
10699 && operand_equal_p (arg0
, arg1
, 0))
10701 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
10705 tree arg
= build_real (type
, dconst2
);
10706 return build_call_expr_loc (loc
, powfn
, 2, arg0
, arg
);
10715 if (integer_all_onesp (arg1
))
10716 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
10717 if (integer_zerop (arg1
))
10718 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10719 if (operand_equal_p (arg0
, arg1
, 0))
10720 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10722 /* ~X | X is -1. */
10723 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
10724 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10726 t1
= build_zero_cst (type
);
10727 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
10728 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
10731 /* X | ~X is -1. */
10732 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
10733 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10735 t1
= build_zero_cst (type
);
10736 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
10737 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
10740 /* Canonicalize (X & C1) | C2. */
10741 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10742 && TREE_CODE (arg1
) == INTEGER_CST
10743 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
10745 unsigned HOST_WIDE_INT hi1
, lo1
, hi2
, lo2
, hi3
, lo3
, mlo
, mhi
;
10746 int width
= TYPE_PRECISION (type
), w
;
10747 hi1
= TREE_INT_CST_HIGH (TREE_OPERAND (arg0
, 1));
10748 lo1
= TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1));
10749 hi2
= TREE_INT_CST_HIGH (arg1
);
10750 lo2
= TREE_INT_CST_LOW (arg1
);
10752 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10753 if ((hi1
& hi2
) == hi1
&& (lo1
& lo2
) == lo1
)
10754 return omit_one_operand_loc (loc
, type
, arg1
,
10755 TREE_OPERAND (arg0
, 0));
10757 if (width
> HOST_BITS_PER_WIDE_INT
)
10759 mhi
= (unsigned HOST_WIDE_INT
) -1
10760 >> (2 * HOST_BITS_PER_WIDE_INT
- width
);
10766 mlo
= (unsigned HOST_WIDE_INT
) -1
10767 >> (HOST_BITS_PER_WIDE_INT
- width
);
10770 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10771 if ((~(hi1
| hi2
) & mhi
) == 0 && (~(lo1
| lo2
) & mlo
) == 0)
10772 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
,
10773 TREE_OPERAND (arg0
, 0), arg1
);
10775 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10776 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10777 mode which allows further optimizations. */
10784 for (w
= BITS_PER_UNIT
;
10785 w
<= width
&& w
<= HOST_BITS_PER_WIDE_INT
;
10788 unsigned HOST_WIDE_INT mask
10789 = (unsigned HOST_WIDE_INT
) -1 >> (HOST_BITS_PER_WIDE_INT
- w
);
10790 if (((lo1
| lo2
) & mask
) == mask
10791 && (lo1
& ~mask
) == 0 && hi1
== 0)
10798 if (hi3
!= hi1
|| lo3
!= lo1
)
10799 return fold_build2_loc (loc
, BIT_IOR_EXPR
, type
,
10800 fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10801 TREE_OPERAND (arg0
, 0),
10802 build_int_cst_wide (type
,
10807 /* (X & Y) | Y is (X, Y). */
10808 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10809 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
10810 return omit_one_operand_loc (loc
, type
, arg1
, TREE_OPERAND (arg0
, 0));
10811 /* (X & Y) | X is (Y, X). */
10812 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10813 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
10814 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
10815 return omit_one_operand_loc (loc
, type
, arg1
, TREE_OPERAND (arg0
, 1));
10816 /* X | (X & Y) is (Y, X). */
10817 if (TREE_CODE (arg1
) == BIT_AND_EXPR
10818 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0)
10819 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 1)))
10820 return omit_one_operand_loc (loc
, type
, arg0
, TREE_OPERAND (arg1
, 1));
10821 /* X | (Y & X) is (Y, X). */
10822 if (TREE_CODE (arg1
) == BIT_AND_EXPR
10823 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
10824 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
10825 return omit_one_operand_loc (loc
, type
, arg0
, TREE_OPERAND (arg1
, 0));
10827 /* (X & ~Y) | (~X & Y) is X ^ Y */
10828 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10829 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
10831 tree a0
, a1
, l0
, l1
, n0
, n1
;
10833 a0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
10834 a1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
10836 l0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10837 l1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
10839 n0
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, l0
);
10840 n1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, l1
);
10842 if ((operand_equal_p (n0
, a0
, 0)
10843 && operand_equal_p (n1
, a1
, 0))
10844 || (operand_equal_p (n0
, a1
, 0)
10845 && operand_equal_p (n1
, a0
, 0)))
10846 return fold_build2_loc (loc
, BIT_XOR_EXPR
, type
, l0
, n1
);
10849 t1
= distribute_bit_expr (loc
, code
, type
, arg0
, arg1
);
10850 if (t1
!= NULL_TREE
)
10853 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10855 This results in more efficient code for machines without a NAND
10856 instruction. Combine will canonicalize to the first form
10857 which will allow use of NAND instructions provided by the
10858 backend if they exist. */
10859 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
10860 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
10863 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
10864 build2 (BIT_AND_EXPR
, type
,
10865 fold_convert_loc (loc
, type
,
10866 TREE_OPERAND (arg0
, 0)),
10867 fold_convert_loc (loc
, type
,
10868 TREE_OPERAND (arg1
, 0))));
10871 /* See if this can be simplified into a rotate first. If that
10872 is unsuccessful continue in the association code. */
10876 if (integer_zerop (arg1
))
10877 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
10878 if (integer_all_onesp (arg1
))
10879 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, op0
);
10880 if (operand_equal_p (arg0
, arg1
, 0))
10881 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
10883 /* ~X ^ X is -1. */
10884 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
10885 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10887 t1
= build_zero_cst (type
);
10888 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
10889 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
10892 /* X ^ ~X is -1. */
10893 if (TREE_CODE (arg1
) == BIT_NOT_EXPR
10894 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
10896 t1
= build_zero_cst (type
);
10897 t1
= fold_unary_loc (loc
, BIT_NOT_EXPR
, type
, t1
);
10898 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
10901 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10902 with a constant, and the two constants have no bits in common,
10903 we should treat this as a BIT_IOR_EXPR since this may produce more
10904 simplifications. */
10905 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10906 && TREE_CODE (arg1
) == BIT_AND_EXPR
10907 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
10908 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
10909 && integer_zerop (const_binop (BIT_AND_EXPR
,
10910 TREE_OPERAND (arg0
, 1),
10911 TREE_OPERAND (arg1
, 1))))
10913 code
= BIT_IOR_EXPR
;
10917 /* (X | Y) ^ X -> Y & ~ X*/
10918 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
10919 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
10921 tree t2
= TREE_OPERAND (arg0
, 1);
10922 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg1
),
10924 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10925 fold_convert_loc (loc
, type
, t2
),
10926 fold_convert_loc (loc
, type
, t1
));
10930 /* (Y | X) ^ X -> Y & ~ X*/
10931 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
10932 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
10934 tree t2
= TREE_OPERAND (arg0
, 0);
10935 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg1
),
10937 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10938 fold_convert_loc (loc
, type
, t2
),
10939 fold_convert_loc (loc
, type
, t1
));
10943 /* X ^ (X | Y) -> Y & ~ X*/
10944 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
10945 && operand_equal_p (TREE_OPERAND (arg1
, 0), arg0
, 0))
10947 tree t2
= TREE_OPERAND (arg1
, 1);
10948 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg0
),
10950 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10951 fold_convert_loc (loc
, type
, t2
),
10952 fold_convert_loc (loc
, type
, t1
));
10956 /* X ^ (Y | X) -> Y & ~ X*/
10957 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
10958 && operand_equal_p (TREE_OPERAND (arg1
, 1), arg0
, 0))
10960 tree t2
= TREE_OPERAND (arg1
, 0);
10961 t1
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg0
),
10963 t1
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10964 fold_convert_loc (loc
, type
, t2
),
10965 fold_convert_loc (loc
, type
, t1
));
10969 /* Convert ~X ^ ~Y to X ^ Y. */
10970 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
10971 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
10972 return fold_build2_loc (loc
, code
, type
,
10973 fold_convert_loc (loc
, type
,
10974 TREE_OPERAND (arg0
, 0)),
10975 fold_convert_loc (loc
, type
,
10976 TREE_OPERAND (arg1
, 0)));
10978 /* Convert ~X ^ C to X ^ ~C. */
10979 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
10980 && TREE_CODE (arg1
) == INTEGER_CST
)
10981 return fold_build2_loc (loc
, code
, type
,
10982 fold_convert_loc (loc
, type
,
10983 TREE_OPERAND (arg0
, 0)),
10984 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, arg1
));
10986 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10987 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10988 && integer_onep (TREE_OPERAND (arg0
, 1))
10989 && integer_onep (arg1
))
10990 return fold_build2_loc (loc
, EQ_EXPR
, type
, arg0
,
10991 build_int_cst (TREE_TYPE (arg0
), 0));
10993 /* Fold (X & Y) ^ Y as ~X & Y. */
10994 if (TREE_CODE (arg0
) == BIT_AND_EXPR
10995 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
10997 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
10998 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
10999 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11000 fold_convert_loc (loc
, type
, arg1
));
11002 /* Fold (X & Y) ^ X as ~Y & X. */
11003 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11004 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11005 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
11007 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11008 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11009 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11010 fold_convert_loc (loc
, type
, arg1
));
11012 /* Fold X ^ (X & Y) as X & ~Y. */
11013 if (TREE_CODE (arg1
) == BIT_AND_EXPR
11014 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11016 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
11017 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11018 fold_convert_loc (loc
, type
, arg0
),
11019 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
));
11021 /* Fold X ^ (Y & X) as ~Y & X. */
11022 if (TREE_CODE (arg1
) == BIT_AND_EXPR
11023 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
11024 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
11026 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
11027 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11028 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11029 fold_convert_loc (loc
, type
, arg0
));
11032 /* See if this can be simplified into a rotate first. If that
11033 is unsuccessful continue in the association code. */
11037 if (integer_all_onesp (arg1
))
11038 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11039 if (integer_zerop (arg1
))
11040 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
11041 if (operand_equal_p (arg0
, arg1
, 0))
11042 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11044 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11045 if ((TREE_CODE (arg0
) == BIT_NOT_EXPR
11046 || TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11047 || (TREE_CODE (arg0
) == EQ_EXPR
11048 && integer_zerop (TREE_OPERAND (arg0
, 1))))
11049 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
11050 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
11052 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11053 if ((TREE_CODE (arg1
) == BIT_NOT_EXPR
11054 || TREE_CODE (arg1
) == TRUTH_NOT_EXPR
11055 || (TREE_CODE (arg1
) == EQ_EXPR
11056 && integer_zerop (TREE_OPERAND (arg1
, 1))))
11057 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11058 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
11060 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11061 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11062 && TREE_CODE (arg1
) == INTEGER_CST
11063 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
11065 tree tmp1
= fold_convert_loc (loc
, type
, arg1
);
11066 tree tmp2
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11067 tree tmp3
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11068 tmp2
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
, tmp2
, tmp1
);
11069 tmp3
= fold_build2_loc (loc
, BIT_AND_EXPR
, type
, tmp3
, tmp1
);
11071 fold_convert_loc (loc
, type
,
11072 fold_build2_loc (loc
, BIT_IOR_EXPR
,
11073 type
, tmp2
, tmp3
));
11076 /* (X | Y) & Y is (X, Y). */
11077 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11078 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11079 return omit_one_operand_loc (loc
, type
, arg1
, TREE_OPERAND (arg0
, 0));
11080 /* (X | Y) & X is (Y, X). */
11081 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
11082 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11083 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
11084 return omit_one_operand_loc (loc
, type
, arg1
, TREE_OPERAND (arg0
, 1));
11085 /* X & (X | Y) is (Y, X). */
11086 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
11087 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0)
11088 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 1)))
11089 return omit_one_operand_loc (loc
, type
, arg0
, TREE_OPERAND (arg1
, 1));
11090 /* X & (Y | X) is (Y, X). */
11091 if (TREE_CODE (arg1
) == BIT_IOR_EXPR
11092 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
11093 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
11094 return omit_one_operand_loc (loc
, type
, arg0
, TREE_OPERAND (arg1
, 0));
11096 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11097 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11098 && integer_onep (TREE_OPERAND (arg0
, 1))
11099 && integer_onep (arg1
))
11101 tem
= TREE_OPERAND (arg0
, 0);
11102 return fold_build2_loc (loc
, EQ_EXPR
, type
,
11103 fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
), tem
,
11104 build_int_cst (TREE_TYPE (tem
), 1)),
11105 build_int_cst (TREE_TYPE (tem
), 0));
11107 /* Fold ~X & 1 as (X & 1) == 0. */
11108 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11109 && integer_onep (arg1
))
11111 tem
= TREE_OPERAND (arg0
, 0);
11112 return fold_build2_loc (loc
, EQ_EXPR
, type
,
11113 fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (tem
), tem
,
11114 build_int_cst (TREE_TYPE (tem
), 1)),
11115 build_int_cst (TREE_TYPE (tem
), 0));
11117 /* Fold !X & 1 as X == 0. */
11118 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
11119 && integer_onep (arg1
))
11121 tem
= TREE_OPERAND (arg0
, 0);
11122 return fold_build2_loc (loc
, EQ_EXPR
, type
, tem
,
11123 build_int_cst (TREE_TYPE (tem
), 0));
11126 /* Fold (X ^ Y) & Y as ~X & Y. */
11127 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11128 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
11130 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11131 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11132 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11133 fold_convert_loc (loc
, type
, arg1
));
11135 /* Fold (X ^ Y) & X as ~Y & X. */
11136 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
11137 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
11138 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
11140 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
11141 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11142 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11143 fold_convert_loc (loc
, type
, arg1
));
11145 /* Fold X & (X ^ Y) as X & ~Y. */
11146 if (TREE_CODE (arg1
) == BIT_XOR_EXPR
11147 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
11149 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
11150 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11151 fold_convert_loc (loc
, type
, arg0
),
11152 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
));
11154 /* Fold X & (Y ^ X) as ~Y & X. */
11155 if (TREE_CODE (arg1
) == BIT_XOR_EXPR
11156 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 1), 0)
11157 && reorder_operands_p (arg0
, TREE_OPERAND (arg1
, 0)))
11159 tem
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
11160 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11161 fold_build1_loc (loc
, BIT_NOT_EXPR
, type
, tem
),
11162 fold_convert_loc (loc
, type
, arg0
));
11165 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11166 ((A & N) + B) & M -> (A + B) & M
11167 Similarly if (N & M) == 0,
11168 ((A | N) + B) & M -> (A + B) & M
11169 and for - instead of + (or unary - instead of +)
11170 and/or ^ instead of |.
11171 If B is constant and (B & M) == 0, fold into A & M. */
11172 if (host_integerp (arg1
, 1))
11174 unsigned HOST_WIDE_INT cst1
= tree_low_cst (arg1
, 1);
11175 if (~cst1
&& (cst1
& (cst1
+ 1)) == 0
11176 && INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
11177 && (TREE_CODE (arg0
) == PLUS_EXPR
11178 || TREE_CODE (arg0
) == MINUS_EXPR
11179 || TREE_CODE (arg0
) == NEGATE_EXPR
)
11180 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
))
11181 || TREE_CODE (TREE_TYPE (arg0
)) == INTEGER_TYPE
))
11185 unsigned HOST_WIDE_INT cst0
;
11187 /* Now we know that arg0 is (C + D) or (C - D) or
11188 -C and arg1 (M) is == (1LL << cst) - 1.
11189 Store C into PMOP[0] and D into PMOP[1]. */
11190 pmop
[0] = TREE_OPERAND (arg0
, 0);
11192 if (TREE_CODE (arg0
) != NEGATE_EXPR
)
11194 pmop
[1] = TREE_OPERAND (arg0
, 1);
11198 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0
)), 1)
11199 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0
)), 1)
11203 for (; which
>= 0; which
--)
11204 switch (TREE_CODE (pmop
[which
]))
11209 if (TREE_CODE (TREE_OPERAND (pmop
[which
], 1))
11212 /* tree_low_cst not used, because we don't care about
11214 cst0
= TREE_INT_CST_LOW (TREE_OPERAND (pmop
[which
], 1));
11216 if (TREE_CODE (pmop
[which
]) == BIT_AND_EXPR
)
11221 else if (cst0
!= 0)
11223 /* If C or D is of the form (A & N) where
11224 (N & M) == M, or of the form (A | N) or
11225 (A ^ N) where (N & M) == 0, replace it with A. */
11226 pmop
[which
] = TREE_OPERAND (pmop
[which
], 0);
11229 /* If C or D is a N where (N & M) == 0, it can be
11230 omitted (assumed 0). */
11231 if ((TREE_CODE (arg0
) == PLUS_EXPR
11232 || (TREE_CODE (arg0
) == MINUS_EXPR
&& which
== 0))
11233 && (TREE_INT_CST_LOW (pmop
[which
]) & cst1
) == 0)
11234 pmop
[which
] = NULL
;
11240 /* Only build anything new if we optimized one or both arguments
11242 if (pmop
[0] != TREE_OPERAND (arg0
, 0)
11243 || (TREE_CODE (arg0
) != NEGATE_EXPR
11244 && pmop
[1] != TREE_OPERAND (arg0
, 1)))
11246 tree utype
= TREE_TYPE (arg0
);
11247 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0
)))
11249 /* Perform the operations in a type that has defined
11250 overflow behavior. */
11251 utype
= unsigned_type_for (TREE_TYPE (arg0
));
11252 if (pmop
[0] != NULL
)
11253 pmop
[0] = fold_convert_loc (loc
, utype
, pmop
[0]);
11254 if (pmop
[1] != NULL
)
11255 pmop
[1] = fold_convert_loc (loc
, utype
, pmop
[1]);
11258 if (TREE_CODE (arg0
) == NEGATE_EXPR
)
11259 tem
= fold_build1_loc (loc
, NEGATE_EXPR
, utype
, pmop
[0]);
11260 else if (TREE_CODE (arg0
) == PLUS_EXPR
)
11262 if (pmop
[0] != NULL
&& pmop
[1] != NULL
)
11263 tem
= fold_build2_loc (loc
, PLUS_EXPR
, utype
,
11265 else if (pmop
[0] != NULL
)
11267 else if (pmop
[1] != NULL
)
11270 return build_int_cst (type
, 0);
11272 else if (pmop
[0] == NULL
)
11273 tem
= fold_build1_loc (loc
, NEGATE_EXPR
, utype
, pmop
[1]);
11275 tem
= fold_build2_loc (loc
, MINUS_EXPR
, utype
,
11277 /* TEM is now the new binary +, - or unary - replacement. */
11278 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, utype
, tem
,
11279 fold_convert_loc (loc
, utype
, arg1
));
11280 return fold_convert_loc (loc
, type
, tem
);
11285 t1
= distribute_bit_expr (loc
, code
, type
, arg0
, arg1
);
11286 if (t1
!= NULL_TREE
)
11288 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11289 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) == NOP_EXPR
11290 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0
, 0))))
11293 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0
, 0)));
11295 if (prec
< BITS_PER_WORD
&& prec
< HOST_BITS_PER_WIDE_INT
11296 && (~TREE_INT_CST_LOW (arg1
)
11297 & (((HOST_WIDE_INT
) 1 << prec
) - 1)) == 0)
11299 fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11302 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11304 This results in more efficient code for machines without a NOR
11305 instruction. Combine will canonicalize to the first form
11306 which will allow use of NOR instructions provided by the
11307 backend if they exist. */
11308 if (TREE_CODE (arg0
) == BIT_NOT_EXPR
11309 && TREE_CODE (arg1
) == BIT_NOT_EXPR
)
11311 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
11312 build2 (BIT_IOR_EXPR
, type
,
11313 fold_convert_loc (loc
, type
,
11314 TREE_OPERAND (arg0
, 0)),
11315 fold_convert_loc (loc
, type
,
11316 TREE_OPERAND (arg1
, 0))));
11319 /* If arg0 is derived from the address of an object or function, we may
11320 be able to fold this expression using the object or function's
11322 if (POINTER_TYPE_P (TREE_TYPE (arg0
)) && host_integerp (arg1
, 1))
11324 unsigned HOST_WIDE_INT modulus
, residue
;
11325 unsigned HOST_WIDE_INT low
= TREE_INT_CST_LOW (arg1
);
11327 modulus
= get_pointer_modulus_and_residue (arg0
, &residue
,
11328 integer_onep (arg1
));
11330 /* This works because modulus is a power of 2. If this weren't the
11331 case, we'd have to replace it by its greatest power-of-2
11332 divisor: modulus & -modulus. */
11334 return build_int_cst (type
, residue
& low
);
11337 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11338 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11339 if the new mask might be further optimized. */
11340 if ((TREE_CODE (arg0
) == LSHIFT_EXPR
11341 || TREE_CODE (arg0
) == RSHIFT_EXPR
)
11342 && host_integerp (TREE_OPERAND (arg0
, 1), 1)
11343 && host_integerp (arg1
, TYPE_UNSIGNED (TREE_TYPE (arg1
)))
11344 && tree_low_cst (TREE_OPERAND (arg0
, 1), 1)
11345 < TYPE_PRECISION (TREE_TYPE (arg0
))
11346 && TYPE_PRECISION (TREE_TYPE (arg0
)) <= HOST_BITS_PER_WIDE_INT
11347 && tree_low_cst (TREE_OPERAND (arg0
, 1), 1) > 0)
11349 unsigned int shiftc
= tree_low_cst (TREE_OPERAND (arg0
, 1), 1);
11350 unsigned HOST_WIDE_INT mask
11351 = tree_low_cst (arg1
, TYPE_UNSIGNED (TREE_TYPE (arg1
)));
11352 unsigned HOST_WIDE_INT newmask
, zerobits
= 0;
11353 tree shift_type
= TREE_TYPE (arg0
);
11355 if (TREE_CODE (arg0
) == LSHIFT_EXPR
)
11356 zerobits
= ((((unsigned HOST_WIDE_INT
) 1) << shiftc
) - 1);
11357 else if (TREE_CODE (arg0
) == RSHIFT_EXPR
11358 && TYPE_PRECISION (TREE_TYPE (arg0
))
11359 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0
))))
11361 unsigned int prec
= TYPE_PRECISION (TREE_TYPE (arg0
));
11362 tree arg00
= TREE_OPERAND (arg0
, 0);
11363 /* See if more bits can be proven as zero because of
11365 if (TREE_CODE (arg00
) == NOP_EXPR
11366 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00
, 0))))
11368 tree inner_type
= TREE_TYPE (TREE_OPERAND (arg00
, 0));
11369 if (TYPE_PRECISION (inner_type
)
11370 == GET_MODE_BITSIZE (TYPE_MODE (inner_type
))
11371 && TYPE_PRECISION (inner_type
) < prec
)
11373 prec
= TYPE_PRECISION (inner_type
);
11374 /* See if we can shorten the right shift. */
11376 shift_type
= inner_type
;
11379 zerobits
= ~(unsigned HOST_WIDE_INT
) 0;
11380 zerobits
>>= HOST_BITS_PER_WIDE_INT
- shiftc
;
11381 zerobits
<<= prec
- shiftc
;
11382 /* For arithmetic shift if sign bit could be set, zerobits
11383 can contain actually sign bits, so no transformation is
11384 possible, unless MASK masks them all away. In that
11385 case the shift needs to be converted into logical shift. */
11386 if (!TYPE_UNSIGNED (TREE_TYPE (arg0
))
11387 && prec
== TYPE_PRECISION (TREE_TYPE (arg0
)))
11389 if ((mask
& zerobits
) == 0)
11390 shift_type
= unsigned_type_for (TREE_TYPE (arg0
));
11396 /* ((X << 16) & 0xff00) is (X, 0). */
11397 if ((mask
& zerobits
) == mask
)
11398 return omit_one_operand_loc (loc
, type
,
11399 build_int_cst (type
, 0), arg0
);
11401 newmask
= mask
| zerobits
;
11402 if (newmask
!= mask
&& (newmask
& (newmask
+ 1)) == 0)
11406 /* Only do the transformation if NEWMASK is some integer
11408 for (prec
= BITS_PER_UNIT
;
11409 prec
< HOST_BITS_PER_WIDE_INT
; prec
<<= 1)
11410 if (newmask
== (((unsigned HOST_WIDE_INT
) 1) << prec
) - 1)
11412 if (prec
< HOST_BITS_PER_WIDE_INT
11413 || newmask
== ~(unsigned HOST_WIDE_INT
) 0)
11417 if (shift_type
!= TREE_TYPE (arg0
))
11419 tem
= fold_build2_loc (loc
, TREE_CODE (arg0
), shift_type
,
11420 fold_convert_loc (loc
, shift_type
,
11421 TREE_OPERAND (arg0
, 0)),
11422 TREE_OPERAND (arg0
, 1));
11423 tem
= fold_convert_loc (loc
, type
, tem
);
11427 newmaskt
= build_int_cst_type (TREE_TYPE (op1
), newmask
);
11428 if (!tree_int_cst_equal (newmaskt
, arg1
))
11429 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
, tem
, newmaskt
);
11437 /* Don't touch a floating-point divide by zero unless the mode
11438 of the constant can represent infinity. */
11439 if (TREE_CODE (arg1
) == REAL_CST
11440 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1
)))
11441 && real_zerop (arg1
))
11444 /* Optimize A / A to 1.0 if we don't care about
11445 NaNs or Infinities. Skip the transformation
11446 for non-real operands. */
11447 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0
))
11448 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
11449 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0
)))
11450 && operand_equal_p (arg0
, arg1
, 0))
11452 tree r
= build_real (TREE_TYPE (arg0
), dconst1
);
11454 return omit_two_operands_loc (loc
, type
, r
, arg0
, arg1
);
11457 /* The complex version of the above A / A optimization. */
11458 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0
))
11459 && operand_equal_p (arg0
, arg1
, 0))
11461 tree elem_type
= TREE_TYPE (TREE_TYPE (arg0
));
11462 if (! HONOR_NANS (TYPE_MODE (elem_type
))
11463 && ! HONOR_INFINITIES (TYPE_MODE (elem_type
)))
11465 tree r
= build_real (elem_type
, dconst1
);
11466 /* omit_two_operands will call fold_convert for us. */
11467 return omit_two_operands_loc (loc
, type
, r
, arg0
, arg1
);
11471 /* (-A) / (-B) -> A / B */
11472 if (TREE_CODE (arg0
) == NEGATE_EXPR
&& negate_expr_p (arg1
))
11473 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11474 TREE_OPERAND (arg0
, 0),
11475 negate_expr (arg1
));
11476 if (TREE_CODE (arg1
) == NEGATE_EXPR
&& negate_expr_p (arg0
))
11477 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11478 negate_expr (arg0
),
11479 TREE_OPERAND (arg1
, 0));
11481 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11482 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
11483 && real_onep (arg1
))
11484 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11486 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11487 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
11488 && real_minus_onep (arg1
))
11489 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
,
11490 negate_expr (arg0
)));
11492 /* If ARG1 is a constant, we can convert this to a multiply by the
11493 reciprocal. This does not have the same rounding properties,
11494 so only do this if -freciprocal-math. We can actually
11495 always safely do it if ARG1 is a power of two, but it's hard to
11496 tell if it is or not in a portable manner. */
11497 if (TREE_CODE (arg1
) == REAL_CST
)
11499 if (flag_reciprocal_math
11500 && 0 != (tem
= const_binop (code
, build_real (type
, dconst1
),
11502 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, tem
);
11503 /* Find the reciprocal if optimizing and the result is exact. */
11507 r
= TREE_REAL_CST (arg1
);
11508 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0
)), &r
))
11510 tem
= build_real (type
, r
);
11511 return fold_build2_loc (loc
, MULT_EXPR
, type
,
11512 fold_convert_loc (loc
, type
, arg0
), tem
);
11516 /* Convert A/B/C to A/(B*C). */
11517 if (flag_reciprocal_math
11518 && TREE_CODE (arg0
) == RDIV_EXPR
)
11519 return fold_build2_loc (loc
, RDIV_EXPR
, type
, TREE_OPERAND (arg0
, 0),
11520 fold_build2_loc (loc
, MULT_EXPR
, type
,
11521 TREE_OPERAND (arg0
, 1), arg1
));
11523 /* Convert A/(B/C) to (A/B)*C. */
11524 if (flag_reciprocal_math
11525 && TREE_CODE (arg1
) == RDIV_EXPR
)
11526 return fold_build2_loc (loc
, MULT_EXPR
, type
,
11527 fold_build2_loc (loc
, RDIV_EXPR
, type
, arg0
,
11528 TREE_OPERAND (arg1
, 0)),
11529 TREE_OPERAND (arg1
, 1));
11531 /* Convert C1/(X*C2) into (C1/C2)/X. */
11532 if (flag_reciprocal_math
11533 && TREE_CODE (arg1
) == MULT_EXPR
11534 && TREE_CODE (arg0
) == REAL_CST
11535 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == REAL_CST
)
11537 tree tem
= const_binop (RDIV_EXPR
, arg0
,
11538 TREE_OPERAND (arg1
, 1));
11540 return fold_build2_loc (loc
, RDIV_EXPR
, type
, tem
,
11541 TREE_OPERAND (arg1
, 0));
11544 if (flag_unsafe_math_optimizations
)
11546 enum built_in_function fcode0
= builtin_mathfn_code (arg0
);
11547 enum built_in_function fcode1
= builtin_mathfn_code (arg1
);
11549 /* Optimize sin(x)/cos(x) as tan(x). */
11550 if (((fcode0
== BUILT_IN_SIN
&& fcode1
== BUILT_IN_COS
)
11551 || (fcode0
== BUILT_IN_SINF
&& fcode1
== BUILT_IN_COSF
)
11552 || (fcode0
== BUILT_IN_SINL
&& fcode1
== BUILT_IN_COSL
))
11553 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
11554 CALL_EXPR_ARG (arg1
, 0), 0))
11556 tree tanfn
= mathfn_built_in (type
, BUILT_IN_TAN
);
11558 if (tanfn
!= NULL_TREE
)
11559 return build_call_expr_loc (loc
, tanfn
, 1, CALL_EXPR_ARG (arg0
, 0));
11562 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11563 if (((fcode0
== BUILT_IN_COS
&& fcode1
== BUILT_IN_SIN
)
11564 || (fcode0
== BUILT_IN_COSF
&& fcode1
== BUILT_IN_SINF
)
11565 || (fcode0
== BUILT_IN_COSL
&& fcode1
== BUILT_IN_SINL
))
11566 && operand_equal_p (CALL_EXPR_ARG (arg0
, 0),
11567 CALL_EXPR_ARG (arg1
, 0), 0))
11569 tree tanfn
= mathfn_built_in (type
, BUILT_IN_TAN
);
11571 if (tanfn
!= NULL_TREE
)
11573 tree tmp
= build_call_expr_loc (loc
, tanfn
, 1,
11574 CALL_EXPR_ARG (arg0
, 0));
11575 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11576 build_real (type
, dconst1
), tmp
);
11580 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11581 NaNs or Infinities. */
11582 if (((fcode0
== BUILT_IN_SIN
&& fcode1
== BUILT_IN_TAN
)
11583 || (fcode0
== BUILT_IN_SINF
&& fcode1
== BUILT_IN_TANF
)
11584 || (fcode0
== BUILT_IN_SINL
&& fcode1
== BUILT_IN_TANL
)))
11586 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11587 tree arg01
= CALL_EXPR_ARG (arg1
, 0);
11589 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00
)))
11590 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00
)))
11591 && operand_equal_p (arg00
, arg01
, 0))
11593 tree cosfn
= mathfn_built_in (type
, BUILT_IN_COS
);
11595 if (cosfn
!= NULL_TREE
)
11596 return build_call_expr_loc (loc
, cosfn
, 1, arg00
);
11600 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11601 NaNs or Infinities. */
11602 if (((fcode0
== BUILT_IN_TAN
&& fcode1
== BUILT_IN_SIN
)
11603 || (fcode0
== BUILT_IN_TANF
&& fcode1
== BUILT_IN_SINF
)
11604 || (fcode0
== BUILT_IN_TANL
&& fcode1
== BUILT_IN_SINL
)))
11606 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11607 tree arg01
= CALL_EXPR_ARG (arg1
, 0);
11609 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00
)))
11610 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00
)))
11611 && operand_equal_p (arg00
, arg01
, 0))
11613 tree cosfn
= mathfn_built_in (type
, BUILT_IN_COS
);
11615 if (cosfn
!= NULL_TREE
)
11617 tree tmp
= build_call_expr_loc (loc
, cosfn
, 1, arg00
);
11618 return fold_build2_loc (loc
, RDIV_EXPR
, type
,
11619 build_real (type
, dconst1
),
11625 /* Optimize pow(x,c)/x as pow(x,c-1). */
11626 if (fcode0
== BUILT_IN_POW
11627 || fcode0
== BUILT_IN_POWF
11628 || fcode0
== BUILT_IN_POWL
)
11630 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
11631 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
11632 if (TREE_CODE (arg01
) == REAL_CST
11633 && !TREE_OVERFLOW (arg01
)
11634 && operand_equal_p (arg1
, arg00
, 0))
11636 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
11640 c
= TREE_REAL_CST (arg01
);
11641 real_arithmetic (&c
, MINUS_EXPR
, &c
, &dconst1
);
11642 arg
= build_real (type
, c
);
11643 return build_call_expr_loc (loc
, powfn
, 2, arg1
, arg
);
11647 /* Optimize a/root(b/c) into a*root(c/b). */
11648 if (BUILTIN_ROOT_P (fcode1
))
11650 tree rootarg
= CALL_EXPR_ARG (arg1
, 0);
11652 if (TREE_CODE (rootarg
) == RDIV_EXPR
)
11654 tree rootfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
11655 tree b
= TREE_OPERAND (rootarg
, 0);
11656 tree c
= TREE_OPERAND (rootarg
, 1);
11658 tree tmp
= fold_build2_loc (loc
, RDIV_EXPR
, type
, c
, b
);
11660 tmp
= build_call_expr_loc (loc
, rootfn
, 1, tmp
);
11661 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, tmp
);
11665 /* Optimize x/expN(y) into x*expN(-y). */
11666 if (BUILTIN_EXPONENT_P (fcode1
))
11668 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
11669 tree arg
= negate_expr (CALL_EXPR_ARG (arg1
, 0));
11670 arg1
= build_call_expr_loc (loc
,
11672 fold_convert_loc (loc
, type
, arg
));
11673 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
11676 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11677 if (fcode1
== BUILT_IN_POW
11678 || fcode1
== BUILT_IN_POWF
11679 || fcode1
== BUILT_IN_POWL
)
11681 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg1
), 0);
11682 tree arg10
= CALL_EXPR_ARG (arg1
, 0);
11683 tree arg11
= CALL_EXPR_ARG (arg1
, 1);
11684 tree neg11
= fold_convert_loc (loc
, type
,
11685 negate_expr (arg11
));
11686 arg1
= build_call_expr_loc (loc
, powfn
, 2, arg10
, neg11
);
11687 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
11692 case TRUNC_DIV_EXPR
:
11693 /* Optimize (X & (-A)) / A where A is a power of 2,
11695 if (TREE_CODE (arg0
) == BIT_AND_EXPR
11696 && !TYPE_UNSIGNED (type
) && TREE_CODE (arg1
) == INTEGER_CST
11697 && integer_pow2p (arg1
) && tree_int_cst_sgn (arg1
) > 0)
11699 tree sum
= fold_binary_loc (loc
, PLUS_EXPR
, TREE_TYPE (arg1
),
11700 arg1
, TREE_OPERAND (arg0
, 1));
11701 if (sum
&& integer_zerop (sum
)) {
11702 unsigned long pow2
;
11704 if (TREE_INT_CST_LOW (arg1
))
11705 pow2
= exact_log2 (TREE_INT_CST_LOW (arg1
));
11707 pow2
= exact_log2 (TREE_INT_CST_HIGH (arg1
))
11708 + HOST_BITS_PER_WIDE_INT
;
11710 return fold_build2_loc (loc
, RSHIFT_EXPR
, type
,
11711 TREE_OPERAND (arg0
, 0),
11712 build_int_cst (integer_type_node
, pow2
));
11718 case FLOOR_DIV_EXPR
:
11719 /* Simplify A / (B << N) where A and B are positive and B is
11720 a power of 2, to A >> (N + log2(B)). */
11721 strict_overflow_p
= false;
11722 if (TREE_CODE (arg1
) == LSHIFT_EXPR
11723 && (TYPE_UNSIGNED (type
)
11724 || tree_expr_nonnegative_warnv_p (op0
, &strict_overflow_p
)))
11726 tree sval
= TREE_OPERAND (arg1
, 0);
11727 if (integer_pow2p (sval
) && tree_int_cst_sgn (sval
) > 0)
11729 tree sh_cnt
= TREE_OPERAND (arg1
, 1);
11730 unsigned long pow2
;
11732 if (TREE_INT_CST_LOW (sval
))
11733 pow2
= exact_log2 (TREE_INT_CST_LOW (sval
));
11735 pow2
= exact_log2 (TREE_INT_CST_HIGH (sval
))
11736 + HOST_BITS_PER_WIDE_INT
;
11738 if (strict_overflow_p
)
11739 fold_overflow_warning (("assuming signed overflow does not "
11740 "occur when simplifying A / (B << N)"),
11741 WARN_STRICT_OVERFLOW_MISC
);
11743 sh_cnt
= fold_build2_loc (loc
, PLUS_EXPR
, TREE_TYPE (sh_cnt
),
11745 build_int_cst (TREE_TYPE (sh_cnt
),
11747 return fold_build2_loc (loc
, RSHIFT_EXPR
, type
,
11748 fold_convert_loc (loc
, type
, arg0
), sh_cnt
);
11752 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11753 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11754 if (INTEGRAL_TYPE_P (type
)
11755 && TYPE_UNSIGNED (type
)
11756 && code
== FLOOR_DIV_EXPR
)
11757 return fold_build2_loc (loc
, TRUNC_DIV_EXPR
, type
, op0
, op1
);
11761 case ROUND_DIV_EXPR
:
11762 case CEIL_DIV_EXPR
:
11763 case EXACT_DIV_EXPR
:
11764 if (integer_onep (arg1
))
11765 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11766 if (integer_zerop (arg1
))
11768 /* X / -1 is -X. */
11769 if (!TYPE_UNSIGNED (type
)
11770 && TREE_CODE (arg1
) == INTEGER_CST
11771 && TREE_INT_CST_LOW (arg1
) == (unsigned HOST_WIDE_INT
) -1
11772 && TREE_INT_CST_HIGH (arg1
) == -1)
11773 return fold_convert_loc (loc
, type
, negate_expr (arg0
));
11775 /* Convert -A / -B to A / B when the type is signed and overflow is
11777 if ((!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
11778 && TREE_CODE (arg0
) == NEGATE_EXPR
11779 && negate_expr_p (arg1
))
11781 if (INTEGRAL_TYPE_P (type
))
11782 fold_overflow_warning (("assuming signed overflow does not occur "
11783 "when distributing negation across "
11785 WARN_STRICT_OVERFLOW_MISC
);
11786 return fold_build2_loc (loc
, code
, type
,
11787 fold_convert_loc (loc
, type
,
11788 TREE_OPERAND (arg0
, 0)),
11789 fold_convert_loc (loc
, type
,
11790 negate_expr (arg1
)));
11792 if ((!INTEGRAL_TYPE_P (type
) || TYPE_OVERFLOW_UNDEFINED (type
))
11793 && TREE_CODE (arg1
) == NEGATE_EXPR
11794 && negate_expr_p (arg0
))
11796 if (INTEGRAL_TYPE_P (type
))
11797 fold_overflow_warning (("assuming signed overflow does not occur "
11798 "when distributing negation across "
11800 WARN_STRICT_OVERFLOW_MISC
);
11801 return fold_build2_loc (loc
, code
, type
,
11802 fold_convert_loc (loc
, type
,
11803 negate_expr (arg0
)),
11804 fold_convert_loc (loc
, type
,
11805 TREE_OPERAND (arg1
, 0)));
11808 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11809 operation, EXACT_DIV_EXPR.
11811 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11812 At one time others generated faster code, it's not clear if they do
11813 after the last round to changes to the DIV code in expmed.c. */
11814 if ((code
== CEIL_DIV_EXPR
|| code
== FLOOR_DIV_EXPR
)
11815 && multiple_of_p (type
, arg0
, arg1
))
11816 return fold_build2_loc (loc
, EXACT_DIV_EXPR
, type
, arg0
, arg1
);
11818 strict_overflow_p
= false;
11819 if (TREE_CODE (arg1
) == INTEGER_CST
11820 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
11821 &strict_overflow_p
)))
11823 if (strict_overflow_p
)
11824 fold_overflow_warning (("assuming signed overflow does not occur "
11825 "when simplifying division"),
11826 WARN_STRICT_OVERFLOW_MISC
);
11827 return fold_convert_loc (loc
, type
, tem
);
11832 case CEIL_MOD_EXPR
:
11833 case FLOOR_MOD_EXPR
:
11834 case ROUND_MOD_EXPR
:
11835 case TRUNC_MOD_EXPR
:
11836 /* X % 1 is always zero, but be sure to preserve any side
11838 if (integer_onep (arg1
))
11839 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
11841 /* X % 0, return X % 0 unchanged so that we can get the
11842 proper warnings and errors. */
11843 if (integer_zerop (arg1
))
11846 /* 0 % X is always zero, but be sure to preserve any side
11847 effects in X. Place this after checking for X == 0. */
11848 if (integer_zerop (arg0
))
11849 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
11851 /* X % -1 is zero. */
11852 if (!TYPE_UNSIGNED (type
)
11853 && TREE_CODE (arg1
) == INTEGER_CST
11854 && TREE_INT_CST_LOW (arg1
) == (unsigned HOST_WIDE_INT
) -1
11855 && TREE_INT_CST_HIGH (arg1
) == -1)
11856 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
11858 /* X % -C is the same as X % C. */
11859 if (code
== TRUNC_MOD_EXPR
11860 && !TYPE_UNSIGNED (type
)
11861 && TREE_CODE (arg1
) == INTEGER_CST
11862 && !TREE_OVERFLOW (arg1
)
11863 && TREE_INT_CST_HIGH (arg1
) < 0
11864 && !TYPE_OVERFLOW_TRAPS (type
)
11865 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11866 && !sign_bit_p (arg1
, arg1
))
11867 return fold_build2_loc (loc
, code
, type
,
11868 fold_convert_loc (loc
, type
, arg0
),
11869 fold_convert_loc (loc
, type
,
11870 negate_expr (arg1
)));
11872 /* X % -Y is the same as X % Y. */
11873 if (code
== TRUNC_MOD_EXPR
11874 && !TYPE_UNSIGNED (type
)
11875 && TREE_CODE (arg1
) == NEGATE_EXPR
11876 && !TYPE_OVERFLOW_TRAPS (type
))
11877 return fold_build2_loc (loc
, code
, type
, fold_convert_loc (loc
, type
, arg0
),
11878 fold_convert_loc (loc
, type
,
11879 TREE_OPERAND (arg1
, 0)));
11881 strict_overflow_p
= false;
11882 if (TREE_CODE (arg1
) == INTEGER_CST
11883 && 0 != (tem
= extract_muldiv (op0
, arg1
, code
, NULL_TREE
,
11884 &strict_overflow_p
)))
11886 if (strict_overflow_p
)
11887 fold_overflow_warning (("assuming signed overflow does not occur "
11888 "when simplifying modulus"),
11889 WARN_STRICT_OVERFLOW_MISC
);
11890 return fold_convert_loc (loc
, type
, tem
);
11893 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11894 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11895 if ((code
== TRUNC_MOD_EXPR
|| code
== FLOOR_MOD_EXPR
)
11896 && (TYPE_UNSIGNED (type
)
11897 || tree_expr_nonnegative_warnv_p (op0
, &strict_overflow_p
)))
11900 /* Also optimize A % (C << N) where C is a power of 2,
11901 to A & ((C << N) - 1). */
11902 if (TREE_CODE (arg1
) == LSHIFT_EXPR
)
11903 c
= TREE_OPERAND (arg1
, 0);
11905 if (integer_pow2p (c
) && tree_int_cst_sgn (c
) > 0)
11908 = fold_build2_loc (loc
, MINUS_EXPR
, TREE_TYPE (arg1
), arg1
,
11909 build_int_cst (TREE_TYPE (arg1
), 1));
11910 if (strict_overflow_p
)
11911 fold_overflow_warning (("assuming signed overflow does not "
11912 "occur when simplifying "
11913 "X % (power of two)"),
11914 WARN_STRICT_OVERFLOW_MISC
);
11915 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
11916 fold_convert_loc (loc
, type
, arg0
),
11917 fold_convert_loc (loc
, type
, mask
));
11925 if (integer_all_onesp (arg0
))
11926 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
11930 /* Optimize -1 >> x for arithmetic right shifts. */
11931 if (integer_all_onesp (arg0
) && !TYPE_UNSIGNED (type
)
11932 && tree_expr_nonnegative_p (arg1
))
11933 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
11934 /* ... fall through ... */
11938 if (integer_zerop (arg1
))
11939 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
11940 if (integer_zerop (arg0
))
11941 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
11943 /* Since negative shift count is not well-defined,
11944 don't try to compute it in the compiler. */
11945 if (TREE_CODE (arg1
) == INTEGER_CST
&& tree_int_cst_sgn (arg1
) < 0)
11948 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11949 if (TREE_CODE (op0
) == code
&& host_integerp (arg1
, false)
11950 && TREE_INT_CST_LOW (arg1
) < TYPE_PRECISION (type
)
11951 && host_integerp (TREE_OPERAND (arg0
, 1), false)
11952 && TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1)) < TYPE_PRECISION (type
))
11954 HOST_WIDE_INT low
= (TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1))
11955 + TREE_INT_CST_LOW (arg1
));
11957 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11958 being well defined. */
11959 if (low
>= TYPE_PRECISION (type
))
11961 if (code
== LROTATE_EXPR
|| code
== RROTATE_EXPR
)
11962 low
= low
% TYPE_PRECISION (type
);
11963 else if (TYPE_UNSIGNED (type
) || code
== LSHIFT_EXPR
)
11964 return omit_one_operand_loc (loc
, type
, build_int_cst (type
, 0),
11965 TREE_OPERAND (arg0
, 0));
11967 low
= TYPE_PRECISION (type
) - 1;
11970 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
11971 build_int_cst (type
, low
));
11974 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11975 into x & ((unsigned)-1 >> c) for unsigned types. */
11976 if (((code
== LSHIFT_EXPR
&& TREE_CODE (arg0
) == RSHIFT_EXPR
)
11977 || (TYPE_UNSIGNED (type
)
11978 && code
== RSHIFT_EXPR
&& TREE_CODE (arg0
) == LSHIFT_EXPR
))
11979 && host_integerp (arg1
, false)
11980 && TREE_INT_CST_LOW (arg1
) < TYPE_PRECISION (type
)
11981 && host_integerp (TREE_OPERAND (arg0
, 1), false)
11982 && TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1)) < TYPE_PRECISION (type
))
11984 HOST_WIDE_INT low0
= TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1));
11985 HOST_WIDE_INT low1
= TREE_INT_CST_LOW (arg1
);
11991 arg00
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
11993 lshift
= build_int_cst (type
, -1);
11994 lshift
= int_const_binop (code
, lshift
, arg1
);
11996 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
, arg00
, lshift
);
12000 /* Rewrite an LROTATE_EXPR by a constant into an
12001 RROTATE_EXPR by a new constant. */
12002 if (code
== LROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
)
12004 tree tem
= build_int_cst (TREE_TYPE (arg1
),
12005 TYPE_PRECISION (type
));
12006 tem
= const_binop (MINUS_EXPR
, tem
, arg1
);
12007 return fold_build2_loc (loc
, RROTATE_EXPR
, type
, op0
, tem
);
12010 /* If we have a rotate of a bit operation with the rotate count and
12011 the second operand of the bit operation both constant,
12012 permute the two operations. */
12013 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
12014 && (TREE_CODE (arg0
) == BIT_AND_EXPR
12015 || TREE_CODE (arg0
) == BIT_IOR_EXPR
12016 || TREE_CODE (arg0
) == BIT_XOR_EXPR
)
12017 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12018 return fold_build2_loc (loc
, TREE_CODE (arg0
), type
,
12019 fold_build2_loc (loc
, code
, type
,
12020 TREE_OPERAND (arg0
, 0), arg1
),
12021 fold_build2_loc (loc
, code
, type
,
12022 TREE_OPERAND (arg0
, 1), arg1
));
12024 /* Two consecutive rotates adding up to the precision of the
12025 type can be ignored. */
12026 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
12027 && TREE_CODE (arg0
) == RROTATE_EXPR
12028 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
12029 && TREE_INT_CST_HIGH (arg1
) == 0
12030 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0
, 1)) == 0
12031 && ((TREE_INT_CST_LOW (arg1
)
12032 + TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1)))
12033 == (unsigned int) TYPE_PRECISION (type
)))
12034 return TREE_OPERAND (arg0
, 0);
12036 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12037 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12038 if the latter can be further optimized. */
12039 if ((code
== LSHIFT_EXPR
|| code
== RSHIFT_EXPR
)
12040 && TREE_CODE (arg0
) == BIT_AND_EXPR
12041 && TREE_CODE (arg1
) == INTEGER_CST
12042 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12044 tree mask
= fold_build2_loc (loc
, code
, type
,
12045 fold_convert_loc (loc
, type
,
12046 TREE_OPERAND (arg0
, 1)),
12048 tree shift
= fold_build2_loc (loc
, code
, type
,
12049 fold_convert_loc (loc
, type
,
12050 TREE_OPERAND (arg0
, 0)),
12052 tem
= fold_binary_loc (loc
, BIT_AND_EXPR
, type
, shift
, mask
);
12060 if (operand_equal_p (arg0
, arg1
, 0))
12061 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12062 if (INTEGRAL_TYPE_P (type
)
12063 && operand_equal_p (arg1
, TYPE_MIN_VALUE (type
), OEP_ONLY_CONST
))
12064 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
12065 tem
= fold_minmax (loc
, MIN_EXPR
, type
, arg0
, arg1
);
12071 if (operand_equal_p (arg0
, arg1
, 0))
12072 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12073 if (INTEGRAL_TYPE_P (type
)
12074 && TYPE_MAX_VALUE (type
)
12075 && operand_equal_p (arg1
, TYPE_MAX_VALUE (type
), OEP_ONLY_CONST
))
12076 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
12077 tem
= fold_minmax (loc
, MAX_EXPR
, type
, arg0
, arg1
);
12082 case TRUTH_ANDIF_EXPR
:
12083 /* Note that the operands of this must be ints
12084 and their values must be 0 or 1.
12085 ("true" is a fixed value perhaps depending on the language.) */
12086 /* If first arg is constant zero, return it. */
12087 if (integer_zerop (arg0
))
12088 return fold_convert_loc (loc
, type
, arg0
);
12089 case TRUTH_AND_EXPR
:
12090 /* If either arg is constant true, drop it. */
12091 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
12092 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
12093 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
)
12094 /* Preserve sequence points. */
12095 && (code
!= TRUTH_ANDIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
12096 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12097 /* If second arg is constant zero, result is zero, but first arg
12098 must be evaluated. */
12099 if (integer_zerop (arg1
))
12100 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
12101 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12102 case will be handled here. */
12103 if (integer_zerop (arg0
))
12104 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12106 /* !X && X is always false. */
12107 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12108 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12109 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg1
);
12110 /* X && !X is always false. */
12111 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12112 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12113 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12115 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12116 means A >= Y && A != MAX, but in this case we know that
12119 if (!TREE_SIDE_EFFECTS (arg0
)
12120 && !TREE_SIDE_EFFECTS (arg1
))
12122 tem
= fold_to_nonsharp_ineq_using_bound (loc
, arg0
, arg1
);
12123 if (tem
&& !operand_equal_p (tem
, arg0
, 0))
12124 return fold_build2_loc (loc
, code
, type
, tem
, arg1
);
12126 tem
= fold_to_nonsharp_ineq_using_bound (loc
, arg1
, arg0
);
12127 if (tem
&& !operand_equal_p (tem
, arg1
, 0))
12128 return fold_build2_loc (loc
, code
, type
, arg0
, tem
);
12131 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))
12137 case TRUTH_ORIF_EXPR
:
12138 /* Note that the operands of this must be ints
12139 and their values must be 0 or true.
12140 ("true" is a fixed value perhaps depending on the language.) */
12141 /* If first arg is constant true, return it. */
12142 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
12143 return fold_convert_loc (loc
, type
, arg0
);
12144 case TRUTH_OR_EXPR
:
12145 /* If either arg is constant zero, drop it. */
12146 if (TREE_CODE (arg0
) == INTEGER_CST
&& integer_zerop (arg0
))
12147 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
12148 if (TREE_CODE (arg1
) == INTEGER_CST
&& integer_zerop (arg1
)
12149 /* Preserve sequence points. */
12150 && (code
!= TRUTH_ORIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
12151 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12152 /* If second arg is constant true, result is true, but we must
12153 evaluate first arg. */
12154 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
))
12155 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
12156 /* Likewise for first arg, but note this only occurs here for
12158 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
12159 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12161 /* !X || X is always true. */
12162 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12163 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12164 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
12165 /* X || !X is always true. */
12166 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12167 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12168 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
12170 /* (X && !Y) || (!X && Y) is X ^ Y */
12171 if (TREE_CODE (arg0
) == TRUTH_AND_EXPR
12172 && TREE_CODE (arg1
) == TRUTH_AND_EXPR
)
12174 tree a0
, a1
, l0
, l1
, n0
, n1
;
12176 a0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
12177 a1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
12179 l0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
12180 l1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
12182 n0
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l0
);
12183 n1
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l1
);
12185 if ((operand_equal_p (n0
, a0
, 0)
12186 && operand_equal_p (n1
, a1
, 0))
12187 || (operand_equal_p (n0
, a1
, 0)
12188 && operand_equal_p (n1
, a0
, 0)))
12189 return fold_build2_loc (loc
, TRUTH_XOR_EXPR
, type
, l0
, n1
);
12192 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))
12198 case TRUTH_XOR_EXPR
:
12199 /* If the second arg is constant zero, drop it. */
12200 if (integer_zerop (arg1
))
12201 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12202 /* If the second arg is constant true, this is a logical inversion. */
12203 if (integer_onep (arg1
))
12205 /* Only call invert_truthvalue if operand is a truth value. */
12206 if (TREE_CODE (TREE_TYPE (arg0
)) != BOOLEAN_TYPE
)
12207 tem
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, TREE_TYPE (arg0
), arg0
);
12209 tem
= invert_truthvalue_loc (loc
, arg0
);
12210 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, tem
));
12212 /* Identical arguments cancel to zero. */
12213 if (operand_equal_p (arg0
, arg1
, 0))
12214 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
12216 /* !X ^ X is always true. */
12217 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12218 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12219 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
12221 /* X ^ !X is always true. */
12222 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12223 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12224 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
12233 tem
= fold_comparison (loc
, code
, type
, op0
, op1
);
12234 if (tem
!= NULL_TREE
)
12237 /* bool_var != 0 becomes bool_var. */
12238 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
12239 && code
== NE_EXPR
)
12240 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12242 /* bool_var == 1 becomes bool_var. */
12243 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
12244 && code
== EQ_EXPR
)
12245 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12247 /* bool_var != 1 becomes !bool_var. */
12248 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_onep (arg1
)
12249 && code
== NE_EXPR
)
12250 return fold_convert_loc (loc
, type
,
12251 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
12252 TREE_TYPE (arg0
), arg0
));
12254 /* bool_var == 0 becomes !bool_var. */
12255 if (TREE_CODE (TREE_TYPE (arg0
)) == BOOLEAN_TYPE
&& integer_zerop (arg1
)
12256 && code
== EQ_EXPR
)
12257 return fold_convert_loc (loc
, type
,
12258 fold_build1_loc (loc
, TRUTH_NOT_EXPR
,
12259 TREE_TYPE (arg0
), arg0
));
12261 /* !exp != 0 becomes !exp */
12262 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
&& integer_zerop (arg1
)
12263 && code
== NE_EXPR
)
12264 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12266 /* If this is an equality comparison of the address of two non-weak,
12267 unaliased symbols neither of which are extern (since we do not
12268 have access to attributes for externs), then we know the result. */
12269 if (TREE_CODE (arg0
) == ADDR_EXPR
12270 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0
, 0))
12271 && ! DECL_WEAK (TREE_OPERAND (arg0
, 0))
12272 && ! lookup_attribute ("alias",
12273 DECL_ATTRIBUTES (TREE_OPERAND (arg0
, 0)))
12274 && ! DECL_EXTERNAL (TREE_OPERAND (arg0
, 0))
12275 && TREE_CODE (arg1
) == ADDR_EXPR
12276 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1
, 0))
12277 && ! DECL_WEAK (TREE_OPERAND (arg1
, 0))
12278 && ! lookup_attribute ("alias",
12279 DECL_ATTRIBUTES (TREE_OPERAND (arg1
, 0)))
12280 && ! DECL_EXTERNAL (TREE_OPERAND (arg1
, 0)))
12282 /* We know that we're looking at the address of two
12283 non-weak, unaliased, static _DECL nodes.
12285 It is both wasteful and incorrect to call operand_equal_p
12286 to compare the two ADDR_EXPR nodes. It is wasteful in that
12287 all we need to do is test pointer equality for the arguments
12288 to the two ADDR_EXPR nodes. It is incorrect to use
12289 operand_equal_p as that function is NOT equivalent to a
12290 C equality test. It can in fact return false for two
12291 objects which would test as equal using the C equality
12293 bool equal
= TREE_OPERAND (arg0
, 0) == TREE_OPERAND (arg1
, 0);
12294 return constant_boolean_node (equal
12295 ? code
== EQ_EXPR
: code
!= EQ_EXPR
,
12299 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12300 a MINUS_EXPR of a constant, we can convert it into a comparison with
12301 a revised constant as long as no overflow occurs. */
12302 if (TREE_CODE (arg1
) == INTEGER_CST
12303 && (TREE_CODE (arg0
) == PLUS_EXPR
12304 || TREE_CODE (arg0
) == MINUS_EXPR
)
12305 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
12306 && 0 != (tem
= const_binop (TREE_CODE (arg0
) == PLUS_EXPR
12307 ? MINUS_EXPR
: PLUS_EXPR
,
12308 fold_convert_loc (loc
, TREE_TYPE (arg0
),
12310 TREE_OPERAND (arg0
, 1)))
12311 && !TREE_OVERFLOW (tem
))
12312 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
12314 /* Similarly for a NEGATE_EXPR. */
12315 if (TREE_CODE (arg0
) == NEGATE_EXPR
12316 && TREE_CODE (arg1
) == INTEGER_CST
12317 && 0 != (tem
= negate_expr (fold_convert_loc (loc
, TREE_TYPE (arg0
),
12319 && TREE_CODE (tem
) == INTEGER_CST
12320 && !TREE_OVERFLOW (tem
))
12321 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), tem
);
12323 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12324 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12325 && TREE_CODE (arg1
) == INTEGER_CST
12326 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12327 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12328 fold_build2_loc (loc
, BIT_XOR_EXPR
, TREE_TYPE (arg0
),
12329 fold_convert_loc (loc
,
12332 TREE_OPERAND (arg0
, 1)));
12334 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12335 if ((TREE_CODE (arg0
) == PLUS_EXPR
12336 || TREE_CODE (arg0
) == POINTER_PLUS_EXPR
12337 || TREE_CODE (arg0
) == MINUS_EXPR
)
12338 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0
,
12341 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
12342 || POINTER_TYPE_P (TREE_TYPE (arg0
))))
12344 tree val
= TREE_OPERAND (arg0
, 1);
12345 return omit_two_operands_loc (loc
, type
,
12346 fold_build2_loc (loc
, code
, type
,
12348 build_int_cst (TREE_TYPE (val
),
12350 TREE_OPERAND (arg0
, 0), arg1
);
12353 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12354 if (TREE_CODE (arg0
) == MINUS_EXPR
12355 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == INTEGER_CST
12356 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0
,
12359 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 0)) & 1) == 1)
12361 return omit_two_operands_loc (loc
, type
,
12363 ? boolean_true_node
: boolean_false_node
,
12364 TREE_OPERAND (arg0
, 1), arg1
);
12367 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12368 for !=. Don't do this for ordered comparisons due to overflow. */
12369 if (TREE_CODE (arg0
) == MINUS_EXPR
12370 && integer_zerop (arg1
))
12371 return fold_build2_loc (loc
, code
, type
,
12372 TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg0
, 1));
12374 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12375 if (TREE_CODE (arg0
) == ABS_EXPR
12376 && (integer_zerop (arg1
) || real_zerop (arg1
)))
12377 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0), arg1
);
12379 /* If this is an EQ or NE comparison with zero and ARG0 is
12380 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12381 two operations, but the latter can be done in one less insn
12382 on machines that have only two-operand insns or on which a
12383 constant cannot be the first operand. */
12384 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12385 && integer_zerop (arg1
))
12387 tree arg00
= TREE_OPERAND (arg0
, 0);
12388 tree arg01
= TREE_OPERAND (arg0
, 1);
12389 if (TREE_CODE (arg00
) == LSHIFT_EXPR
12390 && integer_onep (TREE_OPERAND (arg00
, 0)))
12392 tree tem
= fold_build2_loc (loc
, RSHIFT_EXPR
, TREE_TYPE (arg00
),
12393 arg01
, TREE_OPERAND (arg00
, 1));
12394 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
), tem
,
12395 build_int_cst (TREE_TYPE (arg0
), 1));
12396 return fold_build2_loc (loc
, code
, type
,
12397 fold_convert_loc (loc
, TREE_TYPE (arg1
), tem
),
12400 else if (TREE_CODE (arg01
) == LSHIFT_EXPR
12401 && integer_onep (TREE_OPERAND (arg01
, 0)))
12403 tree tem
= fold_build2_loc (loc
, RSHIFT_EXPR
, TREE_TYPE (arg01
),
12404 arg00
, TREE_OPERAND (arg01
, 1));
12405 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
), tem
,
12406 build_int_cst (TREE_TYPE (arg0
), 1));
12407 return fold_build2_loc (loc
, code
, type
,
12408 fold_convert_loc (loc
, TREE_TYPE (arg1
), tem
),
12413 /* If this is an NE or EQ comparison of zero against the result of a
12414 signed MOD operation whose second operand is a power of 2, make
12415 the MOD operation unsigned since it is simpler and equivalent. */
12416 if (integer_zerop (arg1
)
12417 && !TYPE_UNSIGNED (TREE_TYPE (arg0
))
12418 && (TREE_CODE (arg0
) == TRUNC_MOD_EXPR
12419 || TREE_CODE (arg0
) == CEIL_MOD_EXPR
12420 || TREE_CODE (arg0
) == FLOOR_MOD_EXPR
12421 || TREE_CODE (arg0
) == ROUND_MOD_EXPR
)
12422 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
12424 tree newtype
= unsigned_type_for (TREE_TYPE (arg0
));
12425 tree newmod
= fold_build2_loc (loc
, TREE_CODE (arg0
), newtype
,
12426 fold_convert_loc (loc
, newtype
,
12427 TREE_OPERAND (arg0
, 0)),
12428 fold_convert_loc (loc
, newtype
,
12429 TREE_OPERAND (arg0
, 1)));
12431 return fold_build2_loc (loc
, code
, type
, newmod
,
12432 fold_convert_loc (loc
, newtype
, arg1
));
12435 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12436 C1 is a valid shift constant, and C2 is a power of two, i.e.
12438 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12439 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == RSHIFT_EXPR
12440 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1))
12442 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12443 && integer_zerop (arg1
))
12445 tree itype
= TREE_TYPE (arg0
);
12446 unsigned HOST_WIDE_INT prec
= TYPE_PRECISION (itype
);
12447 tree arg001
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1);
12449 /* Check for a valid shift count. */
12450 if (TREE_INT_CST_HIGH (arg001
) == 0
12451 && TREE_INT_CST_LOW (arg001
) < prec
)
12453 tree arg01
= TREE_OPERAND (arg0
, 1);
12454 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
12455 unsigned HOST_WIDE_INT log2
= tree_log2 (arg01
);
12456 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12457 can be rewritten as (X & (C2 << C1)) != 0. */
12458 if ((log2
+ TREE_INT_CST_LOW (arg001
)) < prec
)
12460 tem
= fold_build2_loc (loc
, LSHIFT_EXPR
, itype
, arg01
, arg001
);
12461 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, itype
, arg000
, tem
);
12462 return fold_build2_loc (loc
, code
, type
, tem
,
12463 fold_convert_loc (loc
, itype
, arg1
));
12465 /* Otherwise, for signed (arithmetic) shifts,
12466 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12467 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12468 else if (!TYPE_UNSIGNED (itype
))
12469 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
, type
,
12470 arg000
, build_int_cst (itype
, 0));
12471 /* Otherwise, of unsigned (logical) shifts,
12472 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12473 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12475 return omit_one_operand_loc (loc
, type
,
12476 code
== EQ_EXPR
? integer_one_node
12477 : integer_zero_node
,
12482 /* If we have (A & C) == C where C is a power of 2, convert this into
12483 (A & C) != 0. Similarly for NE_EXPR. */
12484 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12485 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12486 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
12487 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
12488 arg0
, fold_convert_loc (loc
, TREE_TYPE (arg0
),
12489 integer_zero_node
));
12491 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12492 bit, then fold the expression into A < 0 or A >= 0. */
12493 tem
= fold_single_bit_test_into_sign_test (loc
, code
, arg0
, arg1
, type
);
12497 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12498 Similarly for NE_EXPR. */
12499 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12500 && TREE_CODE (arg1
) == INTEGER_CST
12501 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12503 tree notc
= fold_build1_loc (loc
, BIT_NOT_EXPR
,
12504 TREE_TYPE (TREE_OPERAND (arg0
, 1)),
12505 TREE_OPERAND (arg0
, 1));
12507 = fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
12508 fold_convert_loc (loc
, TREE_TYPE (arg0
), arg1
),
12510 tree rslt
= code
== EQ_EXPR
? integer_zero_node
: integer_one_node
;
12511 if (integer_nonzerop (dandnotc
))
12512 return omit_one_operand_loc (loc
, type
, rslt
, arg0
);
12515 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12516 Similarly for NE_EXPR. */
12517 if (TREE_CODE (arg0
) == BIT_IOR_EXPR
12518 && TREE_CODE (arg1
) == INTEGER_CST
12519 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12521 tree notd
= fold_build1_loc (loc
, BIT_NOT_EXPR
, TREE_TYPE (arg1
), arg1
);
12523 = fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
12524 TREE_OPERAND (arg0
, 1),
12525 fold_convert_loc (loc
, TREE_TYPE (arg0
), notd
));
12526 tree rslt
= code
== EQ_EXPR
? integer_zero_node
: integer_one_node
;
12527 if (integer_nonzerop (candnotd
))
12528 return omit_one_operand_loc (loc
, type
, rslt
, arg0
);
12531 /* If this is a comparison of a field, we may be able to simplify it. */
12532 if ((TREE_CODE (arg0
) == COMPONENT_REF
12533 || TREE_CODE (arg0
) == BIT_FIELD_REF
)
12534 /* Handle the constant case even without -O
12535 to make sure the warnings are given. */
12536 && (optimize
|| TREE_CODE (arg1
) == INTEGER_CST
))
12538 t1
= optimize_bit_field_compare (loc
, code
, type
, arg0
, arg1
);
12543 /* Optimize comparisons of strlen vs zero to a compare of the
12544 first character of the string vs zero. To wit,
12545 strlen(ptr) == 0 => *ptr == 0
12546 strlen(ptr) != 0 => *ptr != 0
12547 Other cases should reduce to one of these two (or a constant)
12548 due to the return value of strlen being unsigned. */
12549 if (TREE_CODE (arg0
) == CALL_EXPR
12550 && integer_zerop (arg1
))
12552 tree fndecl
= get_callee_fndecl (arg0
);
12555 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
12556 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_STRLEN
12557 && call_expr_nargs (arg0
) == 1
12558 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0
, 0))) == POINTER_TYPE
)
12560 tree iref
= build_fold_indirect_ref_loc (loc
,
12561 CALL_EXPR_ARG (arg0
, 0));
12562 return fold_build2_loc (loc
, code
, type
, iref
,
12563 build_int_cst (TREE_TYPE (iref
), 0));
12567 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12568 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12569 if (TREE_CODE (arg0
) == RSHIFT_EXPR
12570 && integer_zerop (arg1
)
12571 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12573 tree arg00
= TREE_OPERAND (arg0
, 0);
12574 tree arg01
= TREE_OPERAND (arg0
, 1);
12575 tree itype
= TREE_TYPE (arg00
);
12576 if (TREE_INT_CST_HIGH (arg01
) == 0
12577 && TREE_INT_CST_LOW (arg01
)
12578 == (unsigned HOST_WIDE_INT
) (TYPE_PRECISION (itype
) - 1))
12580 if (TYPE_UNSIGNED (itype
))
12582 itype
= signed_type_for (itype
);
12583 arg00
= fold_convert_loc (loc
, itype
, arg00
);
12585 return fold_build2_loc (loc
, code
== EQ_EXPR
? GE_EXPR
: LT_EXPR
,
12586 type
, arg00
, build_int_cst (itype
, 0));
12590 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12591 if (integer_zerop (arg1
)
12592 && TREE_CODE (arg0
) == BIT_XOR_EXPR
)
12593 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12594 TREE_OPERAND (arg0
, 1));
12596 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12597 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12598 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
12599 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12600 build_int_cst (TREE_TYPE (arg0
), 0));
12601 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12602 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12603 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
12604 && reorder_operands_p (TREE_OPERAND (arg0
, 1), arg1
))
12605 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 1),
12606 build_int_cst (TREE_TYPE (arg0
), 0));
12608 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12609 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12610 && TREE_CODE (arg1
) == INTEGER_CST
12611 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
12612 return fold_build2_loc (loc
, code
, type
, TREE_OPERAND (arg0
, 0),
12613 fold_build2_loc (loc
, BIT_XOR_EXPR
, TREE_TYPE (arg1
),
12614 TREE_OPERAND (arg0
, 1), arg1
));
12616 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12617 (X & C) == 0 when C is a single bit. */
12618 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12619 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_NOT_EXPR
12620 && integer_zerop (arg1
)
12621 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
12623 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg0
),
12624 TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0),
12625 TREE_OPERAND (arg0
, 1));
12626 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
,
12628 fold_convert_loc (loc
, TREE_TYPE (arg0
),
12632 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12633 constant C is a power of two, i.e. a single bit. */
12634 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12635 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
12636 && integer_zerop (arg1
)
12637 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12638 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
12639 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
12641 tree arg00
= TREE_OPERAND (arg0
, 0);
12642 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
12643 arg00
, build_int_cst (TREE_TYPE (arg00
), 0));
12646 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12647 when is C is a power of two, i.e. a single bit. */
12648 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12649 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_XOR_EXPR
12650 && integer_zerop (arg1
)
12651 && integer_pow2p (TREE_OPERAND (arg0
, 1))
12652 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
12653 TREE_OPERAND (arg0
, 1), OEP_ONLY_CONST
))
12655 tree arg000
= TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0);
12656 tem
= fold_build2_loc (loc
, BIT_AND_EXPR
, TREE_TYPE (arg000
),
12657 arg000
, TREE_OPERAND (arg0
, 1));
12658 return fold_build2_loc (loc
, code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
12659 tem
, build_int_cst (TREE_TYPE (tem
), 0));
12662 if (integer_zerop (arg1
)
12663 && tree_expr_nonzero_p (arg0
))
12665 tree res
= constant_boolean_node (code
==NE_EXPR
, type
);
12666 return omit_one_operand_loc (loc
, type
, res
, arg0
);
12669 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12670 if (TREE_CODE (arg0
) == NEGATE_EXPR
12671 && TREE_CODE (arg1
) == NEGATE_EXPR
)
12672 return fold_build2_loc (loc
, code
, type
,
12673 TREE_OPERAND (arg0
, 0),
12674 fold_convert_loc (loc
, TREE_TYPE (arg0
),
12675 TREE_OPERAND (arg1
, 0)));
12677 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
12678 if (TREE_CODE (arg0
) == BIT_AND_EXPR
12679 && TREE_CODE (arg1
) == BIT_AND_EXPR
)
12681 tree arg00
= TREE_OPERAND (arg0
, 0);
12682 tree arg01
= TREE_OPERAND (arg0
, 1);
12683 tree arg10
= TREE_OPERAND (arg1
, 0);
12684 tree arg11
= TREE_OPERAND (arg1
, 1);
12685 tree itype
= TREE_TYPE (arg0
);
12687 if (operand_equal_p (arg01
, arg11
, 0))
12688 return fold_build2_loc (loc
, code
, type
,
12689 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
12690 fold_build2_loc (loc
,
12691 BIT_XOR_EXPR
, itype
,
12694 build_int_cst (itype
, 0));
12696 if (operand_equal_p (arg01
, arg10
, 0))
12697 return fold_build2_loc (loc
, code
, type
,
12698 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
12699 fold_build2_loc (loc
,
12700 BIT_XOR_EXPR
, itype
,
12703 build_int_cst (itype
, 0));
12705 if (operand_equal_p (arg00
, arg11
, 0))
12706 return fold_build2_loc (loc
, code
, type
,
12707 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
12708 fold_build2_loc (loc
,
12709 BIT_XOR_EXPR
, itype
,
12712 build_int_cst (itype
, 0));
12714 if (operand_equal_p (arg00
, arg10
, 0))
12715 return fold_build2_loc (loc
, code
, type
,
12716 fold_build2_loc (loc
, BIT_AND_EXPR
, itype
,
12717 fold_build2_loc (loc
,
12718 BIT_XOR_EXPR
, itype
,
12721 build_int_cst (itype
, 0));
12724 if (TREE_CODE (arg0
) == BIT_XOR_EXPR
12725 && TREE_CODE (arg1
) == BIT_XOR_EXPR
)
12727 tree arg00
= TREE_OPERAND (arg0
, 0);
12728 tree arg01
= TREE_OPERAND (arg0
, 1);
12729 tree arg10
= TREE_OPERAND (arg1
, 0);
12730 tree arg11
= TREE_OPERAND (arg1
, 1);
12731 tree itype
= TREE_TYPE (arg0
);
12733 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12734 operand_equal_p guarantees no side-effects so we don't need
12735 to use omit_one_operand on Z. */
12736 if (operand_equal_p (arg01
, arg11
, 0))
12737 return fold_build2_loc (loc
, code
, type
, arg00
,
12738 fold_convert_loc (loc
, TREE_TYPE (arg00
),
12740 if (operand_equal_p (arg01
, arg10
, 0))
12741 return fold_build2_loc (loc
, code
, type
, arg00
,
12742 fold_convert_loc (loc
, TREE_TYPE (arg00
),
12744 if (operand_equal_p (arg00
, arg11
, 0))
12745 return fold_build2_loc (loc
, code
, type
, arg01
,
12746 fold_convert_loc (loc
, TREE_TYPE (arg01
),
12748 if (operand_equal_p (arg00
, arg10
, 0))
12749 return fold_build2_loc (loc
, code
, type
, arg01
,
12750 fold_convert_loc (loc
, TREE_TYPE (arg01
),
12753 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12754 if (TREE_CODE (arg01
) == INTEGER_CST
12755 && TREE_CODE (arg11
) == INTEGER_CST
)
12757 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg01
,
12758 fold_convert_loc (loc
, itype
, arg11
));
12759 tem
= fold_build2_loc (loc
, BIT_XOR_EXPR
, itype
, arg00
, tem
);
12760 return fold_build2_loc (loc
, code
, type
, tem
,
12761 fold_convert_loc (loc
, itype
, arg10
));
12765 /* Attempt to simplify equality/inequality comparisons of complex
12766 values. Only lower the comparison if the result is known or
12767 can be simplified to a single scalar comparison. */
12768 if ((TREE_CODE (arg0
) == COMPLEX_EXPR
12769 || TREE_CODE (arg0
) == COMPLEX_CST
)
12770 && (TREE_CODE (arg1
) == COMPLEX_EXPR
12771 || TREE_CODE (arg1
) == COMPLEX_CST
))
12773 tree real0
, imag0
, real1
, imag1
;
12776 if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
12778 real0
= TREE_OPERAND (arg0
, 0);
12779 imag0
= TREE_OPERAND (arg0
, 1);
12783 real0
= TREE_REALPART (arg0
);
12784 imag0
= TREE_IMAGPART (arg0
);
12787 if (TREE_CODE (arg1
) == COMPLEX_EXPR
)
12789 real1
= TREE_OPERAND (arg1
, 0);
12790 imag1
= TREE_OPERAND (arg1
, 1);
12794 real1
= TREE_REALPART (arg1
);
12795 imag1
= TREE_IMAGPART (arg1
);
12798 rcond
= fold_binary_loc (loc
, code
, type
, real0
, real1
);
12799 if (rcond
&& TREE_CODE (rcond
) == INTEGER_CST
)
12801 if (integer_zerop (rcond
))
12803 if (code
== EQ_EXPR
)
12804 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
12806 return fold_build2_loc (loc
, NE_EXPR
, type
, imag0
, imag1
);
12810 if (code
== NE_EXPR
)
12811 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
12813 return fold_build2_loc (loc
, EQ_EXPR
, type
, imag0
, imag1
);
12817 icond
= fold_binary_loc (loc
, code
, type
, imag0
, imag1
);
12818 if (icond
&& TREE_CODE (icond
) == INTEGER_CST
)
12820 if (integer_zerop (icond
))
12822 if (code
== EQ_EXPR
)
12823 return omit_two_operands_loc (loc
, type
, boolean_false_node
,
12825 return fold_build2_loc (loc
, NE_EXPR
, type
, real0
, real1
);
12829 if (code
== NE_EXPR
)
12830 return omit_two_operands_loc (loc
, type
, boolean_true_node
,
12832 return fold_build2_loc (loc
, EQ_EXPR
, type
, real0
, real1
);
12843 tem
= fold_comparison (loc
, code
, type
, op0
, op1
);
12844 if (tem
!= NULL_TREE
)
12847 /* Transform comparisons of the form X +- C CMP X. */
12848 if ((TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
12849 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0)
12850 && ((TREE_CODE (TREE_OPERAND (arg0
, 1)) == REAL_CST
12851 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
))))
12852 || (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
12853 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))))
12855 tree arg01
= TREE_OPERAND (arg0
, 1);
12856 enum tree_code code0
= TREE_CODE (arg0
);
12859 if (TREE_CODE (arg01
) == REAL_CST
)
12860 is_positive
= REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01
)) ? -1 : 1;
12862 is_positive
= tree_int_cst_sgn (arg01
);
12864 /* (X - c) > X becomes false. */
12865 if (code
== GT_EXPR
12866 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
12867 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
12869 if (TREE_CODE (arg01
) == INTEGER_CST
12870 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12871 fold_overflow_warning (("assuming signed overflow does not "
12872 "occur when assuming that (X - c) > X "
12873 "is always false"),
12874 WARN_STRICT_OVERFLOW_ALL
);
12875 return constant_boolean_node (0, type
);
12878 /* Likewise (X + c) < X becomes false. */
12879 if (code
== LT_EXPR
12880 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
12881 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
12883 if (TREE_CODE (arg01
) == INTEGER_CST
12884 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12885 fold_overflow_warning (("assuming signed overflow does not "
12886 "occur when assuming that "
12887 "(X + c) < X is always false"),
12888 WARN_STRICT_OVERFLOW_ALL
);
12889 return constant_boolean_node (0, type
);
12892 /* Convert (X - c) <= X to true. */
12893 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
)))
12895 && ((code0
== MINUS_EXPR
&& is_positive
>= 0)
12896 || (code0
== PLUS_EXPR
&& is_positive
<= 0)))
12898 if (TREE_CODE (arg01
) == INTEGER_CST
12899 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12900 fold_overflow_warning (("assuming signed overflow does not "
12901 "occur when assuming that "
12902 "(X - c) <= X is always true"),
12903 WARN_STRICT_OVERFLOW_ALL
);
12904 return constant_boolean_node (1, type
);
12907 /* Convert (X + c) >= X to true. */
12908 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1
)))
12910 && ((code0
== PLUS_EXPR
&& is_positive
>= 0)
12911 || (code0
== MINUS_EXPR
&& is_positive
<= 0)))
12913 if (TREE_CODE (arg01
) == INTEGER_CST
12914 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12915 fold_overflow_warning (("assuming signed overflow does not "
12916 "occur when assuming that "
12917 "(X + c) >= X is always true"),
12918 WARN_STRICT_OVERFLOW_ALL
);
12919 return constant_boolean_node (1, type
);
12922 if (TREE_CODE (arg01
) == INTEGER_CST
)
12924 /* Convert X + c > X and X - c < X to true for integers. */
12925 if (code
== GT_EXPR
12926 && ((code0
== PLUS_EXPR
&& is_positive
> 0)
12927 || (code0
== MINUS_EXPR
&& is_positive
< 0)))
12929 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12930 fold_overflow_warning (("assuming signed overflow does "
12931 "not occur when assuming that "
12932 "(X + c) > X is always true"),
12933 WARN_STRICT_OVERFLOW_ALL
);
12934 return constant_boolean_node (1, type
);
12937 if (code
== LT_EXPR
12938 && ((code0
== MINUS_EXPR
&& is_positive
> 0)
12939 || (code0
== PLUS_EXPR
&& is_positive
< 0)))
12941 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12942 fold_overflow_warning (("assuming signed overflow does "
12943 "not occur when assuming that "
12944 "(X - c) < X is always true"),
12945 WARN_STRICT_OVERFLOW_ALL
);
12946 return constant_boolean_node (1, type
);
12949 /* Convert X + c <= X and X - c >= X to false for integers. */
12950 if (code
== LE_EXPR
12951 && ((code0
== PLUS_EXPR
&& is_positive
> 0)
12952 || (code0
== MINUS_EXPR
&& is_positive
< 0)))
12954 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12955 fold_overflow_warning (("assuming signed overflow does "
12956 "not occur when assuming that "
12957 "(X + c) <= X is always false"),
12958 WARN_STRICT_OVERFLOW_ALL
);
12959 return constant_boolean_node (0, type
);
12962 if (code
== GE_EXPR
12963 && ((code0
== MINUS_EXPR
&& is_positive
> 0)
12964 || (code0
== PLUS_EXPR
&& is_positive
< 0)))
12966 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1
)))
12967 fold_overflow_warning (("assuming signed overflow does "
12968 "not occur when assuming that "
12969 "(X - c) >= X is always false"),
12970 WARN_STRICT_OVERFLOW_ALL
);
12971 return constant_boolean_node (0, type
);
12976 /* Comparisons with the highest or lowest possible integer of
12977 the specified precision will have known values. */
12979 tree arg1_type
= TREE_TYPE (arg1
);
12980 unsigned int width
= TYPE_PRECISION (arg1_type
);
12982 if (TREE_CODE (arg1
) == INTEGER_CST
12983 && width
<= 2 * HOST_BITS_PER_WIDE_INT
12984 && (INTEGRAL_TYPE_P (arg1_type
) || POINTER_TYPE_P (arg1_type
)))
12986 HOST_WIDE_INT signed_max_hi
;
12987 unsigned HOST_WIDE_INT signed_max_lo
;
12988 unsigned HOST_WIDE_INT max_hi
, max_lo
, min_hi
, min_lo
;
12990 if (width
<= HOST_BITS_PER_WIDE_INT
)
12992 signed_max_lo
= ((unsigned HOST_WIDE_INT
) 1 << (width
- 1))
12997 if (TYPE_UNSIGNED (arg1_type
))
12999 max_lo
= ((unsigned HOST_WIDE_INT
) 2 << (width
- 1)) - 1;
13005 max_lo
= signed_max_lo
;
13006 min_lo
= ((unsigned HOST_WIDE_INT
) -1 << (width
- 1));
13012 width
-= HOST_BITS_PER_WIDE_INT
;
13013 signed_max_lo
= -1;
13014 signed_max_hi
= ((unsigned HOST_WIDE_INT
) 1 << (width
- 1))
13019 if (TYPE_UNSIGNED (arg1_type
))
13021 max_hi
= ((unsigned HOST_WIDE_INT
) 2 << (width
- 1)) - 1;
13026 max_hi
= signed_max_hi
;
13027 min_hi
= ((unsigned HOST_WIDE_INT
) -1 << (width
- 1));
13031 if ((unsigned HOST_WIDE_INT
) TREE_INT_CST_HIGH (arg1
) == max_hi
13032 && TREE_INT_CST_LOW (arg1
) == max_lo
)
13036 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
13039 return fold_build2_loc (loc
, EQ_EXPR
, type
, op0
, op1
);
13042 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
13045 return fold_build2_loc (loc
, NE_EXPR
, type
, op0
, op1
);
13047 /* The GE_EXPR and LT_EXPR cases above are not normally
13048 reached because of previous transformations. */
13053 else if ((unsigned HOST_WIDE_INT
) TREE_INT_CST_HIGH (arg1
)
13055 && TREE_INT_CST_LOW (arg1
) == max_lo
- 1)
13059 arg1
= const_binop (PLUS_EXPR
, arg1
,
13060 build_int_cst (TREE_TYPE (arg1
), 1));
13061 return fold_build2_loc (loc
, EQ_EXPR
, type
,
13062 fold_convert_loc (loc
,
13063 TREE_TYPE (arg1
), arg0
),
13066 arg1
= const_binop (PLUS_EXPR
, arg1
,
13067 build_int_cst (TREE_TYPE (arg1
), 1));
13068 return fold_build2_loc (loc
, NE_EXPR
, type
,
13069 fold_convert_loc (loc
, TREE_TYPE (arg1
),
13075 else if ((unsigned HOST_WIDE_INT
) TREE_INT_CST_HIGH (arg1
)
13077 && TREE_INT_CST_LOW (arg1
) == min_lo
)
13081 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
13084 return fold_build2_loc (loc
, EQ_EXPR
, type
, op0
, op1
);
13087 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
13090 return fold_build2_loc (loc
, NE_EXPR
, type
, op0
, op1
);
13095 else if ((unsigned HOST_WIDE_INT
) TREE_INT_CST_HIGH (arg1
)
13097 && TREE_INT_CST_LOW (arg1
) == min_lo
+ 1)
13101 arg1
= const_binop (MINUS_EXPR
, arg1
, integer_one_node
);
13102 return fold_build2_loc (loc
, NE_EXPR
, type
,
13103 fold_convert_loc (loc
,
13104 TREE_TYPE (arg1
), arg0
),
13107 arg1
= const_binop (MINUS_EXPR
, arg1
, integer_one_node
);
13108 return fold_build2_loc (loc
, EQ_EXPR
, type
,
13109 fold_convert_loc (loc
, TREE_TYPE (arg1
),
13116 else if (TREE_INT_CST_HIGH (arg1
) == signed_max_hi
13117 && TREE_INT_CST_LOW (arg1
) == signed_max_lo
13118 && TYPE_UNSIGNED (arg1_type
)
13119 /* We will flip the signedness of the comparison operator
13120 associated with the mode of arg1, so the sign bit is
13121 specified by this mode. Check that arg1 is the signed
13122 max associated with this sign bit. */
13123 && width
== GET_MODE_BITSIZE (TYPE_MODE (arg1_type
))
13124 /* signed_type does not work on pointer types. */
13125 && INTEGRAL_TYPE_P (arg1_type
))
13127 /* The following case also applies to X < signed_max+1
13128 and X >= signed_max+1 because previous transformations. */
13129 if (code
== LE_EXPR
|| code
== GT_EXPR
)
13132 st
= signed_type_for (TREE_TYPE (arg1
));
13133 return fold_build2_loc (loc
,
13134 code
== LE_EXPR
? GE_EXPR
: LT_EXPR
,
13135 type
, fold_convert_loc (loc
, st
, arg0
),
13136 build_int_cst (st
, 0));
13142 /* If we are comparing an ABS_EXPR with a constant, we can
13143 convert all the cases into explicit comparisons, but they may
13144 well not be faster than doing the ABS and one comparison.
13145 But ABS (X) <= C is a range comparison, which becomes a subtraction
13146 and a comparison, and is probably faster. */
13147 if (code
== LE_EXPR
13148 && TREE_CODE (arg1
) == INTEGER_CST
13149 && TREE_CODE (arg0
) == ABS_EXPR
13150 && ! TREE_SIDE_EFFECTS (arg0
)
13151 && (0 != (tem
= negate_expr (arg1
)))
13152 && TREE_CODE (tem
) == INTEGER_CST
13153 && !TREE_OVERFLOW (tem
))
13154 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
13155 build2 (GE_EXPR
, type
,
13156 TREE_OPERAND (arg0
, 0), tem
),
13157 build2 (LE_EXPR
, type
,
13158 TREE_OPERAND (arg0
, 0), arg1
));
13160 /* Convert ABS_EXPR<x> >= 0 to true. */
13161 strict_overflow_p
= false;
13162 if (code
== GE_EXPR
13163 && (integer_zerop (arg1
)
13164 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
)))
13165 && real_zerop (arg1
)))
13166 && tree_expr_nonnegative_warnv_p (arg0
, &strict_overflow_p
))
13168 if (strict_overflow_p
)
13169 fold_overflow_warning (("assuming signed overflow does not occur "
13170 "when simplifying comparison of "
13171 "absolute value and zero"),
13172 WARN_STRICT_OVERFLOW_CONDITIONAL
);
13173 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
13176 /* Convert ABS_EXPR<x> < 0 to false. */
13177 strict_overflow_p
= false;
13178 if (code
== LT_EXPR
13179 && (integer_zerop (arg1
) || real_zerop (arg1
))
13180 && tree_expr_nonnegative_warnv_p (arg0
, &strict_overflow_p
))
13182 if (strict_overflow_p
)
13183 fold_overflow_warning (("assuming signed overflow does not occur "
13184 "when simplifying comparison of "
13185 "absolute value and zero"),
13186 WARN_STRICT_OVERFLOW_CONDITIONAL
);
13187 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg0
);
13190 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13191 and similarly for >= into !=. */
13192 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
13193 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
13194 && TREE_CODE (arg1
) == LSHIFT_EXPR
13195 && integer_onep (TREE_OPERAND (arg1
, 0)))
13196 return build2_loc (loc
, code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
13197 build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
13198 TREE_OPERAND (arg1
, 1)),
13199 build_int_cst (TREE_TYPE (arg0
), 0));
13201 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
13202 && TYPE_UNSIGNED (TREE_TYPE (arg0
))
13203 && CONVERT_EXPR_P (arg1
)
13204 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == LSHIFT_EXPR
13205 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0)))
13207 tem
= build2 (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
13208 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1));
13209 return build2_loc (loc
, code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
13210 fold_convert_loc (loc
, TREE_TYPE (arg0
), tem
),
13211 build_int_cst (TREE_TYPE (arg0
), 0));
13216 case UNORDERED_EXPR
:
13224 if (TREE_CODE (arg0
) == REAL_CST
&& TREE_CODE (arg1
) == REAL_CST
)
13226 t1
= fold_relational_const (code
, type
, arg0
, arg1
);
13227 if (t1
!= NULL_TREE
)
13231 /* If the first operand is NaN, the result is constant. */
13232 if (TREE_CODE (arg0
) == REAL_CST
13233 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0
))
13234 && (code
!= LTGT_EXPR
|| ! flag_trapping_math
))
13236 t1
= (code
== ORDERED_EXPR
|| code
== LTGT_EXPR
)
13237 ? integer_zero_node
13238 : integer_one_node
;
13239 return omit_one_operand_loc (loc
, type
, t1
, arg1
);
13242 /* If the second operand is NaN, the result is constant. */
13243 if (TREE_CODE (arg1
) == REAL_CST
13244 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
))
13245 && (code
!= LTGT_EXPR
|| ! flag_trapping_math
))
13247 t1
= (code
== ORDERED_EXPR
|| code
== LTGT_EXPR
)
13248 ? integer_zero_node
13249 : integer_one_node
;
13250 return omit_one_operand_loc (loc
, type
, t1
, arg0
);
13253 /* Simplify unordered comparison of something with itself. */
13254 if ((code
== UNLE_EXPR
|| code
== UNGE_EXPR
|| code
== UNEQ_EXPR
)
13255 && operand_equal_p (arg0
, arg1
, 0))
13256 return constant_boolean_node (1, type
);
13258 if (code
== LTGT_EXPR
13259 && !flag_trapping_math
13260 && operand_equal_p (arg0
, arg1
, 0))
13261 return constant_boolean_node (0, type
);
13263 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13265 tree targ0
= strip_float_extensions (arg0
);
13266 tree targ1
= strip_float_extensions (arg1
);
13267 tree newtype
= TREE_TYPE (targ0
);
13269 if (TYPE_PRECISION (TREE_TYPE (targ1
)) > TYPE_PRECISION (newtype
))
13270 newtype
= TREE_TYPE (targ1
);
13272 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (TREE_TYPE (arg0
)))
13273 return fold_build2_loc (loc
, code
, type
,
13274 fold_convert_loc (loc
, newtype
, targ0
),
13275 fold_convert_loc (loc
, newtype
, targ1
));
13280 case COMPOUND_EXPR
:
13281 /* When pedantic, a compound expression can be neither an lvalue
13282 nor an integer constant expression. */
13283 if (TREE_SIDE_EFFECTS (arg0
) || TREE_CONSTANT (arg1
))
13285 /* Don't let (0, 0) be null pointer constant. */
13286 tem
= integer_zerop (arg1
) ? build1 (NOP_EXPR
, type
, arg1
)
13287 : fold_convert_loc (loc
, type
, arg1
);
13288 return pedantic_non_lvalue_loc (loc
, tem
);
13291 if ((TREE_CODE (arg0
) == REAL_CST
13292 && TREE_CODE (arg1
) == REAL_CST
)
13293 || (TREE_CODE (arg0
) == INTEGER_CST
13294 && TREE_CODE (arg1
) == INTEGER_CST
))
13295 return build_complex (type
, arg0
, arg1
);
13296 if (TREE_CODE (arg0
) == REALPART_EXPR
13297 && TREE_CODE (arg1
) == IMAGPART_EXPR
13298 && TREE_TYPE (TREE_OPERAND (arg0
, 0)) == type
13299 && operand_equal_p (TREE_OPERAND (arg0
, 0),
13300 TREE_OPERAND (arg1
, 0), 0))
13301 return omit_one_operand_loc (loc
, type
, TREE_OPERAND (arg0
, 0),
13302 TREE_OPERAND (arg1
, 0));
13306 /* An ASSERT_EXPR should never be passed to fold_binary. */
13307 gcc_unreachable ();
13311 } /* switch (code) */
13314 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13315 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13319 contains_label_1 (tree
*tp
, int *walk_subtrees
, void *data ATTRIBUTE_UNUSED
)
13321 switch (TREE_CODE (*tp
))
13327 *walk_subtrees
= 0;
13329 /* ... fall through ... */
13336 /* Return whether the sub-tree ST contains a label which is accessible from
13337 outside the sub-tree. */
13340 contains_label_p (tree st
)
13343 (walk_tree_without_duplicates (&st
, contains_label_1
, NULL
) != NULL_TREE
);
13346 /* Fold a ternary expression of code CODE and type TYPE with operands
13347 OP0, OP1, and OP2. Return the folded expression if folding is
13348 successful. Otherwise, return NULL_TREE. */
13351 fold_ternary_loc (location_t loc
, enum tree_code code
, tree type
,
13352 tree op0
, tree op1
, tree op2
)
13355 tree arg0
= NULL_TREE
, arg1
= NULL_TREE
, arg2
= NULL_TREE
;
13356 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
13358 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
13359 && TREE_CODE_LENGTH (code
) == 3);
13361 /* Strip any conversions that don't change the mode. This is safe
13362 for every expression, except for a comparison expression because
13363 its signedness is derived from its operands. So, in the latter
13364 case, only strip conversions that don't change the signedness.
13366 Note that this is done as an internal manipulation within the
13367 constant folder, in order to find the simplest representation of
13368 the arguments so that their form can be studied. In any cases,
13369 the appropriate type conversions should be put back in the tree
13370 that will get out of the constant folder. */
13391 case COMPONENT_REF
:
13392 if (TREE_CODE (arg0
) == CONSTRUCTOR
13393 && ! type_contains_placeholder_p (TREE_TYPE (arg0
)))
13395 unsigned HOST_WIDE_INT idx
;
13397 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0
), idx
, field
, value
)
13404 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13405 so all simple results must be passed through pedantic_non_lvalue. */
13406 if (TREE_CODE (arg0
) == INTEGER_CST
)
13408 tree unused_op
= integer_zerop (arg0
) ? op1
: op2
;
13409 tem
= integer_zerop (arg0
) ? op2
: op1
;
13410 /* Only optimize constant conditions when the selected branch
13411 has the same type as the COND_EXPR. This avoids optimizing
13412 away "c ? x : throw", where the throw has a void type.
13413 Avoid throwing away that operand which contains label. */
13414 if ((!TREE_SIDE_EFFECTS (unused_op
)
13415 || !contains_label_p (unused_op
))
13416 && (! VOID_TYPE_P (TREE_TYPE (tem
))
13417 || VOID_TYPE_P (type
)))
13418 return pedantic_non_lvalue_loc (loc
, tem
);
13421 if (operand_equal_p (arg1
, op2
, 0))
13422 return pedantic_omit_one_operand_loc (loc
, type
, arg1
, arg0
);
13424 /* If we have A op B ? A : C, we may be able to convert this to a
13425 simpler expression, depending on the operation and the values
13426 of B and C. Signed zeros prevent all of these transformations,
13427 for reasons given above each one.
13429 Also try swapping the arguments and inverting the conditional. */
13430 if (COMPARISON_CLASS_P (arg0
)
13431 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
13432 arg1
, TREE_OPERAND (arg0
, 1))
13433 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1
))))
13435 tem
= fold_cond_expr_with_comparison (loc
, type
, arg0
, op1
, op2
);
13440 if (COMPARISON_CLASS_P (arg0
)
13441 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
13443 TREE_OPERAND (arg0
, 1))
13444 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2
))))
13446 location_t loc0
= expr_location_or (arg0
, loc
);
13447 tem
= fold_truth_not_expr (loc0
, arg0
);
13448 if (tem
&& COMPARISON_CLASS_P (tem
))
13450 tem
= fold_cond_expr_with_comparison (loc
, type
, tem
, op2
, op1
);
13456 /* If the second operand is simpler than the third, swap them
13457 since that produces better jump optimization results. */
13458 if (truth_value_p (TREE_CODE (arg0
))
13459 && tree_swap_operands_p (op1
, op2
, false))
13461 location_t loc0
= expr_location_or (arg0
, loc
);
13462 /* See if this can be inverted. If it can't, possibly because
13463 it was a floating-point inequality comparison, don't do
13465 tem
= fold_truth_not_expr (loc0
, arg0
);
13467 return fold_build3_loc (loc
, code
, type
, tem
, op2
, op1
);
13470 /* Convert A ? 1 : 0 to simply A. */
13471 if (integer_onep (op1
)
13472 && integer_zerop (op2
)
13473 /* If we try to convert OP0 to our type, the
13474 call to fold will try to move the conversion inside
13475 a COND, which will recurse. In that case, the COND_EXPR
13476 is probably the best choice, so leave it alone. */
13477 && type
== TREE_TYPE (arg0
))
13478 return pedantic_non_lvalue_loc (loc
, arg0
);
13480 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13481 over COND_EXPR in cases such as floating point comparisons. */
13482 if (integer_zerop (op1
)
13483 && integer_onep (op2
)
13484 && truth_value_p (TREE_CODE (arg0
)))
13485 return pedantic_non_lvalue_loc (loc
,
13486 fold_convert_loc (loc
, type
,
13487 invert_truthvalue_loc (loc
,
13490 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13491 if (TREE_CODE (arg0
) == LT_EXPR
13492 && integer_zerop (TREE_OPERAND (arg0
, 1))
13493 && integer_zerop (op2
)
13494 && (tem
= sign_bit_p (TREE_OPERAND (arg0
, 0), arg1
)))
13496 /* sign_bit_p only checks ARG1 bits within A's precision.
13497 If <sign bit of A> has wider type than A, bits outside
13498 of A's precision in <sign bit of A> need to be checked.
13499 If they are all 0, this optimization needs to be done
13500 in unsigned A's type, if they are all 1 in signed A's type,
13501 otherwise this can't be done. */
13502 if (TYPE_PRECISION (TREE_TYPE (tem
))
13503 < TYPE_PRECISION (TREE_TYPE (arg1
))
13504 && TYPE_PRECISION (TREE_TYPE (tem
))
13505 < TYPE_PRECISION (type
))
13507 unsigned HOST_WIDE_INT mask_lo
;
13508 HOST_WIDE_INT mask_hi
;
13509 int inner_width
, outer_width
;
13512 inner_width
= TYPE_PRECISION (TREE_TYPE (tem
));
13513 outer_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
13514 if (outer_width
> TYPE_PRECISION (type
))
13515 outer_width
= TYPE_PRECISION (type
);
13517 if (outer_width
> HOST_BITS_PER_WIDE_INT
)
13519 mask_hi
= ((unsigned HOST_WIDE_INT
) -1
13520 >> (2 * HOST_BITS_PER_WIDE_INT
- outer_width
));
13526 mask_lo
= ((unsigned HOST_WIDE_INT
) -1
13527 >> (HOST_BITS_PER_WIDE_INT
- outer_width
));
13529 if (inner_width
> HOST_BITS_PER_WIDE_INT
)
13531 mask_hi
&= ~((unsigned HOST_WIDE_INT
) -1
13532 >> (HOST_BITS_PER_WIDE_INT
- inner_width
));
13536 mask_lo
&= ~((unsigned HOST_WIDE_INT
) -1
13537 >> (HOST_BITS_PER_WIDE_INT
- inner_width
));
13539 if ((TREE_INT_CST_HIGH (arg1
) & mask_hi
) == mask_hi
13540 && (TREE_INT_CST_LOW (arg1
) & mask_lo
) == mask_lo
)
13542 tem_type
= signed_type_for (TREE_TYPE (tem
));
13543 tem
= fold_convert_loc (loc
, tem_type
, tem
);
13545 else if ((TREE_INT_CST_HIGH (arg1
) & mask_hi
) == 0
13546 && (TREE_INT_CST_LOW (arg1
) & mask_lo
) == 0)
13548 tem_type
= unsigned_type_for (TREE_TYPE (tem
));
13549 tem
= fold_convert_loc (loc
, tem_type
, tem
);
13557 fold_convert_loc (loc
, type
,
13558 fold_build2_loc (loc
, BIT_AND_EXPR
,
13559 TREE_TYPE (tem
), tem
,
13560 fold_convert_loc (loc
,
13565 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13566 already handled above. */
13567 if (TREE_CODE (arg0
) == BIT_AND_EXPR
13568 && integer_onep (TREE_OPERAND (arg0
, 1))
13569 && integer_zerop (op2
)
13570 && integer_pow2p (arg1
))
13572 tree tem
= TREE_OPERAND (arg0
, 0);
13574 if (TREE_CODE (tem
) == RSHIFT_EXPR
13575 && TREE_CODE (TREE_OPERAND (tem
, 1)) == INTEGER_CST
13576 && (unsigned HOST_WIDE_INT
) tree_log2 (arg1
) ==
13577 TREE_INT_CST_LOW (TREE_OPERAND (tem
, 1)))
13578 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
13579 TREE_OPERAND (tem
, 0), arg1
);
13582 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13583 is probably obsolete because the first operand should be a
13584 truth value (that's why we have the two cases above), but let's
13585 leave it in until we can confirm this for all front-ends. */
13586 if (integer_zerop (op2
)
13587 && TREE_CODE (arg0
) == NE_EXPR
13588 && integer_zerop (TREE_OPERAND (arg0
, 1))
13589 && integer_pow2p (arg1
)
13590 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
13591 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
13592 arg1
, OEP_ONLY_CONST
))
13593 return pedantic_non_lvalue_loc (loc
,
13594 fold_convert_loc (loc
, type
,
13595 TREE_OPERAND (arg0
, 0)));
13597 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13598 if (integer_zerop (op2
)
13599 && truth_value_p (TREE_CODE (arg0
))
13600 && truth_value_p (TREE_CODE (arg1
)))
13601 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
13602 fold_convert_loc (loc
, type
, arg0
),
13605 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13606 if (integer_onep (op2
)
13607 && truth_value_p (TREE_CODE (arg0
))
13608 && truth_value_p (TREE_CODE (arg1
)))
13610 location_t loc0
= expr_location_or (arg0
, loc
);
13611 /* Only perform transformation if ARG0 is easily inverted. */
13612 tem
= fold_truth_not_expr (loc0
, arg0
);
13614 return fold_build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
13615 fold_convert_loc (loc
, type
, tem
),
13619 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13620 if (integer_zerop (arg1
)
13621 && truth_value_p (TREE_CODE (arg0
))
13622 && truth_value_p (TREE_CODE (op2
)))
13624 location_t loc0
= expr_location_or (arg0
, loc
);
13625 /* Only perform transformation if ARG0 is easily inverted. */
13626 tem
= fold_truth_not_expr (loc0
, arg0
);
13628 return fold_build2_loc (loc
, TRUTH_ANDIF_EXPR
, type
,
13629 fold_convert_loc (loc
, type
, tem
),
13633 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13634 if (integer_onep (arg1
)
13635 && truth_value_p (TREE_CODE (arg0
))
13636 && truth_value_p (TREE_CODE (op2
)))
13637 return fold_build2_loc (loc
, TRUTH_ORIF_EXPR
, type
,
13638 fold_convert_loc (loc
, type
, arg0
),
13644 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13645 of fold_ternary on them. */
13646 gcc_unreachable ();
13648 case BIT_FIELD_REF
:
13649 if ((TREE_CODE (arg0
) == VECTOR_CST
13650 || TREE_CODE (arg0
) == CONSTRUCTOR
)
13651 && type
== TREE_TYPE (TREE_TYPE (arg0
)))
13653 unsigned HOST_WIDE_INT width
= tree_low_cst (arg1
, 1);
13654 unsigned HOST_WIDE_INT idx
= tree_low_cst (op2
, 1);
13657 && simple_cst_equal (arg1
, TYPE_SIZE (type
)) == 1
13658 && (idx
% width
) == 0
13659 && (idx
= idx
/ width
)
13660 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0
)))
13662 if (TREE_CODE (arg0
) == VECTOR_CST
)
13664 tree elements
= TREE_VECTOR_CST_ELTS (arg0
);
13665 while (idx
-- > 0 && elements
)
13666 elements
= TREE_CHAIN (elements
);
13668 return TREE_VALUE (elements
);
13670 else if (idx
< CONSTRUCTOR_NELTS (arg0
))
13671 return CONSTRUCTOR_ELT (arg0
, idx
)->value
;
13672 return build_zero_cst (type
);
13676 /* A bit-field-ref that referenced the full argument can be stripped. */
13677 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
13678 && TYPE_PRECISION (TREE_TYPE (arg0
)) == tree_low_cst (arg1
, 1)
13679 && integer_zerop (op2
))
13680 return fold_convert_loc (loc
, type
, arg0
);
13685 /* For integers we can decompose the FMA if possible. */
13686 if (TREE_CODE (arg0
) == INTEGER_CST
13687 && TREE_CODE (arg1
) == INTEGER_CST
)
13688 return fold_build2_loc (loc
, PLUS_EXPR
, type
,
13689 const_binop (MULT_EXPR
, arg0
, arg1
), arg2
);
13690 if (integer_zerop (arg2
))
13691 return fold_build2_loc (loc
, MULT_EXPR
, type
, arg0
, arg1
);
13693 return fold_fma (loc
, type
, arg0
, arg1
, arg2
);
13697 } /* switch (code) */
13700 /* Perform constant folding and related simplification of EXPR.
13701 The related simplifications include x*1 => x, x*0 => 0, etc.,
13702 and application of the associative law.
13703 NOP_EXPR conversions may be removed freely (as long as we
13704 are careful not to change the type of the overall expression).
13705 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13706 but we can constant-fold them if they have constant operands. */
13708 #ifdef ENABLE_FOLD_CHECKING
13709 # define fold(x) fold_1 (x)
13710 static tree
fold_1 (tree
);
13716 const tree t
= expr
;
13717 enum tree_code code
= TREE_CODE (t
);
13718 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
13720 location_t loc
= EXPR_LOCATION (expr
);
13722 /* Return right away if a constant. */
13723 if (kind
== tcc_constant
)
13726 /* CALL_EXPR-like objects with variable numbers of operands are
13727 treated specially. */
13728 if (kind
== tcc_vl_exp
)
13730 if (code
== CALL_EXPR
)
13732 tem
= fold_call_expr (loc
, expr
, false);
13733 return tem
? tem
: expr
;
13738 if (IS_EXPR_CODE_CLASS (kind
))
13740 tree type
= TREE_TYPE (t
);
13741 tree op0
, op1
, op2
;
13743 switch (TREE_CODE_LENGTH (code
))
13746 op0
= TREE_OPERAND (t
, 0);
13747 tem
= fold_unary_loc (loc
, code
, type
, op0
);
13748 return tem
? tem
: expr
;
13750 op0
= TREE_OPERAND (t
, 0);
13751 op1
= TREE_OPERAND (t
, 1);
13752 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
13753 return tem
? tem
: expr
;
13755 op0
= TREE_OPERAND (t
, 0);
13756 op1
= TREE_OPERAND (t
, 1);
13757 op2
= TREE_OPERAND (t
, 2);
13758 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
13759 return tem
? tem
: expr
;
13769 tree op0
= TREE_OPERAND (t
, 0);
13770 tree op1
= TREE_OPERAND (t
, 1);
13772 if (TREE_CODE (op1
) == INTEGER_CST
13773 && TREE_CODE (op0
) == CONSTRUCTOR
13774 && ! type_contains_placeholder_p (TREE_TYPE (op0
)))
13776 VEC(constructor_elt
,gc
) *elts
= CONSTRUCTOR_ELTS (op0
);
13777 unsigned HOST_WIDE_INT end
= VEC_length (constructor_elt
, elts
);
13778 unsigned HOST_WIDE_INT begin
= 0;
13780 /* Find a matching index by means of a binary search. */
13781 while (begin
!= end
)
13783 unsigned HOST_WIDE_INT middle
= (begin
+ end
) / 2;
13784 tree index
= VEC_index (constructor_elt
, elts
, middle
)->index
;
13786 if (TREE_CODE (index
) == INTEGER_CST
13787 && tree_int_cst_lt (index
, op1
))
13788 begin
= middle
+ 1;
13789 else if (TREE_CODE (index
) == INTEGER_CST
13790 && tree_int_cst_lt (op1
, index
))
13792 else if (TREE_CODE (index
) == RANGE_EXPR
13793 && tree_int_cst_lt (TREE_OPERAND (index
, 1), op1
))
13794 begin
= middle
+ 1;
13795 else if (TREE_CODE (index
) == RANGE_EXPR
13796 && tree_int_cst_lt (op1
, TREE_OPERAND (index
, 0)))
13799 return VEC_index (constructor_elt
, elts
, middle
)->value
;
13807 return fold (DECL_INITIAL (t
));
13811 } /* switch (code) */
13814 #ifdef ENABLE_FOLD_CHECKING
13817 static void fold_checksum_tree (const_tree
, struct md5_ctx
*, htab_t
);
13818 static void fold_check_failed (const_tree
, const_tree
);
13819 void print_fold_checksum (const_tree
);
13821 /* When --enable-checking=fold, compute a digest of expr before
13822 and after actual fold call to see if fold did not accidentally
13823 change original expr. */
13829 struct md5_ctx ctx
;
13830 unsigned char checksum_before
[16], checksum_after
[16];
13833 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
13834 md5_init_ctx (&ctx
);
13835 fold_checksum_tree (expr
, &ctx
, ht
);
13836 md5_finish_ctx (&ctx
, checksum_before
);
13839 ret
= fold_1 (expr
);
13841 md5_init_ctx (&ctx
);
13842 fold_checksum_tree (expr
, &ctx
, ht
);
13843 md5_finish_ctx (&ctx
, checksum_after
);
13846 if (memcmp (checksum_before
, checksum_after
, 16))
13847 fold_check_failed (expr
, ret
);
13853 print_fold_checksum (const_tree expr
)
13855 struct md5_ctx ctx
;
13856 unsigned char checksum
[16], cnt
;
13859 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
13860 md5_init_ctx (&ctx
);
13861 fold_checksum_tree (expr
, &ctx
, ht
);
13862 md5_finish_ctx (&ctx
, checksum
);
13864 for (cnt
= 0; cnt
< 16; ++cnt
)
13865 fprintf (stderr
, "%02x", checksum
[cnt
]);
13866 putc ('\n', stderr
);
13870 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED
, const_tree ret ATTRIBUTE_UNUSED
)
13872 internal_error ("fold check: original tree changed by fold");
13876 fold_checksum_tree (const_tree expr
, struct md5_ctx
*ctx
, htab_t ht
)
13879 enum tree_code code
;
13880 union tree_node buf
;
13886 slot
= (void **) htab_find_slot (ht
, expr
, INSERT
);
13889 *slot
= CONST_CAST_TREE (expr
);
13890 code
= TREE_CODE (expr
);
13891 if (TREE_CODE_CLASS (code
) == tcc_declaration
13892 && DECL_ASSEMBLER_NAME_SET_P (expr
))
13894 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13895 memcpy ((char *) &buf
, expr
, tree_size (expr
));
13896 SET_DECL_ASSEMBLER_NAME ((tree
)&buf
, NULL
);
13897 expr
= (tree
) &buf
;
13899 else if (TREE_CODE_CLASS (code
) == tcc_type
13900 && (TYPE_POINTER_TO (expr
)
13901 || TYPE_REFERENCE_TO (expr
)
13902 || TYPE_CACHED_VALUES_P (expr
)
13903 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr
)
13904 || TYPE_NEXT_VARIANT (expr
)))
13906 /* Allow these fields to be modified. */
13908 memcpy ((char *) &buf
, expr
, tree_size (expr
));
13909 expr
= tmp
= (tree
) &buf
;
13910 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp
) = 0;
13911 TYPE_POINTER_TO (tmp
) = NULL
;
13912 TYPE_REFERENCE_TO (tmp
) = NULL
;
13913 TYPE_NEXT_VARIANT (tmp
) = NULL
;
13914 if (TYPE_CACHED_VALUES_P (tmp
))
13916 TYPE_CACHED_VALUES_P (tmp
) = 0;
13917 TYPE_CACHED_VALUES (tmp
) = NULL
;
13920 md5_process_bytes (expr
, tree_size (expr
), ctx
);
13921 fold_checksum_tree (TREE_TYPE (expr
), ctx
, ht
);
13922 if (TREE_CODE_CLASS (code
) != tcc_type
13923 && TREE_CODE_CLASS (code
) != tcc_declaration
13924 && code
!= TREE_LIST
13925 && code
!= SSA_NAME
13926 && CODE_CONTAINS_STRUCT (code
, TS_COMMON
))
13927 fold_checksum_tree (TREE_CHAIN (expr
), ctx
, ht
);
13928 switch (TREE_CODE_CLASS (code
))
13934 md5_process_bytes (TREE_STRING_POINTER (expr
),
13935 TREE_STRING_LENGTH (expr
), ctx
);
13938 fold_checksum_tree (TREE_REALPART (expr
), ctx
, ht
);
13939 fold_checksum_tree (TREE_IMAGPART (expr
), ctx
, ht
);
13942 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr
), ctx
, ht
);
13948 case tcc_exceptional
:
13952 fold_checksum_tree (TREE_PURPOSE (expr
), ctx
, ht
);
13953 fold_checksum_tree (TREE_VALUE (expr
), ctx
, ht
);
13954 expr
= TREE_CHAIN (expr
);
13955 goto recursive_label
;
13958 for (i
= 0; i
< TREE_VEC_LENGTH (expr
); ++i
)
13959 fold_checksum_tree (TREE_VEC_ELT (expr
, i
), ctx
, ht
);
13965 case tcc_expression
:
13966 case tcc_reference
:
13967 case tcc_comparison
:
13970 case tcc_statement
:
13972 len
= TREE_OPERAND_LENGTH (expr
);
13973 for (i
= 0; i
< len
; ++i
)
13974 fold_checksum_tree (TREE_OPERAND (expr
, i
), ctx
, ht
);
13976 case tcc_declaration
:
13977 fold_checksum_tree (DECL_NAME (expr
), ctx
, ht
);
13978 fold_checksum_tree (DECL_CONTEXT (expr
), ctx
, ht
);
13979 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_COMMON
))
13981 fold_checksum_tree (DECL_SIZE (expr
), ctx
, ht
);
13982 fold_checksum_tree (DECL_SIZE_UNIT (expr
), ctx
, ht
);
13983 fold_checksum_tree (DECL_INITIAL (expr
), ctx
, ht
);
13984 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr
), ctx
, ht
);
13985 fold_checksum_tree (DECL_ATTRIBUTES (expr
), ctx
, ht
);
13987 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_WITH_VIS
))
13988 fold_checksum_tree (DECL_SECTION_NAME (expr
), ctx
, ht
);
13990 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr
), TS_DECL_NON_COMMON
))
13992 fold_checksum_tree (DECL_VINDEX (expr
), ctx
, ht
);
13993 fold_checksum_tree (DECL_RESULT_FLD (expr
), ctx
, ht
);
13994 fold_checksum_tree (DECL_ARGUMENT_FLD (expr
), ctx
, ht
);
13998 if (TREE_CODE (expr
) == ENUMERAL_TYPE
)
13999 fold_checksum_tree (TYPE_VALUES (expr
), ctx
, ht
);
14000 fold_checksum_tree (TYPE_SIZE (expr
), ctx
, ht
);
14001 fold_checksum_tree (TYPE_SIZE_UNIT (expr
), ctx
, ht
);
14002 fold_checksum_tree (TYPE_ATTRIBUTES (expr
), ctx
, ht
);
14003 fold_checksum_tree (TYPE_NAME (expr
), ctx
, ht
);
14004 if (INTEGRAL_TYPE_P (expr
)
14005 || SCALAR_FLOAT_TYPE_P (expr
))
14007 fold_checksum_tree (TYPE_MIN_VALUE (expr
), ctx
, ht
);
14008 fold_checksum_tree (TYPE_MAX_VALUE (expr
), ctx
, ht
);
14010 fold_checksum_tree (TYPE_MAIN_VARIANT (expr
), ctx
, ht
);
14011 if (TREE_CODE (expr
) == RECORD_TYPE
14012 || TREE_CODE (expr
) == UNION_TYPE
14013 || TREE_CODE (expr
) == QUAL_UNION_TYPE
)
14014 fold_checksum_tree (TYPE_BINFO (expr
), ctx
, ht
);
14015 fold_checksum_tree (TYPE_CONTEXT (expr
), ctx
, ht
);
14022 /* Helper function for outputting the checksum of a tree T. When
14023 debugging with gdb, you can "define mynext" to be "next" followed
14024 by "call debug_fold_checksum (op0)", then just trace down till the
14027 DEBUG_FUNCTION
void
14028 debug_fold_checksum (const_tree t
)
14031 unsigned char checksum
[16];
14032 struct md5_ctx ctx
;
14033 htab_t ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
14035 md5_init_ctx (&ctx
);
14036 fold_checksum_tree (t
, &ctx
, ht
);
14037 md5_finish_ctx (&ctx
, checksum
);
14040 for (i
= 0; i
< 16; i
++)
14041 fprintf (stderr
, "%d ", checksum
[i
]);
14043 fprintf (stderr
, "\n");
14048 /* Fold a unary tree expression with code CODE of type TYPE with an
14049 operand OP0. LOC is the location of the resulting expression.
14050 Return a folded expression if successful. Otherwise, return a tree
14051 expression with code CODE of type TYPE with an operand OP0. */
14054 fold_build1_stat_loc (location_t loc
,
14055 enum tree_code code
, tree type
, tree op0 MEM_STAT_DECL
)
14058 #ifdef ENABLE_FOLD_CHECKING
14059 unsigned char checksum_before
[16], checksum_after
[16];
14060 struct md5_ctx ctx
;
14063 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
14064 md5_init_ctx (&ctx
);
14065 fold_checksum_tree (op0
, &ctx
, ht
);
14066 md5_finish_ctx (&ctx
, checksum_before
);
14070 tem
= fold_unary_loc (loc
, code
, type
, op0
);
14072 tem
= build1_stat_loc (loc
, code
, type
, op0 PASS_MEM_STAT
);
14074 #ifdef ENABLE_FOLD_CHECKING
14075 md5_init_ctx (&ctx
);
14076 fold_checksum_tree (op0
, &ctx
, ht
);
14077 md5_finish_ctx (&ctx
, checksum_after
);
14080 if (memcmp (checksum_before
, checksum_after
, 16))
14081 fold_check_failed (op0
, tem
);
14086 /* Fold a binary tree expression with code CODE of type TYPE with
14087 operands OP0 and OP1. LOC is the location of the resulting
14088 expression. Return a folded expression if successful. Otherwise,
14089 return a tree expression with code CODE of type TYPE with operands
14093 fold_build2_stat_loc (location_t loc
,
14094 enum tree_code code
, tree type
, tree op0
, tree op1
14098 #ifdef ENABLE_FOLD_CHECKING
14099 unsigned char checksum_before_op0
[16],
14100 checksum_before_op1
[16],
14101 checksum_after_op0
[16],
14102 checksum_after_op1
[16];
14103 struct md5_ctx ctx
;
14106 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
14107 md5_init_ctx (&ctx
);
14108 fold_checksum_tree (op0
, &ctx
, ht
);
14109 md5_finish_ctx (&ctx
, checksum_before_op0
);
14112 md5_init_ctx (&ctx
);
14113 fold_checksum_tree (op1
, &ctx
, ht
);
14114 md5_finish_ctx (&ctx
, checksum_before_op1
);
14118 tem
= fold_binary_loc (loc
, code
, type
, op0
, op1
);
14120 tem
= build2_stat_loc (loc
, code
, type
, op0
, op1 PASS_MEM_STAT
);
14122 #ifdef ENABLE_FOLD_CHECKING
14123 md5_init_ctx (&ctx
);
14124 fold_checksum_tree (op0
, &ctx
, ht
);
14125 md5_finish_ctx (&ctx
, checksum_after_op0
);
14128 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
14129 fold_check_failed (op0
, tem
);
14131 md5_init_ctx (&ctx
);
14132 fold_checksum_tree (op1
, &ctx
, ht
);
14133 md5_finish_ctx (&ctx
, checksum_after_op1
);
14136 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
14137 fold_check_failed (op1
, tem
);
14142 /* Fold a ternary tree expression with code CODE of type TYPE with
14143 operands OP0, OP1, and OP2. Return a folded expression if
14144 successful. Otherwise, return a tree expression with code CODE of
14145 type TYPE with operands OP0, OP1, and OP2. */
14148 fold_build3_stat_loc (location_t loc
, enum tree_code code
, tree type
,
14149 tree op0
, tree op1
, tree op2 MEM_STAT_DECL
)
14152 #ifdef ENABLE_FOLD_CHECKING
14153 unsigned char checksum_before_op0
[16],
14154 checksum_before_op1
[16],
14155 checksum_before_op2
[16],
14156 checksum_after_op0
[16],
14157 checksum_after_op1
[16],
14158 checksum_after_op2
[16];
14159 struct md5_ctx ctx
;
14162 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
14163 md5_init_ctx (&ctx
);
14164 fold_checksum_tree (op0
, &ctx
, ht
);
14165 md5_finish_ctx (&ctx
, checksum_before_op0
);
14168 md5_init_ctx (&ctx
);
14169 fold_checksum_tree (op1
, &ctx
, ht
);
14170 md5_finish_ctx (&ctx
, checksum_before_op1
);
14173 md5_init_ctx (&ctx
);
14174 fold_checksum_tree (op2
, &ctx
, ht
);
14175 md5_finish_ctx (&ctx
, checksum_before_op2
);
14179 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
14180 tem
= fold_ternary_loc (loc
, code
, type
, op0
, op1
, op2
);
14182 tem
= build3_stat_loc (loc
, code
, type
, op0
, op1
, op2 PASS_MEM_STAT
);
14184 #ifdef ENABLE_FOLD_CHECKING
14185 md5_init_ctx (&ctx
);
14186 fold_checksum_tree (op0
, &ctx
, ht
);
14187 md5_finish_ctx (&ctx
, checksum_after_op0
);
14190 if (memcmp (checksum_before_op0
, checksum_after_op0
, 16))
14191 fold_check_failed (op0
, tem
);
14193 md5_init_ctx (&ctx
);
14194 fold_checksum_tree (op1
, &ctx
, ht
);
14195 md5_finish_ctx (&ctx
, checksum_after_op1
);
14198 if (memcmp (checksum_before_op1
, checksum_after_op1
, 16))
14199 fold_check_failed (op1
, tem
);
14201 md5_init_ctx (&ctx
);
14202 fold_checksum_tree (op2
, &ctx
, ht
);
14203 md5_finish_ctx (&ctx
, checksum_after_op2
);
14206 if (memcmp (checksum_before_op2
, checksum_after_op2
, 16))
14207 fold_check_failed (op2
, tem
);
14212 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14213 arguments in ARGARRAY, and a null static chain.
14214 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14215 of type TYPE from the given operands as constructed by build_call_array. */
14218 fold_build_call_array_loc (location_t loc
, tree type
, tree fn
,
14219 int nargs
, tree
*argarray
)
14222 #ifdef ENABLE_FOLD_CHECKING
14223 unsigned char checksum_before_fn
[16],
14224 checksum_before_arglist
[16],
14225 checksum_after_fn
[16],
14226 checksum_after_arglist
[16];
14227 struct md5_ctx ctx
;
14231 ht
= htab_create (32, htab_hash_pointer
, htab_eq_pointer
, NULL
);
14232 md5_init_ctx (&ctx
);
14233 fold_checksum_tree (fn
, &ctx
, ht
);
14234 md5_finish_ctx (&ctx
, checksum_before_fn
);
14237 md5_init_ctx (&ctx
);
14238 for (i
= 0; i
< nargs
; i
++)
14239 fold_checksum_tree (argarray
[i
], &ctx
, ht
);
14240 md5_finish_ctx (&ctx
, checksum_before_arglist
);
14244 tem
= fold_builtin_call_array (loc
, type
, fn
, nargs
, argarray
);
14246 #ifdef ENABLE_FOLD_CHECKING
14247 md5_init_ctx (&ctx
);
14248 fold_checksum_tree (fn
, &ctx
, ht
);
14249 md5_finish_ctx (&ctx
, checksum_after_fn
);
14252 if (memcmp (checksum_before_fn
, checksum_after_fn
, 16))
14253 fold_check_failed (fn
, tem
);
14255 md5_init_ctx (&ctx
);
14256 for (i
= 0; i
< nargs
; i
++)
14257 fold_checksum_tree (argarray
[i
], &ctx
, ht
);
14258 md5_finish_ctx (&ctx
, checksum_after_arglist
);
14261 if (memcmp (checksum_before_arglist
, checksum_after_arglist
, 16))
14262 fold_check_failed (NULL_TREE
, tem
);
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;
14291 fold_build1_initializer_loc (location_t loc
, enum tree_code code
,
14292 tree type
, tree op
)
14297 result
= fold_build1_loc (loc
, code
, type
, op
);
14304 fold_build2_initializer_loc (location_t loc
, enum tree_code code
,
14305 tree type
, tree op0
, tree op1
)
14310 result
= fold_build2_loc (loc
, code
, type
, op0
, op1
);
14317 fold_build3_initializer_loc (location_t loc
, enum tree_code code
,
14318 tree type
, tree op0
, tree op1
, tree op2
)
14323 result
= fold_build3_loc (loc
, code
, type
, op0
, op1
, op2
);
14330 fold_build_call_array_initializer_loc (location_t loc
, tree type
, tree fn
,
14331 int nargs
, tree
*argarray
)
14336 result
= fold_build_call_array_loc (loc
, type
, fn
, nargs
, argarray
);
14342 #undef START_FOLD_INIT
14343 #undef END_FOLD_INIT
14345 /* Determine if first argument is a multiple of second argument. Return 0 if
14346 it is not, or we cannot easily determined it to be.
14348 An example of the sort of thing we care about (at this point; this routine
14349 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14350 fold cases do now) is discovering that
14352 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14358 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14360 This code also handles discovering that
14362 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14364 is a multiple of 8 so we don't have to worry about dealing with a
14365 possible remainder.
14367 Note that we *look* inside a SAVE_EXPR only to determine how it was
14368 calculated; it is not safe for fold to do much of anything else with the
14369 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14370 at run time. For example, the latter example above *cannot* be implemented
14371 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14372 evaluation time of the original SAVE_EXPR is not necessarily the same at
14373 the time the new expression is evaluated. The only optimization of this
14374 sort that would be valid is changing
14376 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14380 SAVE_EXPR (I) * SAVE_EXPR (J)
14382 (where the same SAVE_EXPR (J) is used in the original and the
14383 transformed version). */
14386 multiple_of_p (tree type
, const_tree top
, const_tree bottom
)
14388 if (operand_equal_p (top
, bottom
, 0))
14391 if (TREE_CODE (type
) != INTEGER_TYPE
)
14394 switch (TREE_CODE (top
))
14397 /* Bitwise and provides a power of two multiple. If the mask is
14398 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14399 if (!integer_pow2p (bottom
))
14404 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
14405 || multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
14409 return (multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
)
14410 && multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
));
14413 if (TREE_CODE (TREE_OPERAND (top
, 1)) == INTEGER_CST
)
14417 op1
= TREE_OPERAND (top
, 1);
14418 /* const_binop may not detect overflow correctly,
14419 so check for it explicitly here. */
14420 if (TYPE_PRECISION (TREE_TYPE (size_one_node
))
14421 > TREE_INT_CST_LOW (op1
)
14422 && TREE_INT_CST_HIGH (op1
) == 0
14423 && 0 != (t1
= fold_convert (type
,
14424 const_binop (LSHIFT_EXPR
,
14427 && !TREE_OVERFLOW (t1
))
14428 return multiple_of_p (type
, t1
, bottom
);
14433 /* Can't handle conversions from non-integral or wider integral type. */
14434 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top
, 0))) != INTEGER_TYPE
)
14435 || (TYPE_PRECISION (type
)
14436 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top
, 0)))))
14439 /* .. fall through ... */
14442 return multiple_of_p (type
, TREE_OPERAND (top
, 0), bottom
);
14445 return (multiple_of_p (type
, TREE_OPERAND (top
, 1), bottom
)
14446 && multiple_of_p (type
, TREE_OPERAND (top
, 2), bottom
));
14449 if (TREE_CODE (bottom
) != INTEGER_CST
14450 || integer_zerop (bottom
)
14451 || (TYPE_UNSIGNED (type
)
14452 && (tree_int_cst_sgn (top
) < 0
14453 || tree_int_cst_sgn (bottom
) < 0)))
14455 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR
,
14463 /* Return true if CODE or TYPE is known to be non-negative. */
14466 tree_simple_nonnegative_warnv_p (enum tree_code code
, tree type
)
14468 if ((TYPE_PRECISION (type
) != 1 || TYPE_UNSIGNED (type
))
14469 && truth_value_p (code
))
14470 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14471 have a signed:1 type (where the value is -1 and 0). */
14476 /* Return true if (CODE OP0) is known to be non-negative. If the return
14477 value is based on the assumption that signed overflow is undefined,
14478 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14479 *STRICT_OVERFLOW_P. */
14482 tree_unary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
14483 bool *strict_overflow_p
)
14485 if (TYPE_UNSIGNED (type
))
14491 /* We can't return 1 if flag_wrapv is set because
14492 ABS_EXPR<INT_MIN> = INT_MIN. */
14493 if (!INTEGRAL_TYPE_P (type
))
14495 if (TYPE_OVERFLOW_UNDEFINED (type
))
14497 *strict_overflow_p
= true;
14502 case NON_LVALUE_EXPR
:
14504 case FIX_TRUNC_EXPR
:
14505 return tree_expr_nonnegative_warnv_p (op0
,
14506 strict_overflow_p
);
14510 tree inner_type
= TREE_TYPE (op0
);
14511 tree outer_type
= type
;
14513 if (TREE_CODE (outer_type
) == REAL_TYPE
)
14515 if (TREE_CODE (inner_type
) == REAL_TYPE
)
14516 return tree_expr_nonnegative_warnv_p (op0
,
14517 strict_overflow_p
);
14518 if (TREE_CODE (inner_type
) == INTEGER_TYPE
)
14520 if (TYPE_UNSIGNED (inner_type
))
14522 return tree_expr_nonnegative_warnv_p (op0
,
14523 strict_overflow_p
);
14526 else if (TREE_CODE (outer_type
) == INTEGER_TYPE
)
14528 if (TREE_CODE (inner_type
) == REAL_TYPE
)
14529 return tree_expr_nonnegative_warnv_p (op0
,
14530 strict_overflow_p
);
14531 if (TREE_CODE (inner_type
) == INTEGER_TYPE
)
14532 return TYPE_PRECISION (inner_type
) < TYPE_PRECISION (outer_type
)
14533 && TYPE_UNSIGNED (inner_type
);
14539 return tree_simple_nonnegative_warnv_p (code
, type
);
14542 /* We don't know sign of `t', so be conservative and return false. */
14546 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14547 value is based on the assumption that signed overflow is undefined,
14548 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14549 *STRICT_OVERFLOW_P. */
14552 tree_binary_nonnegative_warnv_p (enum tree_code code
, tree type
, tree op0
,
14553 tree op1
, bool *strict_overflow_p
)
14555 if (TYPE_UNSIGNED (type
))
14560 case POINTER_PLUS_EXPR
:
14562 if (FLOAT_TYPE_P (type
))
14563 return (tree_expr_nonnegative_warnv_p (op0
,
14565 && tree_expr_nonnegative_warnv_p (op1
,
14566 strict_overflow_p
));
14568 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14569 both unsigned and at least 2 bits shorter than the result. */
14570 if (TREE_CODE (type
) == INTEGER_TYPE
14571 && TREE_CODE (op0
) == NOP_EXPR
14572 && TREE_CODE (op1
) == NOP_EXPR
)
14574 tree inner1
= TREE_TYPE (TREE_OPERAND (op0
, 0));
14575 tree inner2
= TREE_TYPE (TREE_OPERAND (op1
, 0));
14576 if (TREE_CODE (inner1
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner1
)
14577 && TREE_CODE (inner2
) == INTEGER_TYPE
&& TYPE_UNSIGNED (inner2
))
14579 unsigned int prec
= MAX (TYPE_PRECISION (inner1
),
14580 TYPE_PRECISION (inner2
)) + 1;
14581 return prec
< TYPE_PRECISION (type
);
14587 if (FLOAT_TYPE_P (type
))
14589 /* x * x for floating point x is always non-negative. */
14590 if (operand_equal_p (op0
, op1
, 0))
14592 return (tree_expr_nonnegative_warnv_p (op0
,
14594 && tree_expr_nonnegative_warnv_p (op1
,
14595 strict_overflow_p
));
14598 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14599 both unsigned and their total bits is shorter than the result. */
14600 if (TREE_CODE (type
) == INTEGER_TYPE
14601 && (TREE_CODE (op0
) == NOP_EXPR
|| TREE_CODE (op0
) == INTEGER_CST
)
14602 && (TREE_CODE (op1
) == NOP_EXPR
|| TREE_CODE (op1
) == INTEGER_CST
))
14604 tree inner0
= (TREE_CODE (op0
) == NOP_EXPR
)
14605 ? TREE_TYPE (TREE_OPERAND (op0
, 0))
14607 tree inner1
= (TREE_CODE (op1
) == NOP_EXPR
)
14608 ? TREE_TYPE (TREE_OPERAND (op1
, 0))
14611 bool unsigned0
= TYPE_UNSIGNED (inner0
);
14612 bool unsigned1
= TYPE_UNSIGNED (inner1
);
14614 if (TREE_CODE (op0
) == INTEGER_CST
)
14615 unsigned0
= unsigned0
|| tree_int_cst_sgn (op0
) >= 0;
14617 if (TREE_CODE (op1
) == INTEGER_CST
)
14618 unsigned1
= unsigned1
|| tree_int_cst_sgn (op1
) >= 0;
14620 if (TREE_CODE (inner0
) == INTEGER_TYPE
&& unsigned0
14621 && TREE_CODE (inner1
) == INTEGER_TYPE
&& unsigned1
)
14623 unsigned int precision0
= (TREE_CODE (op0
) == INTEGER_CST
)
14624 ? tree_int_cst_min_precision (op0
, /*unsignedp=*/true)
14625 : TYPE_PRECISION (inner0
);
14627 unsigned int precision1
= (TREE_CODE (op1
) == INTEGER_CST
)
14628 ? tree_int_cst_min_precision (op1
, /*unsignedp=*/true)
14629 : TYPE_PRECISION (inner1
);
14631 return precision0
+ precision1
< TYPE_PRECISION (type
);
14638 return (tree_expr_nonnegative_warnv_p (op0
,
14640 || tree_expr_nonnegative_warnv_p (op1
,
14641 strict_overflow_p
));
14647 case TRUNC_DIV_EXPR
:
14648 case CEIL_DIV_EXPR
:
14649 case FLOOR_DIV_EXPR
:
14650 case ROUND_DIV_EXPR
:
14651 return (tree_expr_nonnegative_warnv_p (op0
,
14653 && tree_expr_nonnegative_warnv_p (op1
,
14654 strict_overflow_p
));
14656 case TRUNC_MOD_EXPR
:
14657 case CEIL_MOD_EXPR
:
14658 case FLOOR_MOD_EXPR
:
14659 case ROUND_MOD_EXPR
:
14660 return tree_expr_nonnegative_warnv_p (op0
,
14661 strict_overflow_p
);
14663 return tree_simple_nonnegative_warnv_p (code
, type
);
14666 /* We don't know sign of `t', so be conservative and return false. */
14670 /* Return true if T is known to be non-negative. If the return
14671 value is based on the assumption that signed overflow is undefined,
14672 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14673 *STRICT_OVERFLOW_P. */
14676 tree_single_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
)
14678 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
14681 switch (TREE_CODE (t
))
14684 return tree_int_cst_sgn (t
) >= 0;
14687 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t
));
14690 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t
));
14693 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
14695 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 2),
14696 strict_overflow_p
));
14698 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
),
14701 /* We don't know sign of `t', so be conservative and return false. */
14705 /* Return true if T is known to be non-negative. If the return
14706 value is based on the assumption that signed overflow is undefined,
14707 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14708 *STRICT_OVERFLOW_P. */
14711 tree_call_nonnegative_warnv_p (tree type
, tree fndecl
,
14712 tree arg0
, tree arg1
, bool *strict_overflow_p
)
14714 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
14715 switch (DECL_FUNCTION_CODE (fndecl
))
14717 CASE_FLT_FN (BUILT_IN_ACOS
):
14718 CASE_FLT_FN (BUILT_IN_ACOSH
):
14719 CASE_FLT_FN (BUILT_IN_CABS
):
14720 CASE_FLT_FN (BUILT_IN_COSH
):
14721 CASE_FLT_FN (BUILT_IN_ERFC
):
14722 CASE_FLT_FN (BUILT_IN_EXP
):
14723 CASE_FLT_FN (BUILT_IN_EXP10
):
14724 CASE_FLT_FN (BUILT_IN_EXP2
):
14725 CASE_FLT_FN (BUILT_IN_FABS
):
14726 CASE_FLT_FN (BUILT_IN_FDIM
):
14727 CASE_FLT_FN (BUILT_IN_HYPOT
):
14728 CASE_FLT_FN (BUILT_IN_POW10
):
14729 CASE_INT_FN (BUILT_IN_FFS
):
14730 CASE_INT_FN (BUILT_IN_PARITY
):
14731 CASE_INT_FN (BUILT_IN_POPCOUNT
):
14732 case BUILT_IN_BSWAP32
:
14733 case BUILT_IN_BSWAP64
:
14737 CASE_FLT_FN (BUILT_IN_SQRT
):
14738 /* sqrt(-0.0) is -0.0. */
14739 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type
)))
14741 return tree_expr_nonnegative_warnv_p (arg0
,
14742 strict_overflow_p
);
14744 CASE_FLT_FN (BUILT_IN_ASINH
):
14745 CASE_FLT_FN (BUILT_IN_ATAN
):
14746 CASE_FLT_FN (BUILT_IN_ATANH
):
14747 CASE_FLT_FN (BUILT_IN_CBRT
):
14748 CASE_FLT_FN (BUILT_IN_CEIL
):
14749 CASE_FLT_FN (BUILT_IN_ERF
):
14750 CASE_FLT_FN (BUILT_IN_EXPM1
):
14751 CASE_FLT_FN (BUILT_IN_FLOOR
):
14752 CASE_FLT_FN (BUILT_IN_FMOD
):
14753 CASE_FLT_FN (BUILT_IN_FREXP
):
14754 CASE_FLT_FN (BUILT_IN_ICEIL
):
14755 CASE_FLT_FN (BUILT_IN_IFLOOR
):
14756 CASE_FLT_FN (BUILT_IN_IRINT
):
14757 CASE_FLT_FN (BUILT_IN_IROUND
):
14758 CASE_FLT_FN (BUILT_IN_LCEIL
):
14759 CASE_FLT_FN (BUILT_IN_LDEXP
):
14760 CASE_FLT_FN (BUILT_IN_LFLOOR
):
14761 CASE_FLT_FN (BUILT_IN_LLCEIL
):
14762 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
14763 CASE_FLT_FN (BUILT_IN_LLRINT
):
14764 CASE_FLT_FN (BUILT_IN_LLROUND
):
14765 CASE_FLT_FN (BUILT_IN_LRINT
):
14766 CASE_FLT_FN (BUILT_IN_LROUND
):
14767 CASE_FLT_FN (BUILT_IN_MODF
):
14768 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
14769 CASE_FLT_FN (BUILT_IN_RINT
):
14770 CASE_FLT_FN (BUILT_IN_ROUND
):
14771 CASE_FLT_FN (BUILT_IN_SCALB
):
14772 CASE_FLT_FN (BUILT_IN_SCALBLN
):
14773 CASE_FLT_FN (BUILT_IN_SCALBN
):
14774 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
14775 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
14776 CASE_FLT_FN (BUILT_IN_SINH
):
14777 CASE_FLT_FN (BUILT_IN_TANH
):
14778 CASE_FLT_FN (BUILT_IN_TRUNC
):
14779 /* True if the 1st argument is nonnegative. */
14780 return tree_expr_nonnegative_warnv_p (arg0
,
14781 strict_overflow_p
);
14783 CASE_FLT_FN (BUILT_IN_FMAX
):
14784 /* True if the 1st OR 2nd arguments are nonnegative. */
14785 return (tree_expr_nonnegative_warnv_p (arg0
,
14787 || (tree_expr_nonnegative_warnv_p (arg1
,
14788 strict_overflow_p
)));
14790 CASE_FLT_FN (BUILT_IN_FMIN
):
14791 /* True if the 1st AND 2nd arguments are nonnegative. */
14792 return (tree_expr_nonnegative_warnv_p (arg0
,
14794 && (tree_expr_nonnegative_warnv_p (arg1
,
14795 strict_overflow_p
)));
14797 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
14798 /* True if the 2nd argument is nonnegative. */
14799 return tree_expr_nonnegative_warnv_p (arg1
,
14800 strict_overflow_p
);
14802 CASE_FLT_FN (BUILT_IN_POWI
):
14803 /* True if the 1st argument is nonnegative or the second
14804 argument is an even integer. */
14805 if (TREE_CODE (arg1
) == INTEGER_CST
14806 && (TREE_INT_CST_LOW (arg1
) & 1) == 0)
14808 return tree_expr_nonnegative_warnv_p (arg0
,
14809 strict_overflow_p
);
14811 CASE_FLT_FN (BUILT_IN_POW
):
14812 /* True if the 1st argument is nonnegative or the second
14813 argument is an even integer valued real. */
14814 if (TREE_CODE (arg1
) == REAL_CST
)
14819 c
= TREE_REAL_CST (arg1
);
14820 n
= real_to_integer (&c
);
14823 REAL_VALUE_TYPE cint
;
14824 real_from_integer (&cint
, VOIDmode
, n
,
14825 n
< 0 ? -1 : 0, 0);
14826 if (real_identical (&c
, &cint
))
14830 return tree_expr_nonnegative_warnv_p (arg0
,
14831 strict_overflow_p
);
14836 return tree_simple_nonnegative_warnv_p (CALL_EXPR
,
14840 /* Return true if T is known to be non-negative. If the return
14841 value is based on the assumption that signed overflow is undefined,
14842 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14843 *STRICT_OVERFLOW_P. */
14846 tree_invalid_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
)
14848 enum tree_code code
= TREE_CODE (t
);
14849 if (TYPE_UNSIGNED (TREE_TYPE (t
)))
14856 tree temp
= TARGET_EXPR_SLOT (t
);
14857 t
= TARGET_EXPR_INITIAL (t
);
14859 /* If the initializer is non-void, then it's a normal expression
14860 that will be assigned to the slot. */
14861 if (!VOID_TYPE_P (t
))
14862 return tree_expr_nonnegative_warnv_p (t
, strict_overflow_p
);
14864 /* Otherwise, the initializer sets the slot in some way. One common
14865 way is an assignment statement at the end of the initializer. */
14868 if (TREE_CODE (t
) == BIND_EXPR
)
14869 t
= expr_last (BIND_EXPR_BODY (t
));
14870 else if (TREE_CODE (t
) == TRY_FINALLY_EXPR
14871 || TREE_CODE (t
) == TRY_CATCH_EXPR
)
14872 t
= expr_last (TREE_OPERAND (t
, 0));
14873 else if (TREE_CODE (t
) == STATEMENT_LIST
)
14878 if (TREE_CODE (t
) == MODIFY_EXPR
14879 && TREE_OPERAND (t
, 0) == temp
)
14880 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
14881 strict_overflow_p
);
14888 tree arg0
= call_expr_nargs (t
) > 0 ? CALL_EXPR_ARG (t
, 0) : NULL_TREE
;
14889 tree arg1
= call_expr_nargs (t
) > 1 ? CALL_EXPR_ARG (t
, 1) : NULL_TREE
;
14891 return tree_call_nonnegative_warnv_p (TREE_TYPE (t
),
14892 get_callee_fndecl (t
),
14895 strict_overflow_p
);
14897 case COMPOUND_EXPR
:
14899 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 1),
14900 strict_overflow_p
);
14902 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t
, 1)),
14903 strict_overflow_p
);
14905 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t
, 0),
14906 strict_overflow_p
);
14909 return tree_simple_nonnegative_warnv_p (TREE_CODE (t
),
14913 /* We don't know sign of `t', so be conservative and return false. */
14917 /* Return true if T is known to be non-negative. If the return
14918 value is based on the assumption that signed overflow is undefined,
14919 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14920 *STRICT_OVERFLOW_P. */
14923 tree_expr_nonnegative_warnv_p (tree t
, bool *strict_overflow_p
)
14925 enum tree_code code
;
14926 if (t
== error_mark_node
)
14929 code
= TREE_CODE (t
);
14930 switch (TREE_CODE_CLASS (code
))
14933 case tcc_comparison
:
14934 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
14936 TREE_OPERAND (t
, 0),
14937 TREE_OPERAND (t
, 1),
14938 strict_overflow_p
);
14941 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
14943 TREE_OPERAND (t
, 0),
14944 strict_overflow_p
);
14947 case tcc_declaration
:
14948 case tcc_reference
:
14949 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
);
14957 case TRUTH_AND_EXPR
:
14958 case TRUTH_OR_EXPR
:
14959 case TRUTH_XOR_EXPR
:
14960 return tree_binary_nonnegative_warnv_p (TREE_CODE (t
),
14962 TREE_OPERAND (t
, 0),
14963 TREE_OPERAND (t
, 1),
14964 strict_overflow_p
);
14965 case TRUTH_NOT_EXPR
:
14966 return tree_unary_nonnegative_warnv_p (TREE_CODE (t
),
14968 TREE_OPERAND (t
, 0),
14969 strict_overflow_p
);
14976 case WITH_SIZE_EXPR
:
14978 return tree_single_nonnegative_warnv_p (t
, strict_overflow_p
);
14981 return tree_invalid_nonnegative_warnv_p (t
, strict_overflow_p
);
14985 /* Return true if `t' is known to be non-negative. Handle warnings
14986 about undefined signed overflow. */
14989 tree_expr_nonnegative_p (tree t
)
14991 bool ret
, strict_overflow_p
;
14993 strict_overflow_p
= false;
14994 ret
= tree_expr_nonnegative_warnv_p (t
, &strict_overflow_p
);
14995 if (strict_overflow_p
)
14996 fold_overflow_warning (("assuming signed overflow does not occur when "
14997 "determining that expression is always "
14999 WARN_STRICT_OVERFLOW_MISC
);
15004 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15005 For floating point we further ensure that T is not denormal.
15006 Similar logic is present in nonzero_address in rtlanal.h.
15008 If the return value is based on the assumption that signed overflow
15009 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15010 change *STRICT_OVERFLOW_P. */
15013 tree_unary_nonzero_warnv_p (enum tree_code code
, tree type
, tree op0
,
15014 bool *strict_overflow_p
)
15019 return tree_expr_nonzero_warnv_p (op0
,
15020 strict_overflow_p
);
15024 tree inner_type
= TREE_TYPE (op0
);
15025 tree outer_type
= type
;
15027 return (TYPE_PRECISION (outer_type
) >= TYPE_PRECISION (inner_type
)
15028 && tree_expr_nonzero_warnv_p (op0
,
15029 strict_overflow_p
));
15033 case NON_LVALUE_EXPR
:
15034 return tree_expr_nonzero_warnv_p (op0
,
15035 strict_overflow_p
);
15044 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15045 For floating point we further ensure that T is not denormal.
15046 Similar logic is present in nonzero_address in rtlanal.h.
15048 If the return value is based on the assumption that signed overflow
15049 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15050 change *STRICT_OVERFLOW_P. */
15053 tree_binary_nonzero_warnv_p (enum tree_code code
,
15056 tree op1
, bool *strict_overflow_p
)
15058 bool sub_strict_overflow_p
;
15061 case POINTER_PLUS_EXPR
:
15063 if (TYPE_OVERFLOW_UNDEFINED (type
))
15065 /* With the presence of negative values it is hard
15066 to say something. */
15067 sub_strict_overflow_p
= false;
15068 if (!tree_expr_nonnegative_warnv_p (op0
,
15069 &sub_strict_overflow_p
)
15070 || !tree_expr_nonnegative_warnv_p (op1
,
15071 &sub_strict_overflow_p
))
15073 /* One of operands must be positive and the other non-negative. */
15074 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15075 overflows, on a twos-complement machine the sum of two
15076 nonnegative numbers can never be zero. */
15077 return (tree_expr_nonzero_warnv_p (op0
,
15079 || tree_expr_nonzero_warnv_p (op1
,
15080 strict_overflow_p
));
15085 if (TYPE_OVERFLOW_UNDEFINED (type
))
15087 if (tree_expr_nonzero_warnv_p (op0
,
15089 && tree_expr_nonzero_warnv_p (op1
,
15090 strict_overflow_p
))
15092 *strict_overflow_p
= true;
15099 sub_strict_overflow_p
= false;
15100 if (tree_expr_nonzero_warnv_p (op0
,
15101 &sub_strict_overflow_p
)
15102 && tree_expr_nonzero_warnv_p (op1
,
15103 &sub_strict_overflow_p
))
15105 if (sub_strict_overflow_p
)
15106 *strict_overflow_p
= true;
15111 sub_strict_overflow_p
= false;
15112 if (tree_expr_nonzero_warnv_p (op0
,
15113 &sub_strict_overflow_p
))
15115 if (sub_strict_overflow_p
)
15116 *strict_overflow_p
= true;
15118 /* When both operands are nonzero, then MAX must be too. */
15119 if (tree_expr_nonzero_warnv_p (op1
,
15120 strict_overflow_p
))
15123 /* MAX where operand 0 is positive is positive. */
15124 return tree_expr_nonnegative_warnv_p (op0
,
15125 strict_overflow_p
);
15127 /* MAX where operand 1 is positive is positive. */
15128 else if (tree_expr_nonzero_warnv_p (op1
,
15129 &sub_strict_overflow_p
)
15130 && tree_expr_nonnegative_warnv_p (op1
,
15131 &sub_strict_overflow_p
))
15133 if (sub_strict_overflow_p
)
15134 *strict_overflow_p
= true;
15140 return (tree_expr_nonzero_warnv_p (op1
,
15142 || tree_expr_nonzero_warnv_p (op0
,
15143 strict_overflow_p
));
15152 /* Return true when T is an address and is known to be nonzero.
15153 For floating point we further ensure that T is not denormal.
15154 Similar logic is present in nonzero_address in rtlanal.h.
15156 If the return value is based on the assumption that signed overflow
15157 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15158 change *STRICT_OVERFLOW_P. */
15161 tree_single_nonzero_warnv_p (tree t
, bool *strict_overflow_p
)
15163 bool sub_strict_overflow_p
;
15164 switch (TREE_CODE (t
))
15167 return !integer_zerop (t
);
15171 tree base
= TREE_OPERAND (t
, 0);
15172 if (!DECL_P (base
))
15173 base
= get_base_address (base
);
15178 /* Weak declarations may link to NULL. Other things may also be NULL
15179 so protect with -fdelete-null-pointer-checks; but not variables
15180 allocated on the stack. */
15182 && (flag_delete_null_pointer_checks
15183 || (DECL_CONTEXT (base
)
15184 && TREE_CODE (DECL_CONTEXT (base
)) == FUNCTION_DECL
15185 && auto_var_in_fn_p (base
, DECL_CONTEXT (base
)))))
15186 return !VAR_OR_FUNCTION_DECL_P (base
) || !DECL_WEAK (base
);
15188 /* Constants are never weak. */
15189 if (CONSTANT_CLASS_P (base
))
15196 sub_strict_overflow_p
= false;
15197 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 1),
15198 &sub_strict_overflow_p
)
15199 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 2),
15200 &sub_strict_overflow_p
))
15202 if (sub_strict_overflow_p
)
15203 *strict_overflow_p
= true;
15214 /* Return true when T is an address and is known to be nonzero.
15215 For floating point we further ensure that T is not denormal.
15216 Similar logic is present in nonzero_address in rtlanal.h.
15218 If the return value is based on the assumption that signed overflow
15219 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15220 change *STRICT_OVERFLOW_P. */
15223 tree_expr_nonzero_warnv_p (tree t
, bool *strict_overflow_p
)
15225 tree type
= TREE_TYPE (t
);
15226 enum tree_code code
;
15228 /* Doing something useful for floating point would need more work. */
15229 if (!INTEGRAL_TYPE_P (type
) && !POINTER_TYPE_P (type
))
15232 code
= TREE_CODE (t
);
15233 switch (TREE_CODE_CLASS (code
))
15236 return tree_unary_nonzero_warnv_p (code
, type
, TREE_OPERAND (t
, 0),
15237 strict_overflow_p
);
15239 case tcc_comparison
:
15240 return tree_binary_nonzero_warnv_p (code
, type
,
15241 TREE_OPERAND (t
, 0),
15242 TREE_OPERAND (t
, 1),
15243 strict_overflow_p
);
15245 case tcc_declaration
:
15246 case tcc_reference
:
15247 return tree_single_nonzero_warnv_p (t
, strict_overflow_p
);
15255 case TRUTH_NOT_EXPR
:
15256 return tree_unary_nonzero_warnv_p (code
, type
, TREE_OPERAND (t
, 0),
15257 strict_overflow_p
);
15259 case TRUTH_AND_EXPR
:
15260 case TRUTH_OR_EXPR
:
15261 case TRUTH_XOR_EXPR
:
15262 return tree_binary_nonzero_warnv_p (code
, type
,
15263 TREE_OPERAND (t
, 0),
15264 TREE_OPERAND (t
, 1),
15265 strict_overflow_p
);
15272 case WITH_SIZE_EXPR
:
15274 return tree_single_nonzero_warnv_p (t
, strict_overflow_p
);
15276 case COMPOUND_EXPR
:
15279 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 1),
15280 strict_overflow_p
);
15283 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t
, 0),
15284 strict_overflow_p
);
15287 return alloca_call_p (t
);
15295 /* Return true when T is an address and is known to be nonzero.
15296 Handle warnings about undefined signed overflow. */
15299 tree_expr_nonzero_p (tree t
)
15301 bool ret
, strict_overflow_p
;
15303 strict_overflow_p
= false;
15304 ret
= tree_expr_nonzero_warnv_p (t
, &strict_overflow_p
);
15305 if (strict_overflow_p
)
15306 fold_overflow_warning (("assuming signed overflow does not occur when "
15307 "determining that expression is always "
15309 WARN_STRICT_OVERFLOW_MISC
);
15313 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15314 attempt to fold the expression to a constant without modifying TYPE,
15317 If the expression could be simplified to a constant, then return
15318 the constant. If the expression would not be simplified to a
15319 constant, then return NULL_TREE. */
15322 fold_binary_to_constant (enum tree_code code
, tree type
, tree op0
, tree op1
)
15324 tree tem
= fold_binary (code
, type
, op0
, op1
);
15325 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
15328 /* Given the components of a unary expression CODE, TYPE and OP0,
15329 attempt to fold the expression to a constant without modifying
15332 If the expression could be simplified to a constant, then return
15333 the constant. If the expression would not be simplified to a
15334 constant, then return NULL_TREE. */
15337 fold_unary_to_constant (enum tree_code code
, tree type
, tree op0
)
15339 tree tem
= fold_unary (code
, type
, op0
);
15340 return (tem
&& TREE_CONSTANT (tem
)) ? tem
: NULL_TREE
;
15343 /* If EXP represents referencing an element in a constant string
15344 (either via pointer arithmetic or array indexing), return the
15345 tree representing the value accessed, otherwise return NULL. */
15348 fold_read_from_constant_string (tree exp
)
15350 if ((TREE_CODE (exp
) == INDIRECT_REF
15351 || TREE_CODE (exp
) == ARRAY_REF
)
15352 && TREE_CODE (TREE_TYPE (exp
)) == INTEGER_TYPE
)
15354 tree exp1
= TREE_OPERAND (exp
, 0);
15357 location_t loc
= EXPR_LOCATION (exp
);
15359 if (TREE_CODE (exp
) == INDIRECT_REF
)
15360 string
= string_constant (exp1
, &index
);
15363 tree low_bound
= array_ref_low_bound (exp
);
15364 index
= fold_convert_loc (loc
, sizetype
, TREE_OPERAND (exp
, 1));
15366 /* Optimize the special-case of a zero lower bound.
15368 We convert the low_bound to sizetype to avoid some problems
15369 with constant folding. (E.g. suppose the lower bound is 1,
15370 and its mode is QI. Without the conversion,l (ARRAY
15371 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15372 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15373 if (! integer_zerop (low_bound
))
15374 index
= size_diffop_loc (loc
, index
,
15375 fold_convert_loc (loc
, sizetype
, low_bound
));
15381 && TYPE_MODE (TREE_TYPE (exp
)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))
15382 && TREE_CODE (string
) == STRING_CST
15383 && TREE_CODE (index
) == INTEGER_CST
15384 && compare_tree_int (index
, TREE_STRING_LENGTH (string
)) < 0
15385 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
))))
15387 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string
)))) == 1))
15388 return build_int_cst_type (TREE_TYPE (exp
),
15389 (TREE_STRING_POINTER (string
)
15390 [TREE_INT_CST_LOW (index
)]));
15395 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15396 an integer constant, real, or fixed-point constant.
15398 TYPE is the type of the result. */
15401 fold_negate_const (tree arg0
, tree type
)
15403 tree t
= NULL_TREE
;
15405 switch (TREE_CODE (arg0
))
15409 double_int val
= tree_to_double_int (arg0
);
15410 int overflow
= neg_double (val
.low
, val
.high
, &val
.low
, &val
.high
);
15412 t
= force_fit_type_double (type
, val
, 1,
15413 (overflow
| TREE_OVERFLOW (arg0
))
15414 && !TYPE_UNSIGNED (type
));
15419 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
15424 FIXED_VALUE_TYPE f
;
15425 bool overflow_p
= fixed_arithmetic (&f
, NEGATE_EXPR
,
15426 &(TREE_FIXED_CST (arg0
)), NULL
,
15427 TYPE_SATURATING (type
));
15428 t
= build_fixed (type
, f
);
15429 /* Propagate overflow flags. */
15430 if (overflow_p
| TREE_OVERFLOW (arg0
))
15431 TREE_OVERFLOW (t
) = 1;
15436 gcc_unreachable ();
15442 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15443 an integer constant or real constant.
15445 TYPE is the type of the result. */
15448 fold_abs_const (tree arg0
, tree type
)
15450 tree t
= NULL_TREE
;
15452 switch (TREE_CODE (arg0
))
15456 double_int val
= tree_to_double_int (arg0
);
15458 /* If the value is unsigned or non-negative, then the absolute value
15459 is the same as the ordinary value. */
15460 if (TYPE_UNSIGNED (type
)
15461 || !double_int_negative_p (val
))
15464 /* If the value is negative, then the absolute value is
15470 overflow
= neg_double (val
.low
, val
.high
, &val
.low
, &val
.high
);
15471 t
= force_fit_type_double (type
, val
, -1,
15472 overflow
| TREE_OVERFLOW (arg0
));
15478 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0
)))
15479 t
= build_real (type
, real_value_negate (&TREE_REAL_CST (arg0
)));
15485 gcc_unreachable ();
15491 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15492 constant. TYPE is the type of the result. */
15495 fold_not_const (const_tree arg0
, tree type
)
15499 gcc_assert (TREE_CODE (arg0
) == INTEGER_CST
);
15501 val
= double_int_not (tree_to_double_int (arg0
));
15502 return force_fit_type_double (type
, val
, 0, TREE_OVERFLOW (arg0
));
15505 /* Given CODE, a relational operator, the target type, TYPE and two
15506 constant operands OP0 and OP1, return the result of the
15507 relational operation. If the result is not a compile time
15508 constant, then return NULL_TREE. */
15511 fold_relational_const (enum tree_code code
, tree type
, tree op0
, tree op1
)
15513 int result
, invert
;
15515 /* From here on, the only cases we handle are when the result is
15516 known to be a constant. */
15518 if (TREE_CODE (op0
) == REAL_CST
&& TREE_CODE (op1
) == REAL_CST
)
15520 const REAL_VALUE_TYPE
*c0
= TREE_REAL_CST_PTR (op0
);
15521 const REAL_VALUE_TYPE
*c1
= TREE_REAL_CST_PTR (op1
);
15523 /* Handle the cases where either operand is a NaN. */
15524 if (real_isnan (c0
) || real_isnan (c1
))
15534 case UNORDERED_EXPR
:
15548 if (flag_trapping_math
)
15554 gcc_unreachable ();
15557 return constant_boolean_node (result
, type
);
15560 return constant_boolean_node (real_compare (code
, c0
, c1
), type
);
15563 if (TREE_CODE (op0
) == FIXED_CST
&& TREE_CODE (op1
) == FIXED_CST
)
15565 const FIXED_VALUE_TYPE
*c0
= TREE_FIXED_CST_PTR (op0
);
15566 const FIXED_VALUE_TYPE
*c1
= TREE_FIXED_CST_PTR (op1
);
15567 return constant_boolean_node (fixed_compare (code
, c0
, c1
), type
);
15570 /* Handle equality/inequality of complex constants. */
15571 if (TREE_CODE (op0
) == COMPLEX_CST
&& TREE_CODE (op1
) == COMPLEX_CST
)
15573 tree rcond
= fold_relational_const (code
, type
,
15574 TREE_REALPART (op0
),
15575 TREE_REALPART (op1
));
15576 tree icond
= fold_relational_const (code
, type
,
15577 TREE_IMAGPART (op0
),
15578 TREE_IMAGPART (op1
));
15579 if (code
== EQ_EXPR
)
15580 return fold_build2 (TRUTH_ANDIF_EXPR
, type
, rcond
, icond
);
15581 else if (code
== NE_EXPR
)
15582 return fold_build2 (TRUTH_ORIF_EXPR
, type
, rcond
, icond
);
15587 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15589 To compute GT, swap the arguments and do LT.
15590 To compute GE, do LT and invert the result.
15591 To compute LE, swap the arguments, do LT and invert the result.
15592 To compute NE, do EQ and invert the result.
15594 Therefore, the code below must handle only EQ and LT. */
15596 if (code
== LE_EXPR
|| code
== GT_EXPR
)
15601 code
= swap_tree_comparison (code
);
15604 /* Note that it is safe to invert for real values here because we
15605 have already handled the one case that it matters. */
15608 if (code
== NE_EXPR
|| code
== GE_EXPR
)
15611 code
= invert_tree_comparison (code
, false);
15614 /* Compute a result for LT or EQ if args permit;
15615 Otherwise return T. */
15616 if (TREE_CODE (op0
) == INTEGER_CST
&& TREE_CODE (op1
) == INTEGER_CST
)
15618 if (code
== EQ_EXPR
)
15619 result
= tree_int_cst_equal (op0
, op1
);
15620 else if (TYPE_UNSIGNED (TREE_TYPE (op0
)))
15621 result
= INT_CST_LT_UNSIGNED (op0
, op1
);
15623 result
= INT_CST_LT (op0
, op1
);
15630 return constant_boolean_node (result
, type
);
15633 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15634 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15638 fold_build_cleanup_point_expr (tree type
, tree expr
)
15640 /* If the expression does not have side effects then we don't have to wrap
15641 it with a cleanup point expression. */
15642 if (!TREE_SIDE_EFFECTS (expr
))
15645 /* If the expression is a return, check to see if the expression inside the
15646 return has no side effects or the right hand side of the modify expression
15647 inside the return. If either don't have side effects set we don't need to
15648 wrap the expression in a cleanup point expression. Note we don't check the
15649 left hand side of the modify because it should always be a return decl. */
15650 if (TREE_CODE (expr
) == RETURN_EXPR
)
15652 tree op
= TREE_OPERAND (expr
, 0);
15653 if (!op
|| !TREE_SIDE_EFFECTS (op
))
15655 op
= TREE_OPERAND (op
, 1);
15656 if (!TREE_SIDE_EFFECTS (op
))
15660 return build1 (CLEANUP_POINT_EXPR
, type
, expr
);
15663 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15664 of an indirection through OP0, or NULL_TREE if no simplification is
15668 fold_indirect_ref_1 (location_t loc
, tree type
, tree op0
)
15674 subtype
= TREE_TYPE (sub
);
15675 if (!POINTER_TYPE_P (subtype
))
15678 if (TREE_CODE (sub
) == ADDR_EXPR
)
15680 tree op
= TREE_OPERAND (sub
, 0);
15681 tree optype
= TREE_TYPE (op
);
15682 /* *&CONST_DECL -> to the value of the const decl. */
15683 if (TREE_CODE (op
) == CONST_DECL
)
15684 return DECL_INITIAL (op
);
15685 /* *&p => p; make sure to handle *&"str"[cst] here. */
15686 if (type
== optype
)
15688 tree fop
= fold_read_from_constant_string (op
);
15694 /* *(foo *)&fooarray => fooarray[0] */
15695 else if (TREE_CODE (optype
) == ARRAY_TYPE
15696 && type
== TREE_TYPE (optype
)
15697 && (!in_gimple_form
15698 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
15700 tree type_domain
= TYPE_DOMAIN (optype
);
15701 tree min_val
= size_zero_node
;
15702 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
15703 min_val
= TYPE_MIN_VALUE (type_domain
);
15705 && TREE_CODE (min_val
) != INTEGER_CST
)
15707 return build4_loc (loc
, ARRAY_REF
, type
, op
, min_val
,
15708 NULL_TREE
, NULL_TREE
);
15710 /* *(foo *)&complexfoo => __real__ complexfoo */
15711 else if (TREE_CODE (optype
) == COMPLEX_TYPE
15712 && type
== TREE_TYPE (optype
))
15713 return fold_build1_loc (loc
, REALPART_EXPR
, type
, op
);
15714 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15715 else if (TREE_CODE (optype
) == VECTOR_TYPE
15716 && type
== TREE_TYPE (optype
))
15718 tree part_width
= TYPE_SIZE (type
);
15719 tree index
= bitsize_int (0);
15720 return fold_build3_loc (loc
, BIT_FIELD_REF
, type
, op
, part_width
, index
);
15724 if (TREE_CODE (sub
) == POINTER_PLUS_EXPR
15725 && TREE_CODE (TREE_OPERAND (sub
, 1)) == INTEGER_CST
)
15727 tree op00
= TREE_OPERAND (sub
, 0);
15728 tree op01
= TREE_OPERAND (sub
, 1);
15731 if (TREE_CODE (op00
) == ADDR_EXPR
)
15734 op00
= TREE_OPERAND (op00
, 0);
15735 op00type
= TREE_TYPE (op00
);
15737 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15738 if (TREE_CODE (op00type
) == VECTOR_TYPE
15739 && type
== TREE_TYPE (op00type
))
15741 HOST_WIDE_INT offset
= tree_low_cst (op01
, 0);
15742 tree part_width
= TYPE_SIZE (type
);
15743 unsigned HOST_WIDE_INT part_widthi
= tree_low_cst (part_width
, 0)/BITS_PER_UNIT
;
15744 unsigned HOST_WIDE_INT indexi
= offset
* BITS_PER_UNIT
;
15745 tree index
= bitsize_int (indexi
);
15747 if (offset
/part_widthi
<= TYPE_VECTOR_SUBPARTS (op00type
))
15748 return fold_build3_loc (loc
,
15749 BIT_FIELD_REF
, type
, op00
,
15750 part_width
, index
);
15753 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15754 else if (TREE_CODE (op00type
) == COMPLEX_TYPE
15755 && type
== TREE_TYPE (op00type
))
15757 tree size
= TYPE_SIZE_UNIT (type
);
15758 if (tree_int_cst_equal (size
, op01
))
15759 return fold_build1_loc (loc
, IMAGPART_EXPR
, type
, op00
);
15761 /* ((foo *)&fooarray)[1] => fooarray[1] */
15762 else if (TREE_CODE (op00type
) == ARRAY_TYPE
15763 && type
== TREE_TYPE (op00type
))
15765 tree type_domain
= TYPE_DOMAIN (op00type
);
15766 tree min_val
= size_zero_node
;
15767 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
15768 min_val
= TYPE_MIN_VALUE (type_domain
);
15769 op01
= size_binop_loc (loc
, EXACT_DIV_EXPR
, op01
,
15770 TYPE_SIZE_UNIT (type
));
15771 op01
= size_binop_loc (loc
, PLUS_EXPR
, op01
, min_val
);
15772 return build4_loc (loc
, ARRAY_REF
, type
, op00
, op01
,
15773 NULL_TREE
, NULL_TREE
);
15778 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15779 if (TREE_CODE (TREE_TYPE (subtype
)) == ARRAY_TYPE
15780 && type
== TREE_TYPE (TREE_TYPE (subtype
))
15781 && (!in_gimple_form
15782 || TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
))
15785 tree min_val
= size_zero_node
;
15786 sub
= build_fold_indirect_ref_loc (loc
, sub
);
15787 type_domain
= TYPE_DOMAIN (TREE_TYPE (sub
));
15788 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
15789 min_val
= TYPE_MIN_VALUE (type_domain
);
15791 && TREE_CODE (min_val
) != INTEGER_CST
)
15793 return build4_loc (loc
, ARRAY_REF
, type
, sub
, min_val
, NULL_TREE
,
15800 /* Builds an expression for an indirection through T, simplifying some
15804 build_fold_indirect_ref_loc (location_t loc
, tree t
)
15806 tree type
= TREE_TYPE (TREE_TYPE (t
));
15807 tree sub
= fold_indirect_ref_1 (loc
, type
, t
);
15812 return build1_loc (loc
, INDIRECT_REF
, type
, t
);
15815 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15818 fold_indirect_ref_loc (location_t loc
, tree t
)
15820 tree sub
= fold_indirect_ref_1 (loc
, TREE_TYPE (t
), TREE_OPERAND (t
, 0));
15828 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15829 whose result is ignored. The type of the returned tree need not be
15830 the same as the original expression. */
15833 fold_ignored_result (tree t
)
15835 if (!TREE_SIDE_EFFECTS (t
))
15836 return integer_zero_node
;
15839 switch (TREE_CODE_CLASS (TREE_CODE (t
)))
15842 t
= TREE_OPERAND (t
, 0);
15846 case tcc_comparison
:
15847 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
15848 t
= TREE_OPERAND (t
, 0);
15849 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 0)))
15850 t
= TREE_OPERAND (t
, 1);
15855 case tcc_expression
:
15856 switch (TREE_CODE (t
))
15858 case COMPOUND_EXPR
:
15859 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1)))
15861 t
= TREE_OPERAND (t
, 0);
15865 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 1))
15866 || TREE_SIDE_EFFECTS (TREE_OPERAND (t
, 2)))
15868 t
= TREE_OPERAND (t
, 0);
15881 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15882 This can only be applied to objects of a sizetype. */
15885 round_up_loc (location_t loc
, tree value
, int divisor
)
15887 tree div
= NULL_TREE
;
15889 gcc_assert (divisor
> 0);
15893 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15894 have to do anything. Only do this when we are not given a const,
15895 because in that case, this check is more expensive than just
15897 if (TREE_CODE (value
) != INTEGER_CST
)
15899 div
= build_int_cst (TREE_TYPE (value
), divisor
);
15901 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
15905 /* If divisor is a power of two, simplify this to bit manipulation. */
15906 if (divisor
== (divisor
& -divisor
))
15908 if (TREE_CODE (value
) == INTEGER_CST
)
15910 double_int val
= tree_to_double_int (value
);
15913 if ((val
.low
& (divisor
- 1)) == 0)
15916 overflow_p
= TREE_OVERFLOW (value
);
15917 val
.low
&= ~(divisor
- 1);
15918 val
.low
+= divisor
;
15926 return force_fit_type_double (TREE_TYPE (value
), val
,
15933 t
= build_int_cst (TREE_TYPE (value
), divisor
- 1);
15934 value
= size_binop_loc (loc
, PLUS_EXPR
, value
, t
);
15935 t
= build_int_cst (TREE_TYPE (value
), -divisor
);
15936 value
= size_binop_loc (loc
, BIT_AND_EXPR
, value
, t
);
15942 div
= build_int_cst (TREE_TYPE (value
), divisor
);
15943 value
= size_binop_loc (loc
, CEIL_DIV_EXPR
, value
, div
);
15944 value
= size_binop_loc (loc
, MULT_EXPR
, value
, div
);
15950 /* Likewise, but round down. */
15953 round_down_loc (location_t loc
, tree value
, int divisor
)
15955 tree div
= NULL_TREE
;
15957 gcc_assert (divisor
> 0);
15961 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15962 have to do anything. Only do this when we are not given a const,
15963 because in that case, this check is more expensive than just
15965 if (TREE_CODE (value
) != INTEGER_CST
)
15967 div
= build_int_cst (TREE_TYPE (value
), divisor
);
15969 if (multiple_of_p (TREE_TYPE (value
), value
, div
))
15973 /* If divisor is a power of two, simplify this to bit manipulation. */
15974 if (divisor
== (divisor
& -divisor
))
15978 t
= build_int_cst (TREE_TYPE (value
), -divisor
);
15979 value
= size_binop_loc (loc
, BIT_AND_EXPR
, value
, t
);
15984 div
= build_int_cst (TREE_TYPE (value
), divisor
);
15985 value
= size_binop_loc (loc
, FLOOR_DIV_EXPR
, value
, div
);
15986 value
= size_binop_loc (loc
, MULT_EXPR
, value
, div
);
15992 /* Returns the pointer to the base of the object addressed by EXP and
15993 extracts the information about the offset of the access, storing it
15994 to PBITPOS and POFFSET. */
15997 split_address_to_core_and_offset (tree exp
,
15998 HOST_WIDE_INT
*pbitpos
, tree
*poffset
)
16001 enum machine_mode mode
;
16002 int unsignedp
, volatilep
;
16003 HOST_WIDE_INT bitsize
;
16004 location_t loc
= EXPR_LOCATION (exp
);
16006 if (TREE_CODE (exp
) == ADDR_EXPR
)
16008 core
= get_inner_reference (TREE_OPERAND (exp
, 0), &bitsize
, pbitpos
,
16009 poffset
, &mode
, &unsignedp
, &volatilep
,
16011 core
= build_fold_addr_expr_loc (loc
, core
);
16017 *poffset
= NULL_TREE
;
16023 /* Returns true if addresses of E1 and E2 differ by a constant, false
16024 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16027 ptr_difference_const (tree e1
, tree e2
, HOST_WIDE_INT
*diff
)
16030 HOST_WIDE_INT bitpos1
, bitpos2
;
16031 tree toffset1
, toffset2
, tdiff
, type
;
16033 core1
= split_address_to_core_and_offset (e1
, &bitpos1
, &toffset1
);
16034 core2
= split_address_to_core_and_offset (e2
, &bitpos2
, &toffset2
);
16036 if (bitpos1
% BITS_PER_UNIT
!= 0
16037 || bitpos2
% BITS_PER_UNIT
!= 0
16038 || !operand_equal_p (core1
, core2
, 0))
16041 if (toffset1
&& toffset2
)
16043 type
= TREE_TYPE (toffset1
);
16044 if (type
!= TREE_TYPE (toffset2
))
16045 toffset2
= fold_convert (type
, toffset2
);
16047 tdiff
= fold_build2 (MINUS_EXPR
, type
, toffset1
, toffset2
);
16048 if (!cst_and_fits_in_hwi (tdiff
))
16051 *diff
= int_cst_value (tdiff
);
16053 else if (toffset1
|| toffset2
)
16055 /* If only one of the offsets is non-constant, the difference cannot
16062 *diff
+= (bitpos1
- bitpos2
) / BITS_PER_UNIT
;
16066 /* Simplify the floating point expression EXP when the sign of the
16067 result is not significant. Return NULL_TREE if no simplification
16071 fold_strip_sign_ops (tree exp
)
16074 location_t loc
= EXPR_LOCATION (exp
);
16076 switch (TREE_CODE (exp
))
16080 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 0));
16081 return arg0
? arg0
: TREE_OPERAND (exp
, 0);
16085 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp
))))
16087 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 0));
16088 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
16089 if (arg0
!= NULL_TREE
|| arg1
!= NULL_TREE
)
16090 return fold_build2_loc (loc
, TREE_CODE (exp
), TREE_TYPE (exp
),
16091 arg0
? arg0
: TREE_OPERAND (exp
, 0),
16092 arg1
? arg1
: TREE_OPERAND (exp
, 1));
16095 case COMPOUND_EXPR
:
16096 arg0
= TREE_OPERAND (exp
, 0);
16097 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
16099 return fold_build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (exp
), arg0
, arg1
);
16103 arg0
= fold_strip_sign_ops (TREE_OPERAND (exp
, 1));
16104 arg1
= fold_strip_sign_ops (TREE_OPERAND (exp
, 2));
16106 return fold_build3_loc (loc
,
16107 COND_EXPR
, TREE_TYPE (exp
), TREE_OPERAND (exp
, 0),
16108 arg0
? arg0
: TREE_OPERAND (exp
, 1),
16109 arg1
? arg1
: TREE_OPERAND (exp
, 2));
16114 const enum built_in_function fcode
= builtin_mathfn_code (exp
);
16117 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
16118 /* Strip copysign function call, return the 1st argument. */
16119 arg0
= CALL_EXPR_ARG (exp
, 0);
16120 arg1
= CALL_EXPR_ARG (exp
, 1);
16121 return omit_one_operand_loc (loc
, TREE_TYPE (exp
), arg0
, arg1
);
16124 /* Strip sign ops from the argument of "odd" math functions. */
16125 if (negate_mathfn_p (fcode
))
16127 arg0
= fold_strip_sign_ops (CALL_EXPR_ARG (exp
, 0));
16129 return build_call_expr_loc (loc
, get_callee_fndecl (exp
), 1, arg0
);