/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "coretypes.h"
#include "diagnostic-core.h"
#include "langhooks.h"
#include "tree-flow.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
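/* Illustrative sketch (not part of the original source): the low three
   bits of a compcode encode LT, EQ and GT, and bit 3 encodes UNORD, so
   ANDing or ORing two compcodes yields the compcode of the conjunction
   or disjunction of the corresponding predicates.  The function name
   below is hypothetical.  */
#if 0
static void
example_compcode_bit_tricks (void)
{
  /* (x < y) || (x == y)  is  x <= y.  */
  gcc_assert ((COMPCODE_LT | COMPCODE_EQ) == COMPCODE_LE);
  /* (x < y) && (x == y)  can never hold.  */
  gcc_assert ((COMPCODE_LT & COMPCODE_EQ) == COMPCODE_FALSE);
  /* "less or greater" or "unordered" is IEEE !=.  */
  gcc_assert ((COMPCODE_LTGT | COMPCODE_UNORD) == COMPCODE_NE);
}
#endif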
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc != UNKNOWN_LOCATION ? tloc : loc;
}
/* Similar to protected_set_expr_location, but never modify x in place,
   if location can and needs to be set, unshare it.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */

#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
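/* Illustrative sketch (not part of the original source): using
   OVERFLOW_SUM_SIGN on a HOST_WIDE_INT addition.  The helper name is
   hypothetical; the addition is done in the unsigned type so the sum
   itself is well defined.  */
#if 0
static int
example_signed_add_overflows (HOST_WIDE_INT a, HOST_WIDE_INT b)
{
  HOST_WIDE_INT sum = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) a
                                       + (unsigned HOST_WIDE_INT) b);
  /* Nonzero iff a and b have the same sign but sum's sign differs.  */
  return OVERFLOW_SUM_SIGN (a, b, sum);
}
#endif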
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is according to operand two, that
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
  if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
      && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
    uns = false;

  quo = double_int_divmod (tree_to_double_int (arg1),
                           tree_to_double_int (arg2),
                           uns, code, &rem);

  if (double_int_zero_p (rem))
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}
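/* Illustrative usage sketch (not part of the original source): callers
   that fold speculatively bracket the work with a defer/undefer pair,
   so a -Wstrict-overflow warning is only emitted if the folded result
   is actually used.  The variable names below are hypothetical.  */
#if 0
  fold_defer_overflow_warnings ();
  tree folded = fold_binary (PLUS_EXPR, type, op0, op1);
  /* Issue the deferred warning only if we keep the folded tree.  */
  fold_undefer_overflow_warnings (folded != NULL_TREE, stmt,
                                  WARN_STRICT_OVERFLOW_MISC);
#endif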
/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

static bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
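/* Illustrative sketch (not part of the original source): negate_expr
   turns -(a - b) into b - a when signed zeros and sign-dependent
   rounding do not matter.  The variable names below are hypothetical.  */
#if 0
  tree diff = build2 (MINUS_EXPR, integer_type_node, a, b);
  tree neg = negate_expr (diff);  /* yields b - a, not -(a - b) */
#endif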
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
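/* Illustrative sketch (not part of the original source): splitting
   (x - 5) with CODE == PLUS_EXPR.  The variable names are
   hypothetical.  */
#if 0
  tree con, lit, minus_lit;
  tree var = split_tree (in, PLUS_EXPR, &con, &lit, &minus_lit, 0);
  /* For in == (x - 5):  var == x, minus_lit == 5, lit and con null.  */
#endif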
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = double_int_ior (op1, op2);
      break;

    case BIT_XOR_EXPR:
      res = double_int_xor (op1, op2);
      break;

    case BIT_AND_EXPR:
      res = double_int_and (op1, op2);
      break;

    case RSHIFT_EXPR:
      res = double_int_rshift (op1, double_int_to_shwi (op2),
                               TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = double_int_lshift (op1, double_int_to_shwi (op2),
                               TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = double_int_rrotate (op1, double_int_to_shwi (op2),
                                TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = double_int_lrotate (op1, double_int_to_shwi (op2),
                                TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      overflow = add_double (op1.low, op1.high, op2.low, op2.high,
                             &res.low, &res.high);
      break;

    case MINUS_EXPR:
      neg_double (op2.low, op2.high, &res.low, &res.high);
      add_double (op1.low, op1.high, res.low, res.high,
                  &res.low, &res.high);
      overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
      break;

    case MULT_EXPR:
      overflow = mul_double (op1.low, op1.high, op2.low, op2.high,
                             &res.low, &res.high);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (double_int_zero_p (op2))
        return NULL_TREE;
      if (double_int_one_p (op2))
        {
          res = op1;
          break;
        }
      if (double_int_equal_p (op1, op2)
          && ! double_int_zero_p (op1))
        {
          res = double_int_one;
          break;
        }
      overflow = div_and_round_double (code, uns,
                                       op1.low, op1.high, op2.low, op2.high,
                                       &res.low, &res.high,
                                       &tmp.low, &tmp.high);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (double_int_zero_p (op2))
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       op1.low, op1.high, op2.low, op2.high,
                                       &tmp.low, &tmp.high,
                                       &res.low, &res.high);
      break;

    case MIN_EXPR:
      res = double_int_min (op1, op2, uns);
      break;

    case MAX_EXPR:
      res = double_int_max (op1, op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, 1,
                             ((!uns || is_sizetype) && overflow)
                             | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
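/* Illustrative sketch (not part of the original source): folding 6 + 7
   at compile time with int_const_binop.  */
#if 0
  tree a = build_int_cst (integer_type_node, 6);
  tree b = build_int_cst (integer_type_node, 7);
  tree sum = int_const_binop (PLUS_EXPR, a, b);  /* INTEGER_CST 13 */
#endif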
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may dependent upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to minimize
                 overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree elements1, elements2, list = NULL_TREE;

      if (TREE_CODE (arg2) != VECTOR_CST)
        return NULL_TREE;

      elements1 = TREE_VECTOR_CST_ELTS (arg1);
      elements2 = TREE_VECTOR_CST_ELTS (arg2);

      for (i = 0; i < count; i++)
        {
          tree elem1, elem2, elem;

          /* The trailing elements can be empty and should be treated as 0 */
          if (!elements1)
            elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                        integer_zero_node);
          else
            {
              elem1 = TREE_VALUE (elements1);
              elements1 = TREE_CHAIN (elements1);
            }

          if (!elements2)
            elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                        integer_zero_node);
          else
            {
              elem2 = TREE_VALUE (elements2);
              elements2 = TREE_CHAIN (elements2);
            }

          elem = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and return NULL_TREE */
          if (elem == NULL_TREE)
            return NULL_TREE;

          list = tree_cons (NULL_TREE, elem, list);
        }
      return build_vector (type, nreverse (list));
    }
  return NULL_TREE;
}
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
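/* Illustrative sketch (not part of the original source): size_binop is
   the usual way to do sizetype arithmetic in the middle end, e.g. when
   computing an offset of 4 + 8 bytes.  */
#if 0
  tree off = size_binop (PLUS_EXPR, size_int (4), size_int (8));
  /* off is an INTEGER_CST of sizetype with value 12.  */
#endif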
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  enum machine_mode mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
    {
      temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
                                      HOST_BITS_PER_DOUBLE_INT,
                                      SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     By checking if the fractional bits are not zero to add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && double_int_negative_p (temp_trunc)
      && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
    temp = double_int_add (temp, double_int_one);

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
                             (double_int_negative_p (temp)
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
          && (TYPE_ADDR_SPACE (TREE_TYPE (type))
              != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
        return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, REALPART_EXPR,
                                                  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == FIXED_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1_loc (loc, NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
          || TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            goto fold_convert_exit;
        }

      switch (TREE_CODE (orig))
        {
        case FIXED_POINT_TYPE:
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case BOOLEAN_TYPE:
        case REAL_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
        case FIXED_POINT_TYPE:
          return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                  fold_convert_loc (loc, TREE_TYPE (type), arg),
                                  fold_convert_loc (loc, TREE_TYPE (type),
                                                    integer_zero_node));

        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 0));
                ipart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 1));
                return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
            ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
            return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      gcc_unreachable ();
    }
 fold_convert_exit:
  return protected_set_expr_location_unshare (tem, loc);
}
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
  {
  case VAR_DECL:
  case PARM_DECL:
  case RESULT_DECL:
  case LABEL_DECL:
  case FUNCTION_DECL:
  case SSA_NAME:

  case COMPONENT_REF:
  case MEM_REF:
  case INDIRECT_REF:
  case ARRAY_REF:
  case ARRAY_RANGE_REF:
  case BIT_FIELD_REF:
  case OBJ_TYPE_REF:

  case REALPART_EXPR:
  case IMAGPART_EXPR:
  case PREINCREMENT_EXPR:
  case PREDECREMENT_EXPR:
  case SAVE_EXPR:
  case TRY_CATCH_EXPR:
  case WITH_CLEANUP_EXPR:
  case COMPOUND_EXPR:
  case MODIFY_EXPR:
  case TARGET_EXPR:
  case COND_EXPR:
  case BIND_EXPR:
    break;

  default:
    /* Assume the worst for front-end tree codes.  */
    if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
      break;
    return false;
  }

  return true;
}
2074 non_lvalue_loc (location_t loc
, tree x
)
2076 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2081 if (! maybe_lvalue_p (x
))
2083 return build1_loc (loc
, NON_LVALUE_EXPR
, TREE_TYPE (x
), x
);
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue_loc (location_t loc, tree x)
{
  if (pedantic_lvalues)
    return non_lvalue_loc (loc, x);

  return protected_set_expr_location_unshare (x, loc);
}
/* Given a tree comparison code, return the code that is the logical inverse.
   It is generally not safe to do this for floating-point comparisons, except
   for EQ_EXPR and NE_EXPR, so we return ERROR_MARK in this case.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (location_t loc,
		     enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  int compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    {
      enum tree_code tcode;

      tcode = compcode_to_comparison ((enum comparison_code) compcode);
      return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
    }
}
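
/* For example, combining "a < b" and "a == b" with TRUTH_ORIF_EXPR yields
   COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE, so the whole expression folds
   to the single comparison "a <= b".  */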
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */

int
operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
      || TREE_TYPE (arg0) == error_mark_node
      || TREE_TYPE (arg1) == error_mark_node)
    return 0;

  /* Similar, if either does not have a type (like a released SSA name),
     they aren't equal.  */
  if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
    return 0;

  /* Check equality of integer constants before bailing out due to
     precision differences.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    return tree_int_cst_equal (arg0, arg1);

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  As pointers
     strictly don't have a signedness, require either two pointers or
     two non-pointers as well.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
      || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* We cannot consider pointers to different address space equal.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
      && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
	  != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
    return 0;

  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
  if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
	return operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 1), flags)
	       && operand_equal_p (TREE_OPERAND (arg0, 1),
				   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
	 Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.
     If we are taking an invariant address of two identical objects
     they are necessarily equal as well.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (flags & OEP_CONSTANT_ADDRESS_OF)
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	return tree_int_cst_equal (arg0, arg1);

      case FIXED_CST:
	return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
				       TREE_FIXED_CST (arg1));

      case REAL_CST:
	if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
				   TREE_REAL_CST (arg1)))
	  return 1;

	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
	  {
	    /* If we do not distinguish between signed and unsigned zero,
	       consider them equal.  */
	    if (real_zerop (arg0) && real_zerop (arg1))
	      return 1;
	  }
	return 0;

      case VECTOR_CST:
	{
	  tree v1, v2;

	  v1 = TREE_VECTOR_CST_ELTS (arg0);
	  v2 = TREE_VECTOR_CST_ELTS (arg1);
	  while (v1 && v2)
	    {
	      if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
				    flags))
		return 0;
	      v1 = TREE_CHAIN (v1);
	      v2 = TREE_CHAIN (v2);
	    }

	  return v1 == v2;
	}

      case COMPLEX_CST:
	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
				 flags)
		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
				    flags));

      case STRING_CST:
	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
		&& ! memcmp (TREE_STRING_POINTER (arg0),
			     TREE_STRING_POINTER (arg1),
			     TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
				TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
				? OEP_CONSTANT_ADDRESS_OF : 0);
      default:
	break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, they must both
   be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
				    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)				\
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
	{
	CASE_CONVERT:
	case FIX_TRUNC_EXPR:
	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
	    return 0;
	  break;
	default:
	  break;
	}

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
	return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), flags)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
	 dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
	  || TREE_SIDE_EFFECTS (arg1))
	return 0;

      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	  return OP_SAME (0);

	case MEM_REF:
	  /* Require equal access sizes, and similar pointer types.
	     We can have incomplete types for array references of
	     variable-sized arrays from the Fortran frontend
	     though.  */
	  return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
		   || (TYPE_SIZE (TREE_TYPE (arg0))
		       && TYPE_SIZE (TREE_TYPE (arg1))
		       && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
					   TYPE_SIZE (TREE_TYPE (arg1)), flags)))
		  && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
		      == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
		  && OP_SAME (0) && OP_SAME (1));

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  /* Operands 2 and 3 may be null.
	     Compare the array index by value if it is constant first as we
	     may have different types but same value here.  */
	  return (OP_SAME (0)
		  && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
					  TREE_OPERAND (arg1, 1))
		      || OP_SAME (1))
		  && OP_SAME_WITH_NULL (2)
		  && OP_SAME_WITH_NULL (3));

	case COMPONENT_REF:
	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
	     may be NULL when we're called to compare MEM_EXPRs.  */
	  return OP_SAME_WITH_NULL (0)
		 && OP_SAME (1)
		 && OP_SAME_WITH_NULL (2);

	case BIT_FIELD_REF:
	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}

    case tcc_expression:
      switch (TREE_CODE (arg0))
	{
	case ADDR_EXPR:
	case TRUTH_NOT_EXPR:
	  return OP_SAME (0);

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  return OP_SAME (0) && OP_SAME (1);

	case FMA_EXPR:
	case WIDEN_MULT_PLUS_EXPR:
	case WIDEN_MULT_MINUS_EXPR:
	  if (!OP_SAME (2))
	    return 0;
	  /* The multiplication operands are commutative.  */
	  /* FALLTHRU */

	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  if (OP_SAME (0) && OP_SAME (1))
	    return 1;

	  /* Otherwise take into account this is a commutative operation.  */
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 1), flags)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 0), flags));

	case COND_EXPR:
	case VEC_COND_EXPR:
	case DOT_PROD_EXPR:
	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}

    case tcc_vl_exp:
      switch (TREE_CODE (arg0))
	{
	case CALL_EXPR:
	  /* If the CALL_EXPRs call different functions, then they
	     clearly can not be equal.  */
	  if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
				 flags))
	    return 0;

	  {
	    unsigned int cef = call_expr_flags (arg0);
	    if (flags & OEP_PURE_SAME)
	      cef &= ECF_CONST | ECF_PURE;
	    else
	      cef &= ECF_CONST;
	    if (!cef)
	      return 0;
	  }

	  /* Now see if all the arguments are the same.  */
	  {
	    const_call_expr_arg_iterator iter0, iter1;
	    const_tree a0, a1;
	    for (a0 = first_const_call_expr_arg (arg0, &iter0),
		   a1 = first_const_call_expr_arg (arg1, &iter1);
		 a0 && a1;
		 a0 = next_const_call_expr_arg (&iter0),
		   a1 = next_const_call_expr_arg (&iter1))
	      if (! operand_equal_p (a0, a1, flags))
		return 0;

	    /* If we get here and both argument lists are exhausted
	       then the CALL_EXPRs are equal.  */
	    return ! (a0 || a1);
	  }

	default:
	  return 0;
	}

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
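
/* For example, under OEP_ONLY_CONST the REAL_CSTs -0.0 and 0.0 compare
   unequal when the mode honors signed zeros but equal otherwise, and any
   expression with side effects, such as a call, compares unequal even to
   itself unless both operands are the same SAVE_EXPR.  */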
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
	 to match the longer operand.  */
      primarg1 = fold_convert (signed_or_unsigned_type_for
			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
	return 1;
    }

  return 0;
}
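
/* For example, if C has type char, shorten_compare may rewrite the
   comparison "(int) C == 97" as "C == 'a'"; this predicate then recognizes
   C as matching the original operand (int) C.  */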
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    tclass = tcc_binary;

  else if (tclass == tcc_expression && code == SAVE_EXPR
	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      tclass = tcc_unary;
      *save_p = 1;
    }

  switch (tclass)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;
      else
	return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;
      else
	return 0;

      return 1;

    default:
      return 0;
    }
}
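
/* For example, for the expression "(a < b) | (a > b)" this records
   CVAL1 = a and CVAL2 = b and returns 1; an expression that also uses a
   third variable outside of a comparison makes it return 0.  */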
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (location_t loc, tree arg, tree old0, tree new0,
	    tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    tclass = tcc_binary;

  switch (tclass)
    {
    case tcc_unary:
      return fold_build1_loc (loc, code, type,
			      eval_subst (loc, TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2_loc (loc, code, type,
			      eval_subst (loc, TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (loc, TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
	{
	case SAVE_EXPR:
	  return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
			     old1, new1);

	case COMPOUND_EXPR:
	  return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
			     old1, new1);

	case COND_EXPR:
	  return fold_build3_loc (loc, code, type,
				  eval_subst (loc, TREE_OPERAND (arg, 0),
					      old0, new0, old1, new1),
				  eval_subst (loc, TREE_OPERAND (arg, 1),
					      old0, new0, old1, new1),
				  eval_subst (loc, TREE_OPERAND (arg, 2),
					      old0, new0, old1, new1));
	default:
	  break;
	}
      /* Fall through - ???  */

    case tcc_comparison:
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold_build2_loc (loc, code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}
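
/* For example, substituting into "a < b" with OLD0 = a and NEW0 = c
   rebuilds the comparison as the folded tree for "c < b"; only comparison
   operands are substituted, the rest of the tree is merely refolded.  */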
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement cast to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
		       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
		       fold_ignored_result (omitted), t);

  return non_lvalue_loc (loc, t);
}
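
/* For example, when "f () * 0" is folded to 0, OMITTED is the call f ()
   and the result becomes the COMPOUND_EXPR "(f (), 0)" so that the call's
   side effects are preserved.  */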
/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
			       tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement cast to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
		       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
		       fold_ignored_result (omitted), t);

  return pedantic_non_lvalue_loc (loc, t);
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands_loc (location_t loc, tree type, tree result,
		       tree omitted1, tree omitted2)
{
  tree t = fold_convert_loc (loc, type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
}
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

static tree
fold_truth_not_expr (location_t loc, tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  location_t loc1, loc2;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
	  && flag_trapping_math
	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
	  && code != NE_EXPR && code != EQ_EXPR)
	return NULL_TREE;

      code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
      if (code == ERROR_MARK)
	return NULL_TREE;

      return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
			 TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_OR_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_AND_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
			   TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	return build2_loc (loc, TRUTH_XOR_EXPR, type,
			   invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
			   TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ORIF_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
	tree arg1 = TREE_OPERAND (arg, 1);
	tree arg2 = TREE_OPERAND (arg, 2);

	loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
	loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);

	/* A COND_EXPR may have a throw as one operand, which
	   then has void type.  Just leave void operands
	   as they are.  */
	return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
			   VOID_TYPE_P (TREE_TYPE (arg1))
			   ? arg1 : invert_truthvalue_loc (loc1, arg1),
			   VOID_TYPE_P (TREE_TYPE (arg2))
			   ? arg2 : invert_truthvalue_loc (loc2, arg2));
      }

    case COMPOUND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, COMPOUND_EXPR, type,
			 TREE_OPERAND (arg, 0),
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));

    CASE_CONVERT:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
	return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

      /* ... fall through ...  */

    case FLOAT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, TREE_CODE (arg), type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	return NULL_TREE;
      return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, CLEANUP_POINT_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    default:
      return NULL_TREE;
    }
}
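
/* For example, "!(a && b)" becomes "!a || !b", and "!(x < y)" becomes
   "x >= y" when that inversion is safe for the operand types.  */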
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
invert_truthvalue_loc (location_t loc, tree arg)
{
  tree tem;

  if (TREE_CODE (arg) == ERROR_MARK)
    return arg;

  tem = fold_truth_not_expr (loc, arg);
  if (!tem)
    tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);

  return tem;
}
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
	(A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (location_t loc, enum tree_code code, tree type,
		     tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
	  && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  common = fold_convert_loc (loc, type, common);
  left = fold_convert_loc (loc, type, left);
  right = fold_convert_loc (loc, type, right);
  return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
			  fold_build2_loc (loc, code, type, left, right));
}
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */

static tree
distribute_real_division (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
			  TREE_OPERAND (arg1, 1), 0))
    return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
			    fold_build2_loc (loc, code, type,
					     TREE_OPERAND (arg0, 0),
					     TREE_OPERAND (arg1, 0)),
			    TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
		       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
	real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
	real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2_loc (loc, MULT_EXPR, type,
			      TREE_OPERAND (arg0, 0),
			      build_real (type, r0));
    }

  return NULL_TREE;
}
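
/* For example, "x/2.0 - x/4.0" becomes "x * (0.5 - 0.25)", i.e. "x * 0.25";
   the reciprocals are rounded individually, which is why callers are
   expected to apply this only under unsafe math optimizations.  */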
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (location_t loc, tree inner, tree type,
		    HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
{
  tree result, bftype;

  if (bitpos == 0)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
	   || POINTER_TYPE_P (TREE_TYPE (inner)))
	  && host_integerp (size, 0)
	  && tree_low_cst (size, 0) == bitsize)
	return fold_convert_loc (loc, type, inner);
    }

  bftype = type;
  if (TYPE_PRECISION (bftype) != bitsize
      || TYPE_UNSIGNED (bftype) == !unsignedp)
    bftype = build_nonstandard_integer_type (bitsize, 0);

  result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
		       size_int (bitsize), bitsize_int (bitpos));

  if (bftype != type)
    result = fold_convert_loc (loc, type, result);

  return result;
}
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (location_t loc, enum tree_code code,
			    tree compare_type, tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
				&lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
	 sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
				    &runsignedp, &rvolatilep, false);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
	  || lunsignedp != runsignedp || offset != 0
	  || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
	return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  if (lvolatilep
      && GET_MODE_BITSIZE (lmode) > 0
      && flag_strict_volatile_bitfields > 0)
    nmode = lmode;
  else
    nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
			   const_p ? TYPE_ALIGN (TREE_TYPE (linner))
			   : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
				  TYPE_ALIGN (TREE_TYPE (rinner))),
			   word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst_type (unsigned_type, -1);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (nbitsize - lbitsize - lbitpos));

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return fold_build2_loc (loc, code, compare_type,
			    fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
					     make_bit_field_ref (loc, linner,
								 unsigned_type,
								 nbitsize,
								 nbitpos, 1),
					     mask),
			    fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
					     make_bit_field_ref (loc, rinner,
								 unsigned_type,
								 nbitsize,
								 nbitpos, 1),
					     mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
					fold_convert_loc (loc,
							  unsigned_type, rhs),
					size_int (lbitsize))))
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR,
			      fold_convert_loc (loc, signed_type, rhs),
			      size_int (lbitsize - 1));
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = const_binop (BIT_AND_EXPR,
		     const_binop (LSHIFT_EXPR,
				  fold_convert_loc (loc, unsigned_type, rhs),
				  size_int (lbitpos)),
		     mask);

  lhs = build2_loc (loc, code, compare_type,
		    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
  return lhs;
}
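
/* Roughly, a test like "s.f == 3" on a narrow bit-field F is rewritten as
   "(WORD & MASK) == (3 << SHIFT)" where WORD is a word-sized load covering
   the field, avoiding the separate shift that extracting F would need.  */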
/* Subroutine for fold_truth_andor_1: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if the any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
			HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
			int *punsignedp, int *pvolatilep,
			tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (CONVERT_EXPR_P (exp)
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
			       punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst_type (unsigned_type, -1);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
			    fold_convert_loc (loc, unsigned_type, and_mask),
			    mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bits of type TYPE.  */

static int
all_ones_mask_p (const_tree mask, int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_cst_type (signed_type_for (type), -1);

  return
    tree_int_cst_equal (mask,
			const_binop (RSHIFT_EXPR,
				     const_binop (LSHIFT_EXPR, tmask,
						  size_int (precision - size)),
				     size_int (precision - size)));
}
/* Subroutine for fold: determine if VAL is the INTEGER_CST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, const_tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
		 >> (2 * HOST_BITS_PER_WIDE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
		 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
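
/* For example, for a 32-bit int EXP this returns EXP only if VAL is
   INT_MIN, whose bit pattern 0x80000000 is exactly the sign bit.  */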
/* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static bool
simple_operand_p (const_tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
	  || TREE_CODE (exp) == SSA_NAME
	  || (DECL_P (exp)
	      && ! TREE_ADDRESSABLE (exp)
	      && ! TREE_THIS_VOLATILE (exp)
	      && ! DECL_NONLOCAL (exp)
	      /* Don't regard global variables as simple.  They may be
		 allocated in ways unknown to the compiler (shared memory,
		 #pragma weak, etc).  */
	      && ! TREE_PUBLIC (exp)
	      && ! DECL_EXTERNAL (exp)
	      /* Loading a static variable is unduly expensive, but global
		 registers aren't expensive.  */
	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}
/* Subroutine for fold_truth_andor: determine if an operand is simple enough
   to be evaluated unconditionally.
   In addition to simple_operand_p, we assume that comparisons, conversions,
   and logic-not operations are simple, if their operands are simple, too.  */

static bool
simple_operand_p_2 (tree exp)
{
  enum tree_code code;

  if (TREE_SIDE_EFFECTS (exp)
      || tree_could_trap_p (exp))
    return false;

  while (CONVERT_EXPR_P (exp))
    exp = TREE_OPERAND (exp, 0);

  code = TREE_CODE (exp);

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return (simple_operand_p (TREE_OPERAND (exp, 0))
	    && simple_operand_p (TREE_OPERAND (exp, 1)));

  if (code == TRUTH_NOT_EXPR)
    return simple_operand_p_2 (TREE_OPERAND (exp, 0));

  return simple_operand_p (exp);
}
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
	X == 2 || X == 3 || X == 4 || X == 5
   and
	X >= 2 && X <= 5
   are converted to
	(unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
	     tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
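
/* For example, an omitted lower bound compares LT_EXPR-true against any
   finite bound, since its SGN value of -1 stands for a value below every
   representable number.  */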
/* Helper routine for make_range.  Perform one step for it, return
   new expression if the loop should continue or NULL_TREE if it should
   stop.  */

tree
make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
		 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
		 bool *strict_overflow_p)
{
  tree arg0_type = TREE_TYPE (arg0);
  tree n_low, n_high, low = *p_low, high = *p_high;
  int in_p = *p_in_p, n_in_p;

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      *p_in_p = ! in_p;
      return arg0;

    case EQ_EXPR: case NE_EXPR:
    case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
      /* We can only do something if the range is testing for zero
	 and if the second operand is an integer constant.  Note that
	 saying something is "in" the range we make is done by
	 complementing IN_P since it will set in the initial case of
	 being not equal to zero; "out" is leaving it alone.  */
      if (low == NULL_TREE || high == NULL_TREE
	  || ! integer_zerop (low) || ! integer_zerop (high)
	  || TREE_CODE (arg1) != INTEGER_CST)
	return NULL_TREE;

      switch (code)
	{
	case NE_EXPR:  /* - [c, c]  */
	  low = high = arg1;
	  break;
	case EQ_EXPR:  /* + [c, c]  */
	  in_p = ! in_p, low = high = arg1;
	  break;
	case GT_EXPR:  /* - [-, c] */
	  low = 0, high = arg1;
	  break;
	case GE_EXPR:  /* + [c, -] */
	  in_p = ! in_p, low = arg1, high = 0;
	  break;
	case LT_EXPR:  /* - [c, -] */
	  low = arg1, high = 0;
	  break;
	case LE_EXPR:  /* + [-, c] */
	  in_p = ! in_p, low = 0, high = arg1;
	  break;
	default:
	  gcc_unreachable ();
	}

      /* If this is an unsigned comparison, we also know that EXP is
	 greater than or equal to zero.  We base the range tests we make
	 on that fact, so we record it here so we can parse existing
	 range tests.  We test arg0_type since often the return type
	 of, e.g. EQ_EXPR, is boolean.  */
      if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
	{
	  if (! merge_ranges (&n_in_p, &n_low, &n_high,
			      in_p, low, high, 1,
			      build_int_cst (arg0_type, 0),
			      NULL_TREE))
	    return NULL_TREE;

	  in_p = n_in_p, low = n_low, high = n_high;

	  /* If the high bound is missing, but we have a nonzero low
	     bound, reverse the range so it goes from zero to the low bound
	     minus 1.  */
	  if (high == 0 && low && ! integer_zerop (low))
	    {
	      in_p = ! in_p;
	      high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				  integer_one_node, 0);
	      low = build_int_cst (arg0_type, 0);
	    }
	}

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;

    case NEGATE_EXPR:
      /* (-x) IN [a,b] -> x in [-b, -a]  */
      n_low = range_binop (MINUS_EXPR, exp_type,
			   build_int_cst (exp_type, 0),
			   0, high, 1);
      n_high = range_binop (MINUS_EXPR, exp_type,
			    build_int_cst (exp_type, 0),
			    0, low, 0);
      if (n_high != 0 && TREE_OVERFLOW (n_high))
	return NULL_TREE;
      goto normalize;

    case BIT_NOT_EXPR:
      /* ~ X -> -X - 1  */
      return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
			 build_int_cst (exp_type, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
      if (TREE_CODE (arg1) != INTEGER_CST)
	return NULL_TREE;

      /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
	 move a constant to the other side.  */
      if (!TYPE_UNSIGNED (arg0_type)
	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
	return NULL_TREE;

      /* If EXP is signed, any overflow in the computation is undefined,
	 so we don't worry about it so long as our computations on
	 the bounds don't overflow.  For unsigned, overflow is defined
	 and this is exactly the right thing.  */
      n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			   arg0_type, low, 0, arg1, 0);
      n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			    arg0_type, high, 1, arg1, 0);
      if ((n_low != 0 && TREE_OVERFLOW (n_low))
	  || (n_high != 0 && TREE_OVERFLOW (n_high)))
	return NULL_TREE;

      if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
	*strict_overflow_p = true;

    normalize:
      /* Check for an unsigned range which has wrapped around the maximum
	 value thus making n_high < n_low, and normalize it.  */
      if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
	{
	  low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
			     integer_one_node, 0);
	  high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
			      integer_one_node, 0);

	  /* If the range is of the form +/- [ x+1, x ], we won't
	     be able to normalize it.  But then, it represents the
	     whole range or the empty set, so make it
	     +/- [ -, - ].  */
	  if (tree_int_cst_equal (n_low, low)
	      && tree_int_cst_equal (n_high, high))
	    low = high = 0;
	  else
	    in_p = ! in_p;
	}
      else
	low = n_low, high = n_high;

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;

    CASE_CONVERT:
    case NON_LVALUE_EXPR:
      if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
	return NULL_TREE;

      if (! INTEGRAL_TYPE_P (arg0_type)
	  || (low != 0 && ! int_fits_type_p (low, arg0_type))
	  || (high != 0 && ! int_fits_type_p (high, arg0_type)))
	return NULL_TREE;

      n_low = low, n_high = high;

      if (n_low != 0)
	n_low = fold_convert_loc (loc, arg0_type, n_low);

      if (n_high != 0)
	n_high = fold_convert_loc (loc, arg0_type, n_high);

      /* If we're converting arg0 from an unsigned type, to exp,
	 a signed type, we will be doing the comparison as unsigned.
	 The tests above have already verified that LOW and HIGH
	 are both positive.

	 So we have to ensure that we will handle large unsigned
	 values the same way that the current signed bounds treat
	 negative values.  */

      if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
	{
	  tree high_positive;
	  tree equiv_type;
	  /* For fixed-point modes, we need to pass the saturating flag
	     as the 2nd parameter.  */
	  if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
	    equiv_type
	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
						TYPE_SATURATING (arg0_type));
	  else
	    equiv_type
	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);

	  /* A range without an upper bound is, naturally, unbounded.
	     Since convert would have cropped a very large value, use
	     the max value for the destination type.  */
	  high_positive
	    = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
	      : TYPE_MAX_VALUE (arg0_type);

	  if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
	    high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
					     fold_convert_loc (loc, arg0_type,
							       high_positive),
					     build_int_cst (arg0_type, 1));

	  /* If the low bound is specified, "and" the range with the
	     range for which the original unsigned value will be
	     positive.  */
	  if (low != 0)
	    {
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
				  1, fold_convert_loc (loc, arg0_type,
						       integer_zero_node),
				  high_positive))
		return NULL_TREE;

	      in_p = (n_in_p == in_p);
	    }
	  else
	    {
	      /* Otherwise, "or" the range with the range of the input
		 that will be interpreted as negative.  */
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
				  1, fold_convert_loc (loc, arg0_type,
						       integer_zero_node),
				  high_positive))
		return NULL_TREE;

	      in_p = (in_p != n_in_p);
	    }
	}

      *p_low = n_low;
      *p_high = n_high;
      *p_in_p = in_p;
      return arg0;

    default:
      return NULL_TREE;
    }
}
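
/* For example, stepping through "X + 10" with the range + [5, 15] moves the
   constant to the bounds and continues with X and the range + [-5, 5].  */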
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */

tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
	    bool *strict_overflow_p)
{
  enum tree_code code;
  tree arg0, arg1 = NULL_TREE;
  tree exp_type, nexp;
  int in_p;
  tree low, high;
  location_t loc = EXPR_LOCATION (exp);

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);
      arg0 = NULL_TREE;

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  if (TREE_OPERAND_LENGTH (exp) > 0)
	    arg0 = TREE_OPERAND (exp, 0);
	  if (TREE_CODE_CLASS (code) == tcc_binary
	      || TREE_CODE_CLASS (code) == tcc_comparison
	      || (TREE_CODE_CLASS (code) == tcc_expression
		  && TREE_OPERAND_LENGTH (exp) > 1))
	    arg1 = TREE_OPERAND (exp, 1);
	}
      if (arg0 == NULL_TREE)
	break;

      nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
			      &high, &in_p, strict_overflow_p);
      if (nexp == NULL_TREE)
	break;
      exp = nexp;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
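
/* For example, for EXP = "(X + 1) != 0" the loop starts from the range
   - [0, 0] on X + 1, and one make_range_step shifts the constant into the
   bounds, yielding X with the range - [-1, -1] (assuming signed overflow
   is undefined or the type is unsigned).  */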
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

tree
build_range_check (location_t loc, tree type, tree exp, int in_p,
                   tree low, tree high)
{
  tree etype = TREE_TYPE (exp), value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (! in_p)
    {
      value = build_range_check (loc, type, exp, 1, low, high);
      if (value != 0)
        return invert_truthvalue_loc (loc, value);

      return 0;
    }

  if (low == 0 && high == 0)
    return build_int_cst (type, 1);

  if (low == 0)
    return fold_build2_loc (loc, LE_EXPR, type, exp,
                            fold_convert_loc (loc, etype, high));

  if (high == 0)
    return fold_build2_loc (loc, GE_EXPR, type, exp,
                            fold_convert_loc (loc, etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2_loc (loc, EQ_EXPR, type, exp,
                            fold_convert_loc (loc, etype, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
        {
          etype = unsigned_type_for (etype);
          high = fold_convert_loc (loc, etype, high);
          exp = fold_convert_loc (loc, etype, exp);
        }
      return build_range_check (loc, type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
        {
          hi = 0;
          lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
        }
      else
        {
          hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
          lo = (unsigned HOST_WIDE_INT) -1;
        }

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
        {
          if (TYPE_UNSIGNED (etype))
            {
              tree signed_etype = signed_type_for (etype);
              if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
                etype
                  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
              else
                etype = signed_etype;
              exp = fold_convert_loc (loc, etype, exp);
            }
          return fold_build2_loc (loc, GT_EXPR, type, exp,
                                  build_int_cst (etype, 0));
        }
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetic for the type of the expression.
     First make sure that arithmetic in this type is valid, then make sure
     that it wraps around.  */
  if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
    etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
                                            TYPE_UNSIGNED (etype));

  if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
         for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
                          integer_one_node, 1);
      minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
                                      minv, 1, maxv, 1)))
        etype = utype;
      else
        return 0;
    }

  high = fold_convert_loc (loc, etype, high);
  low = fold_convert_loc (loc, etype, low);
  exp = fold_convert_loc (loc, etype, exp);

  value = const_binop (MINUS_EXPR, high, low);

  if (POINTER_TYPE_P (etype))
    {
      if (value != 0 && !TREE_OVERFLOW (value))
        {
          low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
          return build_range_check (loc, type,
                                    fold_build_pointer_plus_loc (loc, exp, low),
                                    1, build_int_cst (etype, 0), value);
        }
      return 0;
    }

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (loc, type,
                              fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
                              1, build_int_cst (etype, 0), value);

  return 0;
}
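
/* Worked example (added annotation): the wrap-around path above implements
   the classic unsigned range trick.  For `int c' and [low, high] =
   ['a', 'z'], the pair of tests `c >= 97 && c <= 122' becomes the single
   comparison `(unsigned int) c - 97 <= 25'; values below 'a' wrap to huge
   unsigned numbers and fail the <= test, so one branch suffices.  */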
/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}

/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}
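
/* Example (added annotation): range_successor on the value 255 of type
   `unsigned char' returns 0 because 255 is TYPE_MAX_VALUE there; callers
   treat the null result as "no successor exists" and punt instead of
   silently wrapping around to 0.  */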
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

bool
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
              tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
                  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
                   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
                                 low0, 0, low1, 0))
      || (lowequal
          && integer_onep (range_binop (GT_EXPR, integer_type_node,
                                        high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
                                          high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
                                      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
         is a subset it is the result.  Otherwise, the range is from the start
         of the second to the end of the first.  */
      if (no_overlap)
        in_p = 0, low = high = 0;
      else if (subset)
        in_p = 1, low = low1, high = high1;
      else
        in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
         equal, the result is false.  If the second range is a subset of the
         first, and the ranges begin at the same place, we go from just after
         the end of the second range to the end of the first.  If the second
         range is not a subset of the first, or if it is a subset and both
         ranges end at the same place, the range starts at the start of the
         first range and ends just before the second range.
         Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
        in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
        in_p = 0, low = high = 0;
      else if (subset && lowequal)
        {
          low = range_successor (high1);
          high = high0;
          in_p = 1;
          if (low == 0)
            {
              /* We are in the weird situation where high0 > high1 but
                 high1 has no successor.  Punt.  */
              return 0;
            }
        }
      else if (! subset || highequal)
        {
          low = low0;
          high = range_predecessor (low1);
          in_p = 1;
          if (high == 0)
            {
              /* low0 < low1 but low1 has no predecessor.  Punt.  */
              return 0;
            }
        }
      else
        return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
         is a subset of the first, the result is false.  Otherwise,
         the range starts just after the first range and ends at the
         end of the second.  */
      if (no_overlap)
        in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
        in_p = 0, low = high = 0;
      else
        {
          low = range_successor (high0);
          high = high1;
          in_p = 1;
          if (low == 0)
            {
              /* high1 > high0 but high0 has no successor.  Punt.  */
              return 0;
            }
        }
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
         is if they don't overlap.  In that case, the only time we have a
         range is if they are adjacent.  If the second is a subset of the
         first, the result is the first.  Otherwise, the range to exclude
         starts at the beginning of the first range and ends at the end of the
         second.  */
      if (no_overlap)
        {
          if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                         range_successor (high0),
                                         1, low1, 0)))
            in_p = 0, low = low0, high = high1;
          else
            {
              /* Canonicalize - [min, x] into - [-, x].  */
              if (low0 && TREE_CODE (low0) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (low0)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (low0))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                    if (tree_int_cst_equal (low0,
                                            TYPE_MIN_VALUE (TREE_TYPE (low0))))
                      low0 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (low0))
                        && integer_zerop (low0))
                      low0 = 0;
                    break;
                  default:
                    break;
                  }

              /* Canonicalize - [x, max] into - [x, -].  */
              if (high1 && TREE_CODE (high1) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (high1)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (high1))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                    if (tree_int_cst_equal (high1,
                                            TYPE_MAX_VALUE (TREE_TYPE (high1))))
                      high1 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (high1))
                        && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
                                                       high1, 1,
                                                       integer_one_node, 1)))
                      high1 = 0;
                    break;
                  default:
                    break;
                  }

              /* The ranges might be also adjacent between the maximum and
                 minimum values of the given type.  For
                 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
                 return + [x + 1, y - 1].  */
              if (low0 == 0 && high1 == 0)
                {
                  low = range_successor (high0);
                  high = range_predecessor (low1);
                  if (low == 0 || high == 0)
                    return 0;

                  in_p = 1;
                }
              else
                return 0;
            }
        }
      else if (subset)
        in_p = 0, low = low0, high = high0;
      else
        in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
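
/* Worked examples (added annotation): with in0_p = in1_p = 1, merging
   [0, 9] and [5, 20] takes the "include both" branch; the ranges overlap
   and neither subsumes the other, so the result is + [low1, high0] =
   [5, 9], their intersection.  Two excluded ranges - [-, 4] and
   - [10, -] are handled by the successor/predecessor path above and
   combine into the single included range + [5, 9].  */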
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is also used to optimize
   A op B ? C : A by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (location_t loc, tree type,
                                tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && (FLOAT_TYPE_P (TREE_TYPE (arg01))
          ? real_zerop (arg01)
          : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
           && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
          /* In the case that A is of the form X-Y, '-A' (arg2) may
             have already been folded to Y-X, check for that.  */
          || (TREE_CODE (arg1) == MINUS_EXPR
              && TREE_CODE (arg2) == MINUS_EXPR
              && operand_equal_p (TREE_OPERAND (arg1, 0),
                                  TREE_OPERAND (arg2, 1), 0)
              && operand_equal_p (TREE_OPERAND (arg1, 1),
                                  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
        tem = fold_convert_loc (loc, arg1_type, arg1);
        return pedantic_non_lvalue_loc (loc,
                                        fold_convert_loc (loc, type,
                                                          negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
        return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
        if (flag_trapping_math)
          break;
        /* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
        if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
          arg1 = fold_convert_loc (loc, signed_type_for
                                   (TREE_TYPE (arg1)), arg1);
        tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
        return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
        if (flag_trapping_math)
          break;
        /* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
        if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
          arg1 = fold_convert_loc (loc, signed_type_for
                                   (TREE_TYPE (arg1)), arg1);
        tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
        return negate_expr (fold_convert_loc (loc, type, tem));
      default:
        gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
        break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
        return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      else if (comp_code == EQ_EXPR)
        return build_int_cst (type, 0);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
         as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
          || (strcmp (lang_hooks.name, "GNU C++") != 0
              && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
          || ! maybe_lvalue_p (arg1)
          || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
        {
          comp_type = type;
          comp_op0 = arg1;
          comp_op1 = arg2;
        }

      switch (comp_code)
        {
        case EQ_EXPR:
          return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
        case NE_EXPR:
          return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
        case LE_EXPR:
        case LT_EXPR:
        case UNLE_EXPR:
        case UNLT_EXPR:
          /* In C++ a ?: expression can be an lvalue, so put the
             operand which will be used if they are equal first
             so that we can convert this back to the
             corresponding COND_EXPR.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            {
              comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
              comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
              tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
                    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
                    : fold_build2_loc (loc, MIN_EXPR, comp_type,
                                       comp_op1, comp_op0);
              return pedantic_non_lvalue_loc (loc,
                                              fold_convert_loc (loc, type, tem));
            }
          break;
        case GE_EXPR:
        case GT_EXPR:
        case UNGE_EXPR:
        case UNGT_EXPR:
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            {
              comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
              comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
              tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
                    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
                    : fold_build2_loc (loc, MAX_EXPR, comp_type,
                                       comp_op1, comp_op0);
              return pedantic_non_lvalue_loc (loc,
                                              fold_convert_loc (loc, type, tem));
            }
          break;
        case UNEQ_EXPR:
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            return pedantic_non_lvalue_loc (loc,
                                            fold_convert_loc (loc, type, arg2));
          break;
        case LTGT_EXPR:
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            return pedantic_non_lvalue_loc (loc,
                                            fold_convert_loc (loc, type, arg1));
          break;
        default:
          gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
          break;
        }
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
        if (TREE_CODE (arg1) == INTEGER_CST)
          break;
        /* We can replace A with C1 in this case.  */
        arg1 = fold_convert_loc (loc, type, arg01);
        return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
        /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
           MIN_EXPR, to preserve the signedness of the comparison.  */
        if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (PLUS_EXPR, arg2,
                                             build_int_cst (type, 1)),
                                OEP_ONLY_CONST))
          {
            tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
                                   fold_convert_loc (loc, TREE_TYPE (arg00),
                                                     arg2));
            return pedantic_non_lvalue_loc (loc,
                                            fold_convert_loc (loc, type, tem));
          }
        break;

      case LE_EXPR:
        /* If C1 is C2 - 1, this is min(A, C2), with the same care
           as above.  */
        if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (MINUS_EXPR, arg2,
                                             build_int_cst (type, 1)),
                                OEP_ONLY_CONST))
          {
            tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
                                   fold_convert_loc (loc, TREE_TYPE (arg00),
                                                     arg2));
            return pedantic_non_lvalue_loc (loc,
                                            fold_convert_loc (loc, type, tem));
          }
        break;

      case GT_EXPR:
        /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
           MAX_EXPR, to preserve the signedness of the comparison.  */
        if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (MINUS_EXPR, arg2,
                                             build_int_cst (type, 1)),
                                OEP_ONLY_CONST))
          {
            tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
                                   fold_convert_loc (loc, TREE_TYPE (arg00),
                                                     arg2));
            return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
          }
        break;

      case GE_EXPR:
        /* If C1 is C2 + 1, this is max(A, C2), with the same care as above.  */
        if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (PLUS_EXPR, arg2,
                                             build_int_cst (type, 1)),
                                OEP_ONLY_CONST))
          {
            tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
                                   fold_convert_loc (loc, TREE_TYPE (arg00),
                                                     arg2));
            return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
          }
        break;

      case NE_EXPR:
        break;

      default:
        gcc_unreachable ();
      }

  return NULL_TREE;
}
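
/* Illustrative examples (added annotation): when signed zeros are not
   honored, `x > 0.0 ? x : -x' folds to ABS_EXPR <x> via the first switch,
   and `i < j ? i : j' folds to MIN_EXPR <i, j> via the second (integers
   have no NaNs to worry about).  The final block catches near-misses such
   as `i < 5 ? i : 4', which is recognized as MIN_EXPR <i, 4> because
   C1 == C2 + 1.  */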
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
                false) >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (location_t loc, enum tree_code code, tree type,
                 tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
               || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
  tree tem;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
                                  "when simplifying range test");

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
                       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (loc, type,
                                         lhs != 0 ? lhs
                                         : rhs != 0 ? rhs : integer_zero_node,
                                         in_p, low, high))))
    {
      if (strict_overflow_p)
        fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue_loc (loc, tem) : tem;
    }

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
           && lhs != 0 && rhs != 0
           && (code == TRUTH_ANDIF_EXPR
               || code == TRUTH_ORIF_EXPR)
           && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
         unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
         which cases we can't do this.  */
      if (simple_operand_p (lhs))
        return build2_loc (loc, code == TRUTH_ANDIF_EXPR
                           ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
                           type, op0, op1);

      else if (!lang_hooks.decls.global_bindings_p ()
               && !CONTAINS_PLACEHOLDER_P (lhs))
        {
          tree common = save_expr (lhs);

          if (0 != (lhs = build_range_check (loc, type, common,
                                             or_op ? ! in0_p : in0_p,
                                             low0, high0))
              && (0 != (rhs = build_range_check (loc, type, common,
                                                 or_op ? ! in1_p : in1_p,
                                                 low1, high1))))
            {
              if (strict_overflow_p)
                fold_overflow_warning (warnmsg,
                                       WARN_STRICT_OVERFLOW_COMPARISON);
              return build2_loc (loc, code == TRUTH_ANDIF_EXPR
                                 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
                                 type, lhs, rhs);
            }
        }
    }

  return 0;
}
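
/* Worked example (added annotation): for `ch >= '0' && ch <= '9'',
   make_range yields + [48, -] and + [-, 57] over the same operand CH;
   merge_ranges combines them into + [48, 57], and build_range_check can
   then emit a single unsigned test along the lines of
   `(unsigned char) (ch - 48) <= 9'.  */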
/* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is sign-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1));

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
                        fold_convert (TREE_TYPE (c), mask));
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
}
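
/* Numeric example (added annotation): take a 32-bit type, P = 4 and
   C = 0xfffffffb, i.e. the 4-bit value -5 already sign-extended.  The
   shifts above compute TEMP = 0xfffffff0 (the sign bit smeared over the
   28 extra bits), and C ^ TEMP = 0x0000000b: the extra bits come out
   zero exactly because C was sign-extended.  For C = 0x0000000b the same
   TEMP sets the extra bits instead.  */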
/* For an expression that has the form
     (A && B) || ~B
   or
     (A || B) && ~B,
   we can drop one of the inner expressions and simplify to
     A || ~B
   or
     A && ~B.

   LOC is the location of the resulting expression.  OP is the inner
   logical operation; the left-hand side in the examples above, while CMPOP
   is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
   removing a condition that guards another, as in
     (A != NULL && A->...) || A == NULL
   which we must not transform.  If RHS_ONLY is true, only eliminate the
   right-most operand of the inner logical operation.  */

static tree
merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
                                 bool rhs_only)
{
  tree type = TREE_TYPE (cmpop);
  enum tree_code code = TREE_CODE (cmpop);
  enum tree_code truthop_code = TREE_CODE (op);
  tree lhs = TREE_OPERAND (op, 0);
  tree rhs = TREE_OPERAND (op, 1);
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code rhs_code = TREE_CODE (rhs);
  enum tree_code lhs_code = TREE_CODE (lhs);
  enum tree_code inv_code;

  if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
    return NULL_TREE;

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return NULL_TREE;

  if (rhs_code == truthop_code)
    {
      tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
      if (newrhs != NULL_TREE)
        {
          rhs = newrhs;
          rhs_code = TREE_CODE (rhs);
        }
    }
  if (lhs_code == truthop_code && !rhs_only)
    {
      tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
      if (newlhs != NULL_TREE)
        {
          lhs = newlhs;
          lhs_code = TREE_CODE (lhs);
        }
    }

  inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
  if (inv_code == rhs_code
      && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return lhs;
  if (!rhs_only && inv_code == lhs_code
      && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return rhs;
  if (rhs != orig_rhs || lhs != orig_lhs)
    return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
                            lhs, rhs);
  return NULL_TREE;
}
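
/* Example (added annotation): for OP = `a > 0 && b > 0' and
   CMPOP = `a <= 0', the inverted comparison of CMPOP is `a > 0', which
   matches the left arm of OP; the arm is dropped, so
   `(a > 0 && b > 0) || a <= 0' simplifies to `b > 0 || a <= 0'.  */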
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
                    tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
        (a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  HOST_WIDE_INT first_bit, end_bit;
  int volatilep;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
                    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
                    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
          && operand_equal_p (lr_arg, rr_arg, 0))
        {
          result = combine_comparisons (loc, code, lcode, rcode,
                                        truth_type, ll_arg, lr_arg);
          if (result)
            return result;
        }
      else if (operand_equal_p (ll_arg, rr_arg, 0)
               && operand_equal_p (lr_arg, rl_arg, 0))
        {
          result = combine_comparisons (loc, code, lcode,
                                        swap_tree_comparison (rcode),
                                        truth_type, ll_arg, lr_arg);
          if (result)
            return result;
        }
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
          ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  */

  if (BRANCH_COST (optimize_function_for_speed_p (cfun),
                   false) >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
          && lcode == NE_EXPR && integer_zerop (lr_arg)
          && rcode == NE_EXPR && integer_zerop (rr_arg)
          && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
          && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
        return build2_loc (loc, NE_EXPR, truth_type,
                           build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
                                   ll_arg, rl_arg),
                           build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
          && lcode == EQ_EXPR && integer_zerop (lr_arg)
          && rcode == EQ_EXPR && integer_zerop (rr_arg)
          && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
          && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
        return build2_loc (loc, EQ_EXPR, truth_type,
                           build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
                                   ll_arg, rl_arg),
                           build_int_cst (TREE_TYPE (ll_arg), 0));
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (loc, ll_arg,
                                     &ll_bitsize, &ll_bitpos, &ll_mode,
                                     &ll_unsignedp, &volatilep, &ll_mask,
                                     &ll_and_mask);
  lr_inner = decode_field_reference (loc, lr_arg,
                                     &lr_bitsize, &lr_bitpos, &lr_mode,
                                     &lr_unsignedp, &volatilep, &lr_mask,
                                     &lr_and_mask);
  rl_inner = decode_field_reference (loc, rl_arg,
                                     &rl_bitsize, &rl_bitpos, &rl_mode,
                                     &rl_unsignedp, &volatilep, &rl_mask,
                                     &rl_and_mask);
  rr_inner = decode_field_reference (loc, rr_arg,
                                     &rr_bitsize, &rr_bitpos, &rr_mode,
                                     &rr_unsignedp, &volatilep, &rr_mask,
                                     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
           || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
        {
          /* Make the left operand unsigned, since we are only interested
             in the value of one bit.  Otherwise we are doing the wrong
             thing below.  */
          ll_unsignedp = 1;
          l_const = ll_mask;
        }
      else
        return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
        {
          rl_unsignedp = 1;
          r_const = rl_mask;
        }
      else
        return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
                          TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
                          volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
                         size_int (xll_bitpos));
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
                         size_int (xrl_bitpos));

  if (l_const)
    {
      l_const = fold_convert_loc (loc, lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
                                        fold_build1_loc (loc, BIT_NOT_EXPR,
                                                         lntype, ll_mask))))
        {
          warning (0, "comparison is always %d", wanted_code == NE_EXPR);

          return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
        }
    }
  if (r_const)
    {
      r_const = fold_convert_loc (loc, lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
                                        fold_build1_loc (loc, BIT_NOT_EXPR,
                                                         lntype, rl_mask))))
        {
          warning (0, "comparison is always %d", wanted_code == NE_EXPR);

          return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
        }
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
          || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
          /* Make sure the two fields on the right
             correspond to the left without being swapped.  */
          || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
        return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
                              TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
                              volatilep);
      if (rnmode == VOIDmode)
        return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
        {
          xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
          xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
        }

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
                                                            rntype, lr_mask),
                             size_int (xlr_bitpos));
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
                                                            rntype, rr_mask),
                             size_int (xrr_bitpos));

      /* Make a mask that corresponds to both fields being compared.
         Do this for both items being compared.  If the operands are the
         same size and the bits being compared are in the same position
         then we can do this by masking both and comparing the masked
         results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
        {
          lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
                                    ll_unsignedp || rl_unsignedp);
          if (! all_ones_mask_p (ll_mask, lnbitsize))
            lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

          rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
                                    lr_unsignedp || rr_unsignedp);
          if (! all_ones_mask_p (lr_mask, rnbitsize))
            rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

          return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
        }

      /* There is still another way we can do something:  If both pairs of
         fields being compared are adjacent, we may be able to make a wider
         field containing them both.

         Note that we still must mask the lhs/rhs expressions.  Furthermore,
         the mask must be shifted to account for the shift done by
         make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
           && lr_bitsize + lr_bitpos == rr_bitpos)
          || (ll_bitpos == rl_bitpos + rl_bitsize
              && lr_bitpos == rr_bitpos + rr_bitsize))
        {
          tree type;

          lhs = make_bit_field_ref (loc, ll_inner, lntype,
                                    ll_bitsize + rl_bitsize,
                                    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
          rhs = make_bit_field_ref (loc, lr_inner, rntype,
                                    lr_bitsize + rr_bitsize,
                                    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

          ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
                                 size_int (MIN (xll_bitpos, xrl_bitpos)));
          lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
                                 size_int (MIN (xlr_bitpos, xrr_bitpos)));

          /* Convert to the smaller type before masking out unwanted bits.  */
          type = lntype;
          if (lntype != rntype)
            {
              if (lnbitsize > rnbitsize)
                {
                  lhs = fold_convert_loc (loc, rntype, lhs);
                  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
                  type = rntype;
                }
              else if (lnbitsize < rnbitsize)
                {
                  rhs = fold_convert_loc (loc, lntype, rhs);
                  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
                  type = lntype;
                }
            }

          if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
            lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

          if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
            rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

          return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
        }

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
                           const_binop (BIT_AND_EXPR, result, r_const)) != 1)
    {
      if (wanted_code == NE_EXPR)
        {
          warning (0, "%<or%> of unmatched not-equal tests is always 1");
          return constant_boolean_node (true, truth_type);
        }
      else
        {
          warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
          return constant_boolean_node (false, truth_type);
        }
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
                               ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);

  return build2_loc (loc, wanted_code, truth_type, result,
                     const_binop (BIT_IOR_EXPR, l_const, r_const));
}
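
/* Worked example (added annotation; details are target-dependent): for
   `struct s { unsigned a : 4; unsigned b : 4; } *p', the test
   `p->a == 2 && p->b == 3' can be merged here into one QImode load:
   both fields fit in a single byte, the two masks are IOR'ed together,
   and the byte is compared against the merged constant (0x32 on a
   little-endian layout, where B occupies the high nibble).  */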
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  Return NULL_TREE if no simplification is possible.  */

static tree
optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
                            tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return NULL_TREE.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
        tree tem
          = optimize_minmax_comparison (loc,
                                        invert_tree_comparison (code, false),
                                        type, op0, op1);
        if (tem)
          return invert_truthvalue_loc (loc, tem);
        return NULL_TREE;
      }

    case GE_EXPR:
      return
        fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
                         optimize_minmax_comparison
                         (loc, EQ_EXPR, type, arg0, comp_const),
                         optimize_minmax_comparison
                         (loc, GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
        /* MAX (X, 0) == 0  ->  X <= 0  */
        return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
        /* MAX (X, 0) == 5  ->  X == 5   */
        return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
        /* MAX (X, 0) == -1  ->  false  */
        return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else if (consts_equal)
        /* MIN (X, 0) == 0  ->  X >= 0  */
        return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
        /* MIN (X, 0) == 5  ->  false  */
        return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
        /* MIN (X, 0) == -1  ->  X == -1  */
        return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
        /* MAX (X, 0) > 0  ->  X > 0
           MAX (X, 0) > 5  ->  X > 5  */
        return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
        /* MAX (X, 0) > -1  ->  true  */
        return omit_one_operand_loc (loc, type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
        /* MIN (X, 0) > 0  ->  false
           MIN (X, 0) > 5  ->  false  */
        return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
        /* MIN (X, 0) > -1  ->  X > -1  */
        return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

    default:
      gcc_unreachable ();
    }
}
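
/* Example of the recursive strategy (added annotation):
   `MAX (x, 0) >= 5' is rewritten above as the disjunction
   `MAX (x, 0) == 5 || MAX (x, 0) > 5'; the EQ_EXPR and GT_EXPR cases
   reduce those to `x == 5' and `x > 5' (since 0 < 5), and later folding
   can merge the disjunction back into `x >= 5'.  */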
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.

   We set *STRICT_OVERFLOW_P to true if the return value depends on
   signed overflow being undefined.  Otherwise we do not change
   *STRICT_OVERFLOW_P.  */

static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
                bool *strict_overflow_p)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
  depth--;

  return ret;
}

static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
                  bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
                                   > GET_MODE_SIZE (TYPE_MODE (type)))
                ? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;
  bool sub_strict_overflow_p;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
         or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
          || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
        return const_binop (code, fold_convert (ctype, t),
                            fold_convert (ctype, c));
      break;

    CASE_CONVERT: case NON_LVALUE_EXPR:
      /* If op0 is an expression ... */
      if ((COMPARISON_CLASS_P (op0)
           || UNARY_CLASS_P (op0)
           || BINARY_CLASS_P (op0)
           || VL_EXP_CLASS_P (op0)
           || EXPRESSION_CLASS_P (op0))
          /* ... and has wrapping overflow, and its type is smaller
             than ctype, then we cannot pass through as widening.  */
          && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
               && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
                     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
               && (TYPE_PRECISION (ctype)
                   > TYPE_PRECISION (TREE_TYPE (op0))))
              /* ... or this is a truncation (t is narrower than op0),
                 then we cannot pass through this narrowing.  */
              || (TYPE_PRECISION (type)
                  < TYPE_PRECISION (TREE_TYPE (op0)))
              /* ... or signedness changes for division or modulus,
                 then we cannot pass through this conversion.  */
              || (code != MULT_EXPR
                  && (TYPE_UNSIGNED (ctype)
                      != TYPE_UNSIGNED (TREE_TYPE (op0))))
              /* ... or has undefined overflow while the converted to
                 type has not, we cannot do the operation in the inner type
                 as that would introduce undefined overflow.  */
              || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
                  && !TYPE_OVERFLOW_UNDEFINED (type))))
        break;

      /* Pass the constant down and see if we can make a simplification.  If
         we can, replace this expression with the inner simplification for
         possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
          && TREE_CODE (t2) == INTEGER_CST
          && !TREE_OVERFLOW (t2)
          && (0 != (t1 = extract_muldiv (op0, t2, code,
                                         code == MULT_EXPR
                                         ? ctype : NULL_TREE,
                                         strict_overflow_p))))
        return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
         must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
        {
          tree cstype = (*signed_type_for) (ctype);
          if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
              != 0)
            {
              t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
              return fold_convert (ctype, t1);
            }
          break;
        }
      /* If the constant is negative, we cannot simplify this.  */
      if (tree_int_cst_sgn (c) == -1)
        break;
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
          != 0)
        return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
         this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
        break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      sub_strict_overflow_p = false;
      if ((t1 = extract_muldiv (op0, c, code, wide_type,
                                &sub_strict_overflow_p)) != 0
          && (t2 = extract_muldiv (op1, c, code, wide_type,
                                   &sub_strict_overflow_p)) != 0)
        {
          if (tree_int_cst_sgn (c) < 0)
            tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
                              fold_convert (ctype, t2));
        }
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
         or floor division, by a power of two, so we can treat it that
         way unless the multiplier or divisor overflows.  Signed
         left-shift overflow is implementation-defined rather than
         undefined in C90, so do not convert signed left shift into
         multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
          && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
          && TREE_INT_CST_HIGH (op1) == 0
          && 0 != (t1 = fold_convert (ctype,
                                      const_binop (LSHIFT_EXPR,
                                                   size_one_node, op1)))
          && !TREE_OVERFLOW (t1))
        return extract_muldiv (build2 (tcode == LSHIFT_EXPR
                                       ? MULT_EXPR : FLOOR_DIV_EXPR,
                                       ctype,
                                       fold_convert (ctype, op0),
                                       t1),
                               c, code, wide_type, strict_overflow_p);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
         can return a new PLUS or MINUS.  If we can't, the only remaining
         cases where we can do anything are if the second operand is a
         constant.  */
      sub_strict_overflow_p = false;
      t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
      t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
      if (t1 != 0 && t2 != 0
          && (code == MULT_EXPR
              /* If not multiplication, we can only do this if both operands
                 are divisible by c.  */
              || (multiple_of_p (ctype, op0, c)
                  && multiple_of_p (ctype, op1, c))))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
                              fold_convert (ctype, t2));
        }

      /* If this was a subtraction, negate OP1 and set it to be an addition.
         This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
        {
          tcode = PLUS_EXPR, op1 = negate_expr (op1);
          /* If OP1 was not easily negatable, the constant may be OP0.  */
          if (TREE_CODE (op0) == INTEGER_CST)
            {
              tree tem = op0;
              op0 = op1;
              op1 = tem;
              tem = t1;
              t1 = t2;
              t2 = tem;
            }
        }

      if (TREE_CODE (op1) != INTEGER_CST)
        break;

      /* If either OP1 or C are negative, this optimization is not safe for
         some of the division and remainder types while for others we need
         to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
        {
          if (code == CEIL_DIV_EXPR)
            code = FLOOR_DIV_EXPR;
          else if (code == FLOOR_DIV_EXPR)
            code = CEIL_DIV_EXPR;
          else if (code != MULT_EXPR
                   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
            break;
        }

      /* If it's a multiply or a division/modulus operation of a multiple
         of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
          || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
        {
          op1 = const_binop (code, fold_convert (ctype, op1),
                             fold_convert (ctype, c));
          /* We allow the constant to overflow with wrapping semantics.  */
          if (op1 == 0
              || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
            break;
        }
      else
        break;

      /* If we have an unsigned type that is not a sizetype, we cannot widen
         the operation since it will change the result if the original
         computation overflowed.  */
      if (TYPE_UNSIGNED (ctype)
          && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
          && ctype != type)
        break;

      /* If we were able to eliminate our operation from the first side,
         apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
        return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
         apply the distributive law to commute the multiply and addition
         if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
        return fold_build2 (tcode, ctype,
                            fold_build2 (code, ctype,
                                         fold_convert (ctype, op0),
                                         fold_convert (ctype, c)),
                            op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
         (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
           || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
          /* If the multiplication can overflow we cannot optimize this.
             ???  Until we can properly mark individual operations as
             not overflowing we need to treat sizetype special here as
             stor-layout relies on this optimization to make
             DECL_FIELD_BIT_OFFSET always a constant.  */
          && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
              || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                  && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
          && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
          && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
        {
          *strict_overflow_p = true;
          return omit_one_operand (type, integer_zero_node, op0);
        }

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
         new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
         do something only if the second operand is a constant.  */
      if (same_p
          && (t1 = extract_muldiv (op0, c, code, wide_type,
                                   strict_overflow_p)) != 0)
        return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
                            fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
               && (t1 = extract_muldiv (op1, c, code, wide_type,
                                        strict_overflow_p)) != 0)
        return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
                            fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
        return 0;

      /* If these are the same operation types, we can associate them
         assuming no overflow.  */
      if (tcode == code)
        {
          double_int mul;
          int overflow_p;
          mul = double_int_mul_with_sign
                  (double_int_ext
                     (tree_to_double_int (op1),
                      TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
                   double_int_ext
                     (tree_to_double_int (c),
                      TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
                   false, &overflow_p);
          overflow_p = (((!TYPE_UNSIGNED (ctype)
                          || (TREE_CODE (ctype) == INTEGER_TYPE
                              && TYPE_IS_SIZETYPE (ctype)))
                         && overflow_p)
                        | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
          if (!double_int_fits_to_tree_p (ctype, mul)
              && ((TYPE_UNSIGNED (ctype) && tcode != MULT_EXPR)
                  || !TYPE_UNSIGNED (ctype)
                  || (TREE_CODE (ctype) == INTEGER_TYPE
                      && TYPE_IS_SIZETYPE (ctype))))
            overflow_p = 1;
          if (!overflow_p)
            return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
                                double_int_to_tree (ctype, mul));
        }

      /* If these operations "cancel" each other, we have the main
         optimizations of this pass, which occur when either constant is a
         multiple of the other, in which case we replace this with an
         operation of either CODE or TCODE.

         If we have an unsigned type that is not a sizetype, we cannot do
         this since it will change the result if the original computation
         overflowed.  */
      if ((TYPE_OVERFLOW_UNDEFINED (ctype)
           || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
          && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
              || (tcode == MULT_EXPR
                  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
                  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
                  && code != MULT_EXPR)))
        {
          if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
            {
              if (TYPE_OVERFLOW_UNDEFINED (ctype))
                *strict_overflow_p = true;
              return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
                                  fold_convert (ctype,
                                                const_binop (TRUNC_DIV_EXPR,
                                                             op1, c)));
            }
          else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
            {
              if (TYPE_OVERFLOW_UNDEFINED (ctype))
                *strict_overflow_p = true;
              return fold_build2 (code, ctype, fold_convert (ctype, op0),
                                  fold_convert (ctype,
                                                const_binop (TRUNC_DIV_EXPR,
                                                             c, op1)));
            }
        }
      break;

    default:
      break;
    }

  return 0;
}
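
/* Worked example (added annotation): for T = (X * 8) + (Y * 16), C = 4
   and CODE = TRUNC_DIV_EXPR, the PLUS_EXPR case above recurses into both
   MULT_EXPR operands; 8 and 16 are multiples of 4, so the division is
   folded into the constants and the result is (X * 2) + (Y * 4).  The
   transformation is only valid if the original expression cannot overflow
   or overflow is undefined, which is why *STRICT_OVERFLOW_P may be set.  */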
/* Return a node which has the indicated constant VALUE (either 0 or
   1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
   and is of the indicated TYPE.  */

tree
constant_boolean_node (bool value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else if (TREE_CODE (type) == VECTOR_TYPE)
    return build_vector_from_val (type,
                                  build_int_cst (TREE_TYPE (type),
                                                 value ? -1 : 0));
  else
    return fold_convert (type, value ? integer_one_node : integer_zero_node);
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (location_t loc,
                                     enum tree_code code,
                                     tree type, tree op0, tree op1,
                                     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;

  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an exception, then it does not make
         sense to try to perform a logical or arithmetic operation
         involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
        lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
        rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  /* This transformation is only worthwhile if we don't have to wrap ARG
     in a SAVE_EXPR and the operation can be simplified on at least one
     of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg)
      && (TREE_SIDE_EFFECTS (arg)
          || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
    return NULL_TREE;

  arg = fold_convert_loc (loc, arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert_loc (loc, cond_type, true_value);
      if (cond_first_p)
        lhs = fold_build2_loc (loc, code, type, true_value, arg);
      else
        lhs = fold_build2_loc (loc, code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert_loc (loc, cond_type, false_value);
      if (cond_first_p)
        rhs = fold_build2_loc (loc, code, type, false_value, arg);
      else
        rhs = fold_build2_loc (loc, code, type, arg, false_value);
    }

  /* Check that we have simplified at least one of the branches.  */
  if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
    return NULL_TREE;

  return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
}
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

bool
fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */
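/* For example, sqrt(x) < -1.0 is folded to constant false, since sqrt
   never yields a negative value, and sqrt(x) > 2.0 is folded to
   x > 4.0; both remain correct for NaN operands under the usual IEEE
   comparison semantics.  */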
static tree
fold_mathfn_compare (location_t loc,
                     enum built_in_function fcode, enum tree_code code,
                     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = CALL_EXPR_ARG (arg0, 0);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
        {
          /* sqrt(x) < y is always false, if y is negative.  */
          if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
            return omit_one_operand_loc (loc, type, integer_zero_node, arg);

          /* sqrt(x) > y is always true, if y is negative and we
             don't care about NaNs, i.e. negative values of x.  */
          if (code == NE_EXPR || !HONOR_NANS (mode))
            return omit_one_operand_loc (loc, type, integer_one_node, arg);

          /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
          return fold_build2_loc (loc, GE_EXPR, type, arg,
                                  build_real (TREE_TYPE (arg), dconst0));
        }
      else if (code == GT_EXPR || code == GE_EXPR)
        {
          REAL_VALUE_TYPE c2;

          REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
          real_convert (&c2, mode, &c2);

          if (REAL_VALUE_ISINF (c2))
            {
              /* sqrt(x) > y is x == +Inf, when y is very large.  */
              if (HONOR_INFINITIES (mode))
                return fold_build2_loc (loc, EQ_EXPR, type, arg,
                                        build_real (TREE_TYPE (arg), c2));

              /* sqrt(x) > y is always false, when y is very large
                 and we don't care about infinities.  */
              return omit_one_operand_loc (loc, type, integer_zero_node, arg);
            }

          /* sqrt(x) > c is the same as x > c*c.  */
          return fold_build2_loc (loc, code, type, arg,
                                  build_real (TREE_TYPE (arg), c2));
        }
      else if (code == LT_EXPR || code == LE_EXPR)
        {
          REAL_VALUE_TYPE c2;

          REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
          real_convert (&c2, mode, &c2);

          if (REAL_VALUE_ISINF (c2))
            {
              /* sqrt(x) < y is always true, when y is a very large
                 value and we don't care about NaNs or Infinities.  */
              if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
                return omit_one_operand_loc (loc, type, integer_one_node, arg);

              /* sqrt(x) < y is x != +Inf when y is very large and we
                 don't care about NaNs.  */
              if (! HONOR_NANS (mode))
                return fold_build2_loc (loc, NE_EXPR, type, arg,
                                        build_real (TREE_TYPE (arg), c2));

              /* sqrt(x) < y is x >= 0 when y is very large and we
                 don't care about Infinities.  */
              if (! HONOR_INFINITIES (mode))
                return fold_build2_loc (loc, GE_EXPR, type, arg,
                                        build_real (TREE_TYPE (arg), dconst0));

              /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
              arg = save_expr (arg);
              return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                                      fold_build2_loc (loc, GE_EXPR, type, arg,
                                                       build_real (TREE_TYPE (arg),
                                                                   dconst0)),
                                      fold_build2_loc (loc, NE_EXPR, type, arg,
                                                       build_real (TREE_TYPE (arg),
                                                                   c2)));
            }

          /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
          if (! HONOR_NANS (mode))
            return fold_build2_loc (loc, code, type, arg,
                                    build_real (TREE_TYPE (arg), c2));

          /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
          arg = save_expr (arg);
          return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                                  fold_build2_loc (loc, GE_EXPR, type, arg,
                                                   build_real (TREE_TYPE (arg),
                                                               dconst0)),
                                  fold_build2_loc (loc, code, type, arg,
                                                   build_real (TREE_TYPE (arg),
                                                               c2)));
        }
    }

  return NULL_TREE;
}
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */
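/* For example, x < +Inf becomes a comparison against the largest
   finite value of the type (x <= DBL_MAX for double), and x >= +Inf
   becomes x > DBL_MAX; comparisons against -Inf are handled by first
   swapping the sense of the comparison.  */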
static tree
fold_inf_compare (location_t loc, enum tree_code code, tree type,
                  tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
        return NULL_TREE;
      return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      arg0 = save_expr (arg0);
      return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
                              arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
                              arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
        return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
                                arg0, build_real (TREE_TYPE (arg0), max));

      temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
                              arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */
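/* For example, the signed comparison X / 4 == 3 is folded to the
   range check 12 <= X && X <= 15, since those are exactly the values
   whose quotient truncates to 3.  */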
static tree
fold_div_compare (location_t loc,
                  enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  double_int val;
  bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
  bool neg_overflow;
  int overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1);  */
  overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
                                   TREE_INT_CST_HIGH (arg01),
                                   TREE_INT_CST_LOW (arg1),
                                   TREE_INT_CST_HIGH (arg1),
                                   &val.low, &val.high, unsigned_p);
  prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
  neg_overflow = false;

  if (unsigned_p)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1));
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp).  */
      overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
                                       TREE_INT_CST_HIGH (prod),
                                       TREE_INT_CST_LOW (tmp),
                                       TREE_INT_CST_HIGH (tmp),
                                       &val.low, &val.high, unsigned_p);
      hi = force_fit_type_double (TREE_TYPE (arg00), val,
                                  -1, overflow | TREE_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          neg_overflow = true;
          lo = int_const_binop (MINUS_EXPR, prod, tmp);
          hi = prod;
          break;

        case 0:
          lo = fold_negate_const (tmp, TREE_TYPE (arg0));
          hi = tmp;
          break;

        case 1:
          hi = int_const_binop (PLUS_EXPR, prod, tmp);
          lo = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
        {
        case -1:
          hi = int_const_binop (MINUS_EXPR, prod, tmp);
          lo = prod;
          break;

        case 0:
          hi = fold_negate_const (tmp, TREE_TYPE (arg0));
          lo = tmp;
          break;

        case 1:
          neg_overflow = true;
          lo = int_const_binop (PLUS_EXPR, prod, tmp);
          hi = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
        return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
        return omit_one_operand_loc (loc, type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
        return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
        return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
        {
          tmp = neg_overflow ? integer_zero_node : integer_one_node;
          return omit_one_operand_loc (loc, type, tmp, arg00);
        }
      return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
        {
          tmp = neg_overflow ? integer_zero_node : integer_one_node;
          return omit_one_operand_loc (loc, type, tmp, arg00);
        }
      return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
        {
          tmp = neg_overflow ? integer_one_node : integer_zero_node;
          return omit_one_operand_loc (loc, type, tmp, arg00);
        }
      return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
        {
          tmp = neg_overflow ? integer_one_node : integer_zero_node;
          return omit_one_operand_loc (loc, type, tmp, arg00);
        }
      return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */
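/* For example, for a 32-bit int A, (A & 0x80000000) != 0 tests the
   sign bit and is folded to A < 0 with A viewed as signed; likewise
   (A & 0x80000000) == 0 becomes A >= 0.  */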
static tree
fold_single_bit_test_into_sign_test (location_t loc,
                                     enum tree_code code, tree arg0, tree arg1,
                                     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
         this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
          /* This is only a win if casting to a signed type is cheap,
             i.e. when arg00's type is not a partial mode.  */
          && TYPE_PRECISION (TREE_TYPE (arg00))
             == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
        {
          tree stype = signed_type_for (TREE_TYPE (arg00));
          return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                                  result_type,
                                  fold_convert_loc (loc, stype, arg00),
                                  build_int_cst (stype, 0));
        }
    }

  return NULL_TREE;
}
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */
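/* For example, (A & 8) != 0 is folded to (A >> 3) & 1, and
   (A & 8) == 0 to ((A >> 3) ^ 1) & 1, with the shift done in an
   unsigned type when that lets the final AND combine better.  */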
tree
fold_single_bit_test (location_t loc, enum tree_code code,
                      tree arg0, tree arg1, tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
         test.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
                                                 result_type);
      if (tem)
        return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
         convert that into ((A >> C2) & 1).  Where C2 = log2(C).
         Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
                      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert_loc (loc, intermediate_type, inner);

      if (bitnum != 0)
        inner = build2 (RSHIFT_EXPR, intermediate_type,
                        inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      if (code == EQ_EXPR)
        inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert_loc (loc, result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (const_tree arg0, const_tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
         && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == FIXED_CST)
    return 0;
  if (TREE_CODE (arg0) == FIXED_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (optimize_function_for_size_p (cfun))
    return 0;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  /* Put SSA_NAMEs last.  */
  if (TREE_CODE (arg1) == SSA_NAME)
    return 0;
  if (TREE_CODE (arg0) == SSA_NAME)
    return 1;

  /* Put variables last.  */
  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  return 0;
}
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */
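/* For example, if C has type unsigned char, (int) C == 12345 can
   never hold and is folded to constant false, while (int) C == 42 is
   folded to C == 42, evaluated in the narrower type.  */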
static tree
fold_widened_comparison (location_t loc, enum tree_code code,
                         tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (shorter_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, NULL_TREE);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
          || ((TYPE_PRECISION (shorter_type)
               >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
              && (TYPE_UNSIGNED (shorter_type)
                  == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
          || (TREE_CODE (arg1_unw) == INTEGER_CST
              && (TREE_CODE (shorter_type) == INTEGER_TYPE
                  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
              && int_fits_type_p (arg1_unw, shorter_type))))
    return fold_build2_loc (loc, code, type, arg0_unw,
                            fold_convert_loc (loc, shorter_type, arg1_unw));

  if (TREE_CODE (arg1_unw) != INTEGER_CST
      || TREE_CODE (shorter_type) != INTEGER_TYPE
      || !int_fits_type_p (arg1_unw, shorter_type))
    return NULL_TREE;

  /* If we are comparing with the integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
                                                   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
                                                   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      else if (below)
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

    case GT_EXPR:
    case GE_EXPR:
      if (above)
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      else if (below)
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);

    default:
      break;
    }

  return NULL_TREE;
}
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */
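/* For example, if I has type int, (unsigned int) I == 5 is folded to
   I == 5, since equality is unaffected by the change of sign.  */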
static tree
fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
                              tree arg0, tree arg1)
{
  tree arg0_inner;
  tree inner_type, outer_type;

  if (!CONVERT_EXPR_P (arg0))
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) != INTEGER_CST
      && !(CONVERT_EXPR_P (arg1)
           && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
       || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
                                  0, TREE_OVERFLOW (arg1));
  else
    arg1 = fold_convert_loc (loc, inner_type, arg1);

  return fold_build2_loc (loc, code, type, arg0_inner, arg1);
}
/* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
   the step of the array.  Reconstructs s and delta in the case of s *
   delta being an integer constant (and thus already folded).  ADDR is
   the address.  OP1 is the multiplicative expression.  If the
   function succeeds, the new address expression is returned.
   Otherwise NULL_TREE is returned.  LOC is the location of the
   resulting expression.  */
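/* For example, with int A[10] and 4-byte ints, &A[1] p+ 4 * I has
   step s == 4, the element size, and delta == I, so the address is
   rewritten as &A[1 + I].  */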
static tree
try_move_mult_to_index (location_t loc, tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;
  bool mdim = false;

  /* Strip the nops that might be added when converting op1 to sizetype.  */
  STRIP_NOPS (op1);

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
        {
          s = arg0;
          delta = arg1;
        }
      else if (TREE_CODE (arg1) == INTEGER_CST)
        {
          s = arg1;
          delta = arg0;
        }
      else
        return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Simulate we are delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
        {
          tree domain;

          /* Remember if this was a multi-dimensional array.  */
          if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
            mdim = true;

          domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
          if (! domain)
            continue;
          itype = TREE_TYPE (domain);

          step = array_ref_element_size (ref);
          if (TREE_CODE (step) != INTEGER_CST)
            continue;

          if (s)
            {
              if (! tree_int_cst_equal (step, s))
                continue;
            }
          else
            {
              /* Try if delta is a multiple of step.  */
              tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
              if (! tmp)
                continue;
              delta = tmp;
            }

          /* Only fold here if we can verify we do not overflow one
             dimension of a multi-dimensional array.  */
          if (mdim)
            {
              tree tmp;

              if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
                  || !TYPE_MAX_VALUE (domain)
                  || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
                continue;

              tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
                                     fold_convert_loc (loc, itype,
                                                       TREE_OPERAND (ref, 1)),
                                     fold_convert_loc (loc, itype, delta));
              if (!tmp
                  || TREE_CODE (tmp) != INTEGER_CST
                  || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
                continue;
            }

          break;
        }
      else if (TREE_CODE (ref) == COMPONENT_REF
               && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
        {
          tree domain;

          /* Remember if this was a multi-dimensional array.  */
          if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
            mdim = true;

          domain = TYPE_DOMAIN (TREE_TYPE (ref));
          if (! domain)
            continue;
          itype = TREE_TYPE (domain);

          step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
          if (TREE_CODE (step) != INTEGER_CST)
            continue;

          if (s)
            {
              if (! tree_int_cst_equal (step, s))
                continue;
            }
          else
            {
              /* Try if delta is a multiple of step.  */
              tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
              if (! tmp)
                continue;
              delta = tmp;
            }

          /* Only fold here if we can verify we do not overflow one
             dimension of a multi-dimensional array.  */
          if (mdim)
            {
              tree tmp;

              if (!TYPE_MIN_VALUE (domain)
                  || !TYPE_MAX_VALUE (domain)
                  || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
                continue;

              tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
                                     fold_convert_loc (loc, itype,
                                                       TYPE_MIN_VALUE (domain)),
                                     fold_convert_loc (loc, itype, delta));
              if (TREE_CODE (tmp) != INTEGER_CST
                  || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
                continue;
            }

          break;
        }
      else
        mdim = false;

      if (!handled_component_p (ref))
        return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  SET_EXPR_LOCATION (ret, loc);
  pos = ret;

  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  if (TREE_CODE (ref) == ARRAY_REF)
    {
      TREE_OPERAND (pos, 1)
        = fold_build2_loc (loc, PLUS_EXPR, itype,
                           fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
                           fold_convert_loc (loc, itype, delta));
      return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
    }
  else if (TREE_CODE (ref) == COMPONENT_REF)
    {
      gcc_assert (ret == pos);
      ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
                        fold_build2_loc
                          (loc, PLUS_EXPR, itype,
                           fold_convert_loc (loc, itype,
                                             TYPE_MIN_VALUE
                                               (TYPE_DOMAIN (TREE_TYPE (ref)))),
                           fold_convert_loc (loc, itype, delta)),
                        NULL_TREE, NULL_TREE);
      return build_fold_addr_expr_loc (loc, ret);
    }
  else
    return NULL_TREE;
}
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  if (POINTER_TYPE_P (typea))
    {
      /* Convert the pointer types into integer before taking the difference.  */
      tree ta = fold_convert_loc (loc, ssizetype, a);
      tree ta1 = fold_convert_loc (loc, ssizetype, a1);
      diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
    }
  else
    diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);

  if (!diff || !integer_onep (diff))
    return NULL_TREE;

  return fold_build2_loc (loc, GE_EXPR, type, a, y);
}
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */
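/* For example, (A * C) + (B * C) becomes (A + B) * C.  The
   power-of-two path below also handles I*12 + J*4, which has no
   common multiplicand but folds to (I*3 + J) * 4.  */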
static tree
fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
                          tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else if (TREE_CODE (arg0) == INTEGER_CST)
    {
      arg00 = build_one_cst (type);
      arg01 = arg0;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
        return NULL_TREE;
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      arg10 = build_one_cst (type);
      /* As we canonicalize A - 2 to A + -2 get rid of that sign for
         the purpose of this canonicalization.  */
      if (TREE_INT_CST_HIGH (arg1) == -1
          && negate_expr_p (arg1)
          && code == PLUS_EXPR)
        {
          arg11 = negate_expr (arg1);
          code = MINUS_EXPR;
        }
      else
        arg11 = arg1;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
        return NULL_TREE;
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (host_integerp (arg01, 0)
           && host_integerp (arg11, 0))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = TREE_INT_CST_LOW (arg01);
      int11 = TREE_INT_CST_LOW (arg11);

      /* Move min of absolute values to int11.  */
      if (absu_hwi (int01) < absu_hwi (int11))
        {
          tmp = int01, int01 = int11, int11 = tmp;
          alt0 = arg00, arg00 = arg10, arg10 = alt0;
          maybe_same = alt0;
          swap = true;
        }
      else
        maybe_same = arg11;

      if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
          /* The remainder should not be a constant, otherwise we
             end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
             increased the number of multiplications necessary.  */
          && TREE_CODE (arg10) != INTEGER_CST)
        {
          alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
                                  build_int_cst (TREE_TYPE (arg00),
                                                 int01 / int11));
          alt1 = arg10;
          same = maybe_same;
          if (swap)
            maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
        }
    }

  if (same)
    return fold_build2_loc (loc, MULT_EXPR, type,
                            fold_build2_loc (loc, code, type,
                                             fold_convert_loc (loc, type, alt0),
                                             fold_convert_loc (loc, type, alt1)),
                            fold_convert_loc (loc, type, same));

  return NULL_TREE;
}
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */
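/* For example, on a little-endian target with 4-byte ints and
   UNITS_PER_WORD >= 4, the INTEGER_CST 0x01020304 is encoded as the
   bytes {0x04, 0x03, 0x02, 0x01}.  */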
static int
native_encode_int (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (bitpos < HOST_BITS_PER_WIDE_INT)
        value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
      else
        value = (unsigned char) (TREE_INT_CST_HIGH (expr)
                                 >> (bitpos - HOST_BITS_PER_WIDE_INT));

      if (total_bytes > UNITS_PER_WORD)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words, bitpos;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if (total_bytes > len)
    return 0;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (UNITS_PER_WORD < 4)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
    }
  return total_bytes;
}
/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_complex (const_tree expr, unsigned char *ptr, int len)
{
  int rsize, isize;
  tree part;

  part = TREE_REALPART (expr);
  rsize = native_encode_expr (part, ptr, len);
  if (rsize == 0)
    return 0;
  part = TREE_IMAGPART (expr);
  isize = native_encode_expr (part, ptr+rsize, len-rsize);
  if (isize != rsize)
    return 0;
  return rsize + isize;
}
/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_vector (const_tree expr, unsigned char *ptr, int len)
{
  int i, size, offset, count;
  tree itype, elem, elements;

  offset = 0;
  elements = TREE_VECTOR_CST_ELTS (expr);
  count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
  itype = TREE_TYPE (TREE_TYPE (expr));
  size = GET_MODE_SIZE (TYPE_MODE (itype));
  for (i = 0; i < count; i++)
    {
      if (elements)
        {
          elem = TREE_VALUE (elements);
          elements = TREE_CHAIN (elements);
        }
      else
        elem = NULL_TREE;

      if (elem)
        {
          if (native_encode_expr (elem, ptr+offset, len-offset) != size)
            return 0;
        }
      else
        {
          if (offset + size > len)
            return 0;
          memset (ptr+offset, 0, size);
        }
      offset += size;
    }
  return offset;
}
/* Subroutine of native_encode_expr.  Encode the STRING_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_string (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  HOST_WIDE_INT total_bytes;

  if (TREE_CODE (type) != ARRAY_TYPE
      || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
      || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
      || !host_integerp (TYPE_SIZE_UNIT (type), 0))
    return 0;
  total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
  if (total_bytes > len)
    return 0;
  if (TREE_STRING_LENGTH (expr) < total_bytes)
    {
      memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
      memset (ptr + TREE_STRING_LENGTH (expr), 0,
              total_bytes - TREE_STRING_LENGTH (expr));
    }
  else
    memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
  return total_bytes;
}
/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
   REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
   buffer PTR of length LEN bytes.  Return the number of bytes
   placed in the buffer, or zero upon failure.  */

int
native_encode_expr (const_tree expr, unsigned char *ptr, int len)
{
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return native_encode_int (expr, ptr, len);

    case REAL_CST:
      return native_encode_real (expr, ptr, len);

    case COMPLEX_CST:
      return native_encode_complex (expr, ptr, len);

    case VECTOR_CST:
      return native_encode_vector (expr, ptr, len);

    case STRING_CST:
      return native_encode_string (expr, ptr, len);

    default:
      return 0;
    }
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;
  double_int result;

  if (total_bytes > len)
    return NULL_TREE;
  if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
    return NULL_TREE;

  result = double_int_zero;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (total_bytes > UNITS_PER_WORD)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      value = ptr[offset];

      if (bitpos < HOST_BITS_PER_WIDE_INT)
        result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
      else
        result.high |= (unsigned HOST_WIDE_INT) value
                       << (bitpos - HOST_BITS_PER_WIDE_INT);
    }

  return double_int_to_tree (type, result);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  int byte, offset, word, words, bitpos;
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_complex (tree type, const unsigned char *ptr, int len)
{
  tree etype, rpart, ipart;
  int size;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  if (size * 2 > len)
    return NULL_TREE;
  rpart = native_interpret_expr (etype, ptr, size);
  if (!rpart)
    return NULL_TREE;
  ipart = native_interpret_expr (etype, ptr+size, size);
  if (!ipart)
    return NULL_TREE;
  return build_complex (type, rpart, ipart);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_vector (tree type, const unsigned char *ptr, int len)
{
  tree etype, elem, elements;
  int i, size, count;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  count = TYPE_VECTOR_SUBPARTS (type);
  if (size * count > len)
    return NULL_TREE;

  elements = NULL_TREE;
  for (i = count - 1; i >= 0; i--)
    {
      elem = native_interpret_expr (etype, ptr+(i*size), size);
      if (!elem)
        return NULL_TREE;
      elements = tree_cons (NULL_TREE, elem, elements);
    }
  return build_vector (type, elements);
}
/* Subroutine of fold_view_convert_expr.  Interpret the contents of
   the buffer PTR of length LEN as a constant of type TYPE.  For
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
   return NULL_TREE.  */

tree
native_interpret_expr (tree type, const unsigned char *ptr, int len)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      return native_interpret_int (type, ptr, len);

    case REAL_TYPE:
      return native_interpret_real (type, ptr, len);

    case COMPLEX_TYPE:
      return native_interpret_complex (type, ptr, len);

    case VECTOR_TYPE:
      return native_interpret_vector (type, ptr, len);

    default:
      return NULL_TREE;
    }
}
/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */
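/* For example, on a target whose float format is IEEE single
   precision, VIEW_CONVERT_EXPR<int>(1.0f) is folded to 0x3f800000 by
   encoding the REAL_CST into the buffer and interpreting the bytes
   back as an INTEGER_CST.  */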
static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}
/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
        t = build1_loc (loc, NOP_EXPR, ptrtype, t);
    }
  else if (TREE_CODE (t) == MEM_REF
           && integer_zerop (TREE_OPERAND (t, 1)))
    return TREE_OPERAND (t, 0);
  else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
    {
      t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));

      if (TREE_TYPE (t) != ptrtype)
        t = fold_convert_loc (loc, ptrtype, t);
    }
  else
    t = build1_loc (loc, ADDR_EXPR, ptrtype, t);

  return t;
}
/* Build an expression for the address of T.  */

tree
build_fold_addr_expr_loc (location_t loc, tree t)
{
  tree ptrtype = build_pointer_type (TREE_TYPE (t));

  return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
}

static bool vec_cst_ctor_to_array (tree, tree *);
/* Fold a unary expression of code CODE and type TYPE with operand
   OP0.  Return the folded expression if folding is successful.
   Otherwise, return NULL_TREE.  */

tree
fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
{
  tree tem;
  tree arg0;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 1);

  arg0 = op0;
  if (arg0)
    {
      if (CONVERT_EXPR_CODE_P (code)
          || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
        {
          /* Don't use STRIP_NOPS, because signedness of argument type
             matters.  */
          STRIP_SIGN_NOPS (arg0);
        }
      else
        {
          /* Strip any conversions that don't change the mode.  This
             is safe for every expression, except for a comparison
             expression because its signedness is derived from its
             operands.

             Note that this is done as an internal manipulation within
             the constant folder, in order to find the simplest
             representation of the arguments so that their form can be
             studied.  In any cases, the appropriate type conversions
             should be put back in the tree that will get out of the
             constant folder.  */
          STRIP_NOPS (arg0);
        }
    }

  if (TREE_CODE_CLASS (code) == tcc_unary)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
        return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
                       fold_build1_loc (loc, code, type,
                                        fold_convert_loc (loc, TREE_TYPE (op0),
                                                          TREE_OPERAND (arg0, 1))));
      else if (TREE_CODE (arg0) == COND_EXPR)
        {
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg02 = TREE_OPERAND (arg0, 2);
          if (! VOID_TYPE_P (TREE_TYPE (arg01)))
            arg01 = fold_build1_loc (loc, code, type,
                                     fold_convert_loc (loc,
                                                       TREE_TYPE (op0), arg01));
          if (! VOID_TYPE_P (TREE_TYPE (arg02)))
            arg02 = fold_build1_loc (loc, code, type,
                                     fold_convert_loc (loc,
                                                       TREE_TYPE (op0), arg02));
          tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
                                 arg01, arg02);

          /* If this was a conversion, and all we did was to move into
             inside the COND_EXPR, bring it back out.  But leave it if
             it is a conversion from integer to integer and the
             result precision is no wider than a word since such a
             conversion is cheap and may be optimized away by combine,
             while it couldn't if it were outside the COND_EXPR.  Then return
             so we don't get into an infinite recursion loop taking the
             conversion out and then back in.  */

          if ((CONVERT_EXPR_CODE_P (code)
               || code == NON_LVALUE_EXPR)
              && TREE_CODE (tem) == COND_EXPR
              && TREE_CODE (TREE_OPERAND (tem, 1)) == code
              && TREE_CODE (TREE_OPERAND (tem, 2)) == code
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
              && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
              && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
                  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
              && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
                     && (INTEGRAL_TYPE_P
                         (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
                     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
                  || flag_syntax_only))
            tem = build1_loc (loc, code, type,
                              build3 (COND_EXPR,
                                      TREE_TYPE (TREE_OPERAND
                                                 (TREE_OPERAND (tem, 1), 0)),
                                      TREE_OPERAND (tem, 0),
                                      TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
                                      TREE_OPERAND (TREE_OPERAND (tem, 2),
                                                    0)));
          return tem;
        }
    }

  switch (code)
    {
    case PAREN_EXPR:
      /* Re-association barriers around constants and other re-association
         barriers can be removed.  */
      if (CONSTANT_CLASS_P (op0)
          || TREE_CODE (op0) == PAREN_EXPR)
        return fold_convert_loc (loc, type, op0);
      return NULL_TREE;

    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      if (TREE_TYPE (op0) == type)
        return op0;

      if (COMPARISON_CLASS_P (op0))
        {
          /* If we have (type) (a CMP b) and type is an integral type, return
             new expression involving the new type.  Canonicalize
             (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
             non-integral type.
             Do not fold the result as that would not simplify further, also
             folding again results in recursions.  */
          if (TREE_CODE (type) == BOOLEAN_TYPE)
            return build2_loc (loc, TREE_CODE (op0), type,
                               TREE_OPERAND (op0, 0),
                               TREE_OPERAND (op0, 1));
          else if (!INTEGRAL_TYPE_P (type))
            return build3_loc (loc, COND_EXPR, type, op0,
                               constant_boolean_node (true, type),
                               constant_boolean_node (false, type));
        }

      /* Handle cases of two conversions in a row.  */
      if (CONVERT_EXPR_P (op0))
        {
          tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
          tree inter_type = TREE_TYPE (op0);
          int inside_int = INTEGRAL_TYPE_P (inside_type);
          int inside_ptr = POINTER_TYPE_P (inside_type);
          int inside_float = FLOAT_TYPE_P (inside_type);
          int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
          unsigned int inside_prec = TYPE_PRECISION (inside_type);
          int inside_unsignedp = TYPE_UNSIGNED (inside_type);
          int inter_int = INTEGRAL_TYPE_P (inter_type);
          int inter_ptr = POINTER_TYPE_P (inter_type);
          int inter_float = FLOAT_TYPE_P (inter_type);
          int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
          unsigned int inter_prec = TYPE_PRECISION (inter_type);
          int inter_unsignedp = TYPE_UNSIGNED (inter_type);
          int final_int = INTEGRAL_TYPE_P (type);
          int final_ptr = POINTER_TYPE_P (type);
          int final_float = FLOAT_TYPE_P (type);
          int final_vec = TREE_CODE (type) == VECTOR_TYPE;
          unsigned int final_prec = TYPE_PRECISION (type);
          int final_unsignedp = TYPE_UNSIGNED (type);

          /* In addition to the cases of two conversions in a row
             handled below, if we are converting something to its own
             type via an object of identical or wider precision, neither
             conversion is needed.  */
          if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
              && (((inter_int || inter_ptr) && final_int)
                  || (inter_float && final_float))
              && inter_prec >= final_prec)
            return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));

          /* Likewise, if the intermediate and initial types are either both
             float or both integer, we don't need the middle conversion if the
             former is wider than the latter and doesn't change the signedness
             (for integers).  Avoid this if the final type is a pointer since
             then we sometimes need the middle conversion.  Likewise if the
             final type has a precision not equal to the size of its mode.  */
          if (((inter_int && inside_int)
               || (inter_float && inside_float)
               || (inter_vec && inside_vec))
              && inter_prec >= inside_prec
              && (inter_float || inter_vec
                  || inter_unsignedp == inside_unsignedp)
              && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
                    && TYPE_MODE (type) == TYPE_MODE (inter_type))
              && ! final_ptr
              && (! final_vec || inter_prec == inside_prec))
            return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));

          /* If we have a sign-extension of a zero-extended value, we can
             replace that by a single zero-extension.  */
          if (inside_int && inter_int && final_int
              && inside_prec < inter_prec && inter_prec < final_prec
              && inside_unsignedp && !inter_unsignedp)
            return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
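
          /* For example, if C has type unsigned char, (int)(short) C
             sign-extends a zero-extended value, so it is replaced by the
             single zero-extension (int) C.  */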
          /* Two conversions in a row are not needed unless:
             - some conversion is floating-point (overstrict for now), or
             - some conversion is a vector (overstrict for now), or
             - the intermediate type is narrower than both initial and
               final, or
             - the intermediate type and innermost type differ in signedness,
               and the outermost type is wider than the intermediate, or
             - the initial type is a pointer type and the precisions of the
               intermediate and final types differ, or
             - the final type is a pointer type and the precisions of the
               initial and intermediate types differ.  */
          if (! inside_float && ! inter_float && ! final_float
              && ! inside_vec && ! inter_vec && ! final_vec
              && (inter_prec >= inside_prec || inter_prec >= final_prec)
              && ! (inside_int && inter_int
                    && inter_unsignedp != inside_unsignedp
                    && inter_prec < final_prec)
              && ((inter_unsignedp && inter_prec > inside_prec)
                  == (final_unsignedp && final_prec > inter_prec))
              && ! (inside_ptr && inter_prec != final_prec)
              && ! (final_ptr && inside_prec != inter_prec)
              && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
                    && TYPE_MODE (type) == TYPE_MODE (inter_type)))
            return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
        }
      /* Handle (T *)&A.B.C for A being of type T and B and C
         living at offset zero.  This occurs frequently in
         C++ upcasting and then accessing the base.  */
      if (TREE_CODE (op0) == ADDR_EXPR
          && POINTER_TYPE_P (type)
          && handled_component_p (TREE_OPERAND (op0, 0)))
        {
          HOST_WIDE_INT bitsize, bitpos;
          tree offset;
          enum machine_mode mode;
          int unsignedp, volatilep;
          tree base = TREE_OPERAND (op0, 0);
          base = get_inner_reference (base, &bitsize, &bitpos, &offset,
                                      &mode, &unsignedp, &volatilep, false);
          /* If the reference was to a (constant) zero offset, we can use
             the address of the base if it has the same base type
             as the result type and the pointer type is unqualified.  */
          if (! offset && bitpos == 0
              && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
                  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
              && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
            return fold_convert_loc (loc, type,
                                     build_fold_addr_expr_loc (loc, base));
        }
      if (TREE_CODE (op0) == MODIFY_EXPR
          && TREE_CONSTANT (TREE_OPERAND (op0, 1))
          /* Detect assigning a bitfield.  */
          && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
               && DECL_BIT_FIELD
                  (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
        {
          /* Don't leave an assignment inside a conversion
             unless assigning a bitfield.  */
          tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
          /* First do the assignment, then return converted constant.  */
          tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
          TREE_NO_WARNING (tem) = 1;
          TREE_USED (tem) = 1;
          return tem;
        }
      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
         constant (if x has signed type, the sign bit cannot be set
         in c).  This folds extension into the BIT_AND_EXPR.
         ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
         very likely don't have maximal range for their precision and this
         transformation effectively doesn't preserve non-maximal ranges.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && TREE_CODE (op0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
        {
          tree and_expr = op0;
          tree and0 = TREE_OPERAND (and_expr, 0);
          tree and1 = TREE_OPERAND (and_expr, 1);
          int change = 0;

          if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
              || (TYPE_PRECISION (type)
                  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
            change = 1;
          else if (TYPE_PRECISION (TREE_TYPE (and1))
                   <= HOST_BITS_PER_WIDE_INT
                   && host_integerp (and1, 1))
            {
              unsigned HOST_WIDE_INT cst;

              cst = tree_low_cst (and1, 1);
              cst &= (HOST_WIDE_INT) -1
                     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
              change = (cst == 0);
#ifdef LOAD_EXTEND_OP
              if (change
                  && !flag_syntax_only
                  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
                      == ZERO_EXTEND))
                {
                  tree uns = unsigned_type_for (TREE_TYPE (and0));
                  and0 = fold_convert_loc (loc, uns, and0);
                  and1 = fold_convert_loc (loc, uns, and1);
                }
#endif
            }
          if (change)
            {
              tem = force_fit_type_double (type, tree_to_double_int (and1),
                                           0, TREE_OVERFLOW (and1));
              return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                      fold_convert_loc (loc, type, and0), tem);
            }
        }
      /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
         when one of the new casts will fold away.  Conservatively we assume
         that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST.  */
      if (POINTER_TYPE_P (type)
          && TREE_CODE (arg0) == POINTER_PLUS_EXPR
          && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
          && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
              || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
              || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);

          return fold_build_pointer_plus_loc
                   (loc, fold_convert_loc (loc, type, arg00), arg01);
        }
      /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
         of the same precision, and X is an integer type not narrower than
         types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (op0) == BIT_NOT_EXPR
          && INTEGRAL_TYPE_P (TREE_TYPE (op0))
          && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
          && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
        {
          tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
          if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
              && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
            return fold_build1_loc (loc, BIT_NOT_EXPR, type,
                                    fold_convert_loc (loc, type, tem));
        }
      /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
	 type of X and Y (integer types only).  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == MULT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  /* Be careful not to introduce new overflows.  */
	  tree mult_type;
	  if (TYPE_OVERFLOW_WRAPS (type))
	    mult_type = type;
	  else
	    mult_type = unsigned_type_for (type);

	  if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
	    {
	      tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
				     fold_convert_loc (loc, mult_type,
						       TREE_OPERAND (op0, 0)),
				     fold_convert_loc (loc, mult_type,
						       TREE_OPERAND (op0, 1)));
	      return fold_convert_loc (loc, type, tem);
	    }
	}
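      /* E.g. (short) ((int) a * (int) b): only the low-order bits of the
	 product are needed for the narrower result, and performing the
	 multiply in the matching unsigned type keeps this fold from
	 introducing a signed overflow the original tree did not have.  */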
      tem = fold_convert_const (code, type, op0);
      return tem ? tem : NULL_TREE;
    case ADDR_SPACE_CONVERT_EXPR:
      if (integer_zerop (arg0))
	return fold_convert_const (code, type, arg0);
      return NULL_TREE;

    case FIXED_CONVERT_EXPR:
      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;
    case VIEW_CONVERT_EXPR:
      if (TREE_TYPE (op0) == type)
	return op0;
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				type, TREE_OPERAND (op0, 0));
      if (TREE_CODE (op0) == MEM_REF)
	return fold_build2_loc (loc, MEM_REF, type,
				TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));

      /* For integral conversions with the same precision or pointer
	 conversions use a NOP_EXPR instead.  */
      if ((INTEGRAL_TYPE_P (type)
	   || POINTER_TYPE_P (type))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
	      || POINTER_TYPE_P (TREE_TYPE (op0)))
	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
	return fold_convert_loc (loc, type, op0);

      /* Strip inner integral conversions that do not change the precision.  */
      if (CONVERT_EXPR_P (op0)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
	      || POINTER_TYPE_P (TREE_TYPE (op0)))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
	      || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
	  && (TYPE_PRECISION (TREE_TYPE (op0))
	      == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				type, TREE_OPERAND (op0, 0));

      return fold_view_convert_expr (type, op0);
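      /* Note the distinction relied on above: a VIEW_CONVERT_EXPR
	 reinterprets the bits of its operand, while a NOP_EXPR converts
	 the value; for integral or pointer types of equal precision the
	 two coincide, so the cheaper NOP_EXPR is used.  */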
    case NEGATE_EXPR:
      tem = fold_negate_expr (loc, arg0);
      if (tem)
	return fold_convert_loc (loc, type, tem);
      return NULL_TREE;
    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
	       && TREE_CODE (type) == REAL_TYPE)
	{
	  tree targ0 = strip_float_extensions (arg0);
	  if (targ0 != arg0)
	    return fold_convert_loc (loc, type,
				     fold_build1_loc (loc, ABS_EXPR,
						      TREE_TYPE (targ0),
						      targ0));
	}
      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
      else if (TREE_CODE (arg0) == ABS_EXPR)
	return arg0;
      else if (tree_expr_nonnegative_p (arg0))
	return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = fold_strip_sign_ops (arg0);
	  if (tem)
	    return fold_build1_loc (loc, ABS_EXPR, type,
				    fold_convert_loc (loc, type, tem));
	}
      break;
    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
	  tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
				  negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
	  tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
	  return build_complex (type, rpart, negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      break;
    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				build_int_cst (type, 1));
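      /* In two's complement -A == ~A + 1, so ~(-A) == ~(~A + 1) == A - 1;
	 the same identity justifies the inverse fold below.  */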
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
	       && ((TREE_CODE (arg0) == MINUS_EXPR
		    && integer_onep (TREE_OPERAND (arg0, 1)))
		   || (TREE_CODE (arg0) == PLUS_EXPR
		       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
	return fold_build1_loc (loc, NEGATE_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
					 fold_convert_loc (loc, type,
							   TREE_OPERAND (arg0, 0)))))
	return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
					 fold_convert_loc (loc, type,
							   TREE_OPERAND (arg0, 1)))))
	return fold_build2_loc (loc, BIT_XOR_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)), tem);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
	  int count = TYPE_VECTOR_SUBPARTS (type), i;

	  for (i = 0; i < count; i++)
	    {
	      if (elements)
		{
		  elem = TREE_VALUE (elements);
		  elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
		  if (elem == NULL_TREE)
		    break;
		  elements = TREE_CHAIN (elements);
		}
	      else
		elem = build_int_cst (TREE_TYPE (type), -1);
	      list = tree_cons (NULL_TREE, elem, list);
	    }
	  if (i == count)
	    return build_vector (type, nreverse (list));
	}
      break;
    case TRUTH_NOT_EXPR:
      /* The argument to invert_truthvalue must have Boolean type.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
	arg0 = fold_convert_loc (loc, boolean_type_node, arg0);

      /* Note that the operand of this must be an int
	 and its values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (loc, arg0);
      if (!tem)
	return NULL_TREE;
      return fold_convert_loc (loc, type, tem);
    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg0, 1));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_REALPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
				 fold_build1_loc (loc, REALPART_EXPR, itype,
						  TREE_OPERAND (arg0, 0)),
				 fold_build1_loc (loc, REALPART_EXPR, itype,
						  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, REALPART_EXPR, itype,
				 TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_COS);
		if (fn)
		  return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      break;
    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return build_zero_cst (type);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
				     TREE_OPERAND (arg0, 0));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
				 fold_build1_loc (loc, IMAGPART_EXPR, itype,
						  TREE_OPERAND (arg0, 0)),
				 fold_build1_loc (loc, IMAGPART_EXPR, itype,
						  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, negate_expr (tem));
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_SIN);
		if (fn)
		  return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      break;
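      /* The two CALL_EXPR folds above rely on Euler's identity
	 cexpi (x) == cos (x) + i*sin (x): the real part of cexpi is cos
	 and the imaginary part is sin.  */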
    case INDIRECT_REF:
      /* Fold *&X to X if X is an lvalue.  */
      if (TREE_CODE (op0) == ADDR_EXPR)
	{
	  tree op00 = TREE_OPERAND (op0, 0);
	  if ((TREE_CODE (op00) == VAR_DECL
	       || TREE_CODE (op00) == PARM_DECL
	       || TREE_CODE (op00) == RESULT_DECL)
	      && !TREE_READONLY (op00))
	    return op00;
	}
      return NULL_TREE;
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts, vals = NULL_TREE;
	enum tree_code subcode;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 2);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
	  elts += nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else
	  subcode = FLOAT_EXPR;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	for (i = 0; i < nelts; i++)
	  vals = tree_cons (NULL_TREE, elts[nelts - i - 1], vals);
	return build_vector (type, vals);
      }

    default:
      return NULL_TREE;
    } /* switch (code) */
}
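/* As an illustration of the VEC_UNPACK folds above: unpacking a constant
   four-element vector {1, 2, 3, 4} with VEC_UNPACK_LO_EXPR to a
   two-element wider vector widens the half selected by the target's
   endianness, e.g. {1, 2} on a little-endian target.  */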
/* If the operation was a conversion do _not_ mark a resulting constant
   with TREE_OVERFLOW if the original constant was not.  These conversions
   have implementation defined behavior and retaining the TREE_OVERFLOW
   flag here would confuse later passes such as VRP.  */

tree
fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
				tree type, tree op0)
{
  tree res = fold_unary_loc (loc, code, type, op0);
  if (res
      && TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (op0) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (code))
    TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);

  return res;
}
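/* For example, folding (char) 300 yields 44 on targets with 8-bit signed
   char; the conversion wrapped, but because such narrowing has
   implementation-defined rather than undefined behavior, the overflow
   flag is copied from the operand so VRP does not treat the result as
   overflowed.  */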
/* Fold a binary bitwise/truth expression of code CODE and type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting expression.
   ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_truth_andor (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1, tree op0, tree op1)
{
  tree tem;

  /* We only do these simplifications if we are optimizing.  */
  if (!optimize)
    return NULL_TREE;

  /* Check for things like (A || B) && (A || C).  We can convert this
     to A || (B && C).  Note that either operator can be any of the four
     truth and/or operations and the transformation will still be
     valid.  Also note that we only care about order for the
     ANDIF and ORIF operators.  If B contains side effects, this
     might change the truth-value of A.  */
  if (TREE_CODE (arg0) == TREE_CODE (arg1)
      && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_AND_EXPR
	  || TREE_CODE (arg0) == TRUTH_OR_EXPR)
      && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
    {
      tree a00 = TREE_OPERAND (arg0, 0);
      tree a01 = TREE_OPERAND (arg0, 1);
      tree a10 = TREE_OPERAND (arg1, 0);
      tree a11 = TREE_OPERAND (arg1, 1);
      int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			  || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			 && (code == TRUTH_AND_EXPR
			     || code == TRUTH_OR_EXPR));

      if (operand_equal_p (a00, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				fold_build2_loc (loc, code, type, a01, a11));
      else if (commutative && operand_equal_p (a00, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				fold_build2_loc (loc, code, type, a01, a10));
      else if (commutative && operand_equal_p (a01, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
				fold_build2_loc (loc, code, type, a00, a11));

      /* This case is tricky because we must either have commutative
	 operators or else A10 must not have side-effects.  */

      else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
	       && operand_equal_p (a01, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type,
				fold_build2_loc (loc, code, type, a00, a10),
				a01);
    }

  /* See if we can build a range comparison.  */
  if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
    return tem;

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
      if (tem)
	return fold_build2_loc (loc, code, type, tem, arg1);
    }

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
      if (tem)
	return fold_build2_loc (loc, code, type, arg0, tem);
    }

  /* Check for the possibility of merging component references.  If our
     lhs is another similar operation, try to merge its rhs with our
     rhs.  Then try to merge our lhs and rhs.  */
  if (TREE_CODE (arg0) == code
      && 0 != (tem = fold_truth_andor_1 (loc, code, type,
					 TREE_OPERAND (arg0, 1), arg1)))
    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

  if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
    return tem;

  if ((BRANCH_COST (optimize_function_for_speed_p (cfun),
		    false) >= 2)
      && LOGICAL_OP_NON_SHORT_CIRCUIT
      && (code == TRUTH_AND_EXPR
	  || code == TRUTH_ANDIF_EXPR
	  || code == TRUTH_OR_EXPR
	  || code == TRUTH_ORIF_EXPR))
    {
      enum tree_code ncode, icode;

      ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
	      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
      icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;

      /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
	 We don't want to pack more than two leafs to a non-IF AND/OR
	 expression.
	 If tree-code of left-hand operand isn't an AND/OR-IF code and not
	 equal to IF-CODE, then we don't want to add right-hand operand.
	 If the inner right-hand side of left-hand operand has
	 side-effects, or isn't simple, then we can't add to it,
	 as otherwise we might destroy if-sequence.  */
      if (TREE_CODE (arg0) == icode
	  && simple_operand_p_2 (arg1)
	  /* Needed for sequence points to handle trappings, and
	     side-effects.  */
	  && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
				 arg1);
	  return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
				  tem);
	}
      /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
	 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C).  */
      else if (TREE_CODE (arg1) == icode
	       && simple_operand_p_2 (arg0)
	       /* Needed for sequence points to handle trappings, and
		  side-effects.  */
	       && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_build2_loc (loc, ncode, type,
				 arg0, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, icode, type, tem,
				  TREE_OPERAND (arg1, 1));
	}
      /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
	 into (A OR B).
	 For sequence point consistency, we need to check for trapping,
	 and side-effects.  */
      else if (code == icode && simple_operand_p_2 (arg0)
	       && simple_operand_p_2 (arg1))
	return fold_build2_loc (loc, ncode, type, arg0, arg1);
    }

  return NULL_TREE;
}
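/* For instance, when LOGICAL_OP_NON_SHORT_CIRCUIT holds and the branch
   cost is at least 2, "a != 0 && b != 0" with simple, non-trapping
   operands becomes a non-short-circuit TRUTH_AND_EXPR that evaluates
   both operands unconditionally, trading a conditional branch for a
   bitwise AND.  */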
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code compl_code;

  if (code == MIN_EXPR)
    compl_code = MAX_EXPR;
  else if (code == MAX_EXPR)
    compl_code = MIN_EXPR;
  else
    gcc_unreachable ();

  /* MIN (MAX (a, b), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));

  /* MIN (MAX (b, a), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));

  /* MIN (a, MAX (a, b)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));

  /* MIN (a, MAX (b, a)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));

  return NULL_TREE;
}
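/* The reasoning behind these folds: MAX (a, b) >= b always holds, so
   MIN (MAX (a, b), b) must be b; the remaining cases follow by commuting
   operands and swapping MIN with MAX.  */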
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
				 tree arg0, tree arg1,
				 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;
  bool swap = false;

  /* Match A +- CST code arg1 and CST code arg1.  We can change the
     first form only if overflow is undefined.  */
  if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
	 /* In principle pointers also have undefined overflow behavior,
	    but that causes problems elsewhere.  */
	 && !POINTER_TYPE_P (TREE_TYPE (arg0))
	 && (code0 == MINUS_EXPR
	     || code0 == PLUS_EXPR)
	 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	|| code0 == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  if (code0 == INTEGER_CST)
    cst0 = arg0;
  else
    cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  if (code0 == INTEGER_CST)
    {
      /* CST <= arg1  ->  CST-1 < arg1.  */
      if (code == LE_EXPR && sgn0 == 1)
	code = LT_EXPR;
      /* -CST < arg1  ->  -CST-1 <= arg1.  */
      else if (code == LT_EXPR && sgn0 == -1)
	code = LE_EXPR;
      /* CST > arg1  ->  CST-1 >= arg1.  */
      else if (code == GT_EXPR && sgn0 == 1)
	code = GE_EXPR;
      /* -CST >= arg1  ->  -CST-1 > arg1.  */
      else if (code == GE_EXPR && sgn0 == -1)
	code = GT_EXPR;
      else
	return NULL_TREE;
      /* arg1 code' CST' might be more canonical.  */
      swap = true;
    }
  else
    {
      /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
      if (code == LT_EXPR
	  && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = LE_EXPR;
      /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
      else if (code == GT_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = GE_EXPR;
      /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
      else if (code == LE_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = LT_EXPR;
      /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
      else if (code == GE_EXPR
	       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = GT_EXPR;
      else
	return NULL_TREE;
      *strict_overflow_p = true;
    }

  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its type's range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
      && ((sgn0 == 1
	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
	  || (sgn0 == -1
	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
    /* We cannot swap the comparison here as that would cause us to
       endlessly recurse.  */
    return NULL_TREE;

  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
		       cst0, build_int_cst (TREE_TYPE (cst0), 1));
  if (code0 != INTEGER_CST)
    t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
  t = fold_convert (TREE_TYPE (arg1), t);

  /* If swapping might yield a more canonical form, do so.  */
  if (swap)
    return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
  else
    return fold_build2_loc (loc, code, type, t, arg1);
}
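/* Example: for signed x with undefined overflow, "x + 2 > y" is
   canonicalized to "x + 1 >= y" (the A + CST > arg1 case), shrinking the
   constant while preserving the comparison's value.  */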
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
			       tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when reducing constant in comparison");

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
				       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
				       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}
/* Return whether BASE + OFFSET + BITPOS may wrap around the address
   space.  This is used to avoid issuing overflow warnings for
   expressions like &p->x which can not wrap.  */

static bool
pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
{
  unsigned HOST_WIDE_INT offset_low, total_low;
  HOST_WIDE_INT size, offset_high, total_high;

  if (!POINTER_TYPE_P (TREE_TYPE (base)))
    return true;

  if (bitpos < 0)
    return true;

  if (offset == NULL_TREE)
    {
      offset_low = 0;
      offset_high = 0;
    }
  else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
    return true;
  else
    {
      offset_low = TREE_INT_CST_LOW (offset);
      offset_high = TREE_INT_CST_HIGH (offset);
    }

  if (add_double_with_sign (offset_low, offset_high,
			    bitpos / BITS_PER_UNIT, 0,
			    &total_low, &total_high,
			    true))
    return true;

  if (total_high != 0)
    return true;

  size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
  if (size <= 0)
    return true;

  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
     array.  */
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      HOST_WIDE_INT base_size;

      base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
      if (base_size > 0 && size < base_size)
	size = base_size;
    }

  return total_low > (unsigned HOST_WIDE_INT) size;
}
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should call fold_binary.  Fold a comparison with
   tree code CODE and type TYPE with operands OP0 and OP1.  Return
   the folded comparison or NULL_TREE.  */

static tree
fold_comparison (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  tem = fold_relational_const (code, type, arg0, arg1);
  if (tem != NULL_TREE)
    return tem;

  /* If one arg is a real or integer constant, put it last.  */
  if (tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);

  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
      && (TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;
      tree variable = TREE_OPERAND (arg0, 0);
      tree lhs;
      int lhs_add;
      lhs_add = TREE_CODE (arg0) != PLUS_EXPR;

      lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
			     TREE_TYPE (arg1), const2, const1);

      /* If the constant operation overflowed this can be
	 simplified as a comparison against INT_MAX/INT_MIN.  */
      if (TREE_CODE (lhs) == INTEGER_CST
	  && TREE_OVERFLOW (lhs))
	{
	  int const1_sgn = tree_int_cst_sgn (const1);
	  enum tree_code code2 = code;

	  /* Get the sign of the constant on the lhs if the
	     operation were VARIABLE + CONST1.  */
	  if (TREE_CODE (arg0) == MINUS_EXPR)
	    const1_sgn = -const1_sgn;

	  /* The sign of the constant determines if we overflowed
	     INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
	     Canonicalize to the INT_MIN overflow by swapping the comparison
	     if necessary.  */
	  if (const1_sgn == -1)
	    code2 = swap_tree_comparison (code);

	  /* We now can look at the canonicalized case
	       VARIABLE + 1  CODE2  INT_MIN
	     and decide on the result.  */
	  if (code2 == LT_EXPR
	      || code2 == LE_EXPR
	      || code2 == EQ_EXPR)
	    return omit_one_operand_loc (loc, type, boolean_false_node, variable);
	  else if (code2 == NE_EXPR
		   || code2 == GE_EXPR
		   || code2 == GT_EXPR)
	    return omit_one_operand_loc (loc, type, boolean_true_node, variable);
	}

      if (TREE_CODE (lhs) == TREE_CODE (arg1)
	  && (TREE_CODE (lhs) != INTEGER_CST
	      || !TREE_OVERFLOW (lhs)))
	{
	  if (code != EQ_EXPR && code != NE_EXPR)
	    fold_overflow_warning ("assuming signed overflow does not occur "
				   "when changing X +- C1 cmp C2 to "
				   "X cmp C2 +- C1",
				   WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type, variable, lhs);
	}
    }
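  /* As an example of the overflow branch above: "x + 1 < INT_MIN" is
     always false when signed overflow is undefined, since x + 1 can
     never wrap around below INT_MIN.  */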
  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
	  || TREE_CODE (arg1) == ADDR_EXPR
	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      enum machine_mode mode;
      int volatilep, unsignedp;
      bool indirect_base0 = false, indirect_base1 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
	 get_inner_reference, but put it back by stripping INDIRECT_REF
	 off the base object if possible.  indirect_baseN will be true
	 if baseN is not an address but refers to the object itself.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
				       &bitsize, &bitpos0, &offset0, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base0) == INDIRECT_REF)
	    base0 = TREE_OPERAND (base0, 0);
	  else
	    indirect_base0 = true;
	}
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  base0 = TREE_OPERAND (arg0, 0);
	  STRIP_SIGN_NOPS (base0);
	  if (TREE_CODE (base0) == ADDR_EXPR)
	    {
	      base0 = TREE_OPERAND (base0, 0);
	      indirect_base0 = true;
	    }
	  offset0 = TREE_OPERAND (arg0, 1);
	}

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
	{
	  base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
				       &bitsize, &bitpos1, &offset1, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base1) == INDIRECT_REF)
	    base1 = TREE_OPERAND (base1, 0);
	  else
	    indirect_base1 = true;
	}
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	{
	  base1 = TREE_OPERAND (arg1, 0);
	  STRIP_SIGN_NOPS (base1);
	  if (TREE_CODE (base1) == ADDR_EXPR)
	    {
	      base1 = TREE_OPERAND (base1, 0);
	      indirect_base1 = true;
	    }
	  offset1 = TREE_OPERAND (arg1, 1);
	}

      /* A local variable can never be pointed to by
	 the default SSA name of an incoming parameter.  */
      if ((TREE_CODE (arg0) == ADDR_EXPR
	   && indirect_base0
	   && TREE_CODE (base0) == VAR_DECL
	   && auto_var_in_fn_p (base0, current_function_decl)
	   && !indirect_base1
	   && TREE_CODE (base1) == SSA_NAME
	   && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
	   && SSA_NAME_IS_DEFAULT_DEF (base1))
	  || (TREE_CODE (arg1) == ADDR_EXPR
	      && indirect_base1
	      && TREE_CODE (base1) == VAR_DECL
	      && auto_var_in_fn_p (base1, current_function_decl)
	      && !indirect_base0
	      && TREE_CODE (base0) == SSA_NAME
	      && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
	      && SSA_NAME_IS_DEFAULT_DEF (base0)))
	{
	  if (code == NE_EXPR)
	    return constant_boolean_node (1, type);
	  else if (code == EQ_EXPR)
	    return constant_boolean_node (0, type);
	}
      /* If we have equivalent bases we might be able to simplify.  */
      else if (indirect_base0 == indirect_base1
	       && operand_equal_p (base0, base1, 0))
	{
	  /* We can fold this expression to a constant if the non-constant
	     offset parts are equal.  */
	  if ((offset0 == offset1
	       || (offset0 && offset1
		   && operand_equal_p (offset0, offset1, 0)))
	      && (code == EQ_EXPR
		  || code == NE_EXPR
		  || (indirect_base0 && DECL_P (base0))
		  || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      if (code != EQ_EXPR
		  && code != NE_EXPR
		  && bitpos0 != bitpos1
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_CONDITIONAL);

	      switch (code)
		{
		case EQ_EXPR:
		  return constant_boolean_node (bitpos0 == bitpos1, type);
		case NE_EXPR:
		  return constant_boolean_node (bitpos0 != bitpos1, type);
		case LT_EXPR:
		  return constant_boolean_node (bitpos0 < bitpos1, type);
		case LE_EXPR:
		  return constant_boolean_node (bitpos0 <= bitpos1, type);
		case GE_EXPR:
		  return constant_boolean_node (bitpos0 >= bitpos1, type);
		case GT_EXPR:
		  return constant_boolean_node (bitpos0 > bitpos1, type);
		default:;
		}
	    }
	  /* We can simplify the comparison to a comparison of the variable
	     offset parts if the constant offset parts are equal.
	     Be careful to use signed size type here because otherwise we
	     mess with array offsets in the wrong way.  This is possible
	     because pointer arithmetic is restricted to remain within an
	     object and overflow on pointer differences is undefined as of
	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
	  else if (bitpos0 == bitpos1
		   && ((code == EQ_EXPR || code == NE_EXPR)
		       || (indirect_base0 && DECL_P (base0))
		       || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      /* By converting to signed size type we cover middle-end pointer
		 arithmetic which operates on unsigned pointer types of size
		 type size and ARRAY_REF offsets which are properly sign or
		 zero extended from their type in case it is narrower than
		 size type.  */
	      if (offset0 == NULL_TREE)
		offset0 = build_int_cst (ssizetype, 0);
	      else
		offset0 = fold_convert_loc (loc, ssizetype, offset0);
	      if (offset1 == NULL_TREE)
		offset1 = build_int_cst (ssizetype, 0);
	      else
		offset1 = fold_convert_loc (loc, ssizetype, offset1);

	      if (code != EQ_EXPR
		  && code != NE_EXPR
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_COMPARISON);

	      return fold_build2_loc (loc, code, type, offset0, offset1);
	    }
	}
      /* For non-equal bases we can simplify if they are addresses
	 of local binding decls or constants.  */
      else if (indirect_base0 && indirect_base1
	       /* We know that !operand_equal_p (base0, base1, 0)
		  because the if condition was false.  But make
		  sure two decls are not the same.  */
	       && base0 != base1
	       && TREE_CODE (arg0) == ADDR_EXPR
	       && TREE_CODE (arg1) == ADDR_EXPR
	       && (((TREE_CODE (base0) == VAR_DECL
		     || TREE_CODE (base0) == PARM_DECL)
		    && (targetm.binds_local_p (base0)
			|| CONSTANT_CLASS_P (base1)))
		   || CONSTANT_CLASS_P (base0))
	       && (((TREE_CODE (base1) == VAR_DECL
		     || TREE_CODE (base1) == PARM_DECL)
		    && (targetm.binds_local_p (base1)
			|| CONSTANT_CLASS_P (base0)))
		   || CONSTANT_CLASS_P (base1)))
	{
	  if (code == EQ_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_false_node,
					  arg0, arg1);
	  else if (code == NE_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_true_node,
					  arg0, arg1);
	}
      /* For equal offsets we can simplify to a comparison of the
	 base addresses.  */
      else if (bitpos0 == bitpos1
	       && (indirect_base0
		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
	       && (indirect_base1
		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
	       && ((offset0 == offset1)
		   || (offset0 && offset1
		       && operand_equal_p (offset0, offset1, 0))))
	{
	  if (indirect_base0)
	    base0 = build_fold_addr_expr_loc (loc, base0);
	  if (indirect_base1)
	    base1 = build_fold_addr_expr_loc (loc, base1);
	  return fold_build2_loc (loc, code, type, base0, base1);
	}
    }
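  /* E.g. "&s.b > &s.a" for two fields of the same struct folds to a
     constant by comparing the field offsets (bitpos0, bitpos1), while
     "&x[i] == &x[j]" reduces to a comparison of the variable offsets.  */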
  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
				      "occur when combining constants around "
				      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
	 of lower absolute value than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const2, const1);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type, variable1,
				  fold_build2_loc (loc,
						   TREE_CODE (arg1), TREE_TYPE (arg1),
						   variable2, cst));
	}

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const1, const2);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
						   variable1, cst),
				  variable2);
	}
    }

  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
     signed arithmetic case.  That form is created by the compiler
     often enough for folding it to be of value.  One example is in
     computing loop trip counts after Operator Strength Reduction.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && TREE_CODE (arg0) == MULT_EXPR
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && integer_zerop (arg1))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;			/* zero */
      tree variable1 = TREE_OPERAND (arg0, 0);
      enum tree_code cmp_code = code;

      /* Handle unfolded multiplication by zero.  */
      if (integer_zerop (const1))
	return fold_build2_loc (loc, cmp_code, type, const1, const2);

      fold_overflow_warning (("assuming signed overflow does not occur when "
			      "eliminating multiplication in comparison "
			      "with zero"),
			     WARN_STRICT_OVERFLOW_COMPARISON);

      /* If const1 is negative we swap the sense of the comparison.  */
      if (tree_int_cst_sgn (const1) < 0)
	cmp_code = swap_tree_comparison (cmp_code);

      return fold_build2_loc (loc, cmp_code, type, variable1, const2);
    }
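  /* E.g. "x * 4 > 0" becomes "x > 0" for signed x, and "x * -4 > 0"
     becomes "x < 0", because a negative multiplier flips the sense of
     the comparison.  */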
  tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
  if (tem)
    return tem;

  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
    {
      tree targ0 = strip_float_extensions (arg0);
      tree targ1 = strip_float_extensions (arg1);
      tree newtype = TREE_TYPE (targ0);

      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	newtype = TREE_TYPE (targ1);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, newtype, targ0),
				fold_convert_loc (loc, newtype, targ1));

      /* (-a) CMP (-b) -> b CMP a  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
				TREE_OPERAND (arg0, 0));

      if (TREE_CODE (arg1) == REAL_CST)
	{
	  REAL_VALUE_TYPE cst;
	  cst = TREE_REAL_CST (arg1);

	  /* (-a) CMP CST -> a swap(CMP) (-CST)  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR)
	    return fold_build2_loc (loc, swap_tree_comparison (code), type,
				    TREE_OPERAND (arg0, 0),
				    build_real (TREE_TYPE (arg1),
						real_value_negate (&cst)));

	  /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
	  /* a CMP (-0) -> a CMP 0  */
	  if (REAL_VALUE_MINUS_ZERO (cst))
	    return fold_build2_loc (loc, code, type, arg0,
				    build_real (TREE_TYPE (arg1), dconst0));

	  /* x != NaN is always true, other ops are always false.  */
	  if (REAL_VALUE_ISNAN (cst)
	      && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
	      return omit_one_operand_loc (loc, type, tem, arg0);
	    }

	  /* Fold comparisons against infinity.  */
	  if (REAL_VALUE_ISINF (cst)
	      && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = fold_inf_compare (loc, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}

      /* If this is a comparison of a real constant with a PLUS_EXPR
	 or a MINUS_EXPR of a real constant, we can convert it into a
	 comparison with a revised real constant as long as no overflow
	 occurs when unsafe_math_optimizations are enabled.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == REAL_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      arg1, TREE_OPERAND (arg0, 1)))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Likewise, we can simplify a comparison of a real constant with
	 a MINUS_EXPR whose first operand is also a real constant, i.e.
	 (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
	 floating-point types only if -fassociative-math is set.  */
      if (flag_associative_math
	  && TREE_CODE (arg1) == REAL_CST
	  && TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
	  && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
				      arg1))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, swap_tree_comparison (code), type,
				TREE_OPERAND (arg0, 1), tem);

      /* Fold comparisons against built-in math functions.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && flag_unsafe_math_optimizations
	  && ! flag_errno_math)
	{
	  enum built_in_function fcode = builtin_mathfn_code (arg0);

	  if (fcode != END_BUILTINS)
	    {
	      tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}
    }
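  /* Note the IEEE semantics used above: a NaN compares unordered to
     everything, so "x != NaN" is always true while "x == NaN",
     "x < NaN" and the other ordered comparisons are always false.  */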
  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
      && CONVERT_EXPR_P (arg0))
    {
      /* If we are widening one operand of an integer comparison,
	 see if the other operand is similarly being widened.  Perhaps we
	 can do the comparison in the narrower type.  */
      tem = fold_widened_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;

      /* Or if we are changing signedness.  */
      tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;
    }

  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
	  || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (loc, code, type, op0, op1);
      if (tem)
	return tem;
    }

  /* Simplify comparison of something with itself.  (For IEEE
     floating-point, we can only do some of these simplifications.)  */
  if (operand_equal_p (arg0, arg1, 0))
    {
      switch (code)
	{
	case EQ_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  break;

	case GE_EXPR:
	case LE_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);

	case NE_EXPR:
	  /* For NE, we can only do this simplification if integer
	     or we don't honor IEEE floating point NaNs.  */
	  if (FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    break;
	  /* ... fall through ...  */
	case GT_EXPR:
	case LT_EXPR:
	  return constant_boolean_node (0, type);
	default:
	  gcc_unreachable ();
	}
    }
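  /* E.g. "x >= x": for integer x this is always 1; for floating point it
     folds to "x == x", which is false exactly when x is a NaN.  */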
  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	  /* Don't handle degenerate cases here; they should already
	     have been handled anyway.  */
	  && cval1 != 0 && cval2 != 0
	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	{
	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	  /* We can't just pass T to eval_subst in case cval1 or cval2
	     was the same as ARG1.  */

	  tree high_result
	    = fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, maxval,
					   cval2, minval),
			       arg1);
	  tree equal_result
	    = fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, maxval,
					   cval2, maxval),
			       arg1);
	  tree low_result
	    = fold_build2_loc (loc, code, type,
			       eval_subst (loc, arg0, cval1, minval,
					   cval2, maxval),
			       arg1);

	  /* All three of these results should be 0 or 1.  Confirm they are.
	     Then use those values to select the proper code to use.  */

	  if (TREE_CODE (high_result) == INTEGER_CST
	      && TREE_CODE (equal_result) == INTEGER_CST
	      && TREE_CODE (low_result) == INTEGER_CST)
	    {
	      /* Make a 3-bit mask with the high-order bit being the
		 value for `>', the next for '=', and the low for '<'.  */
	      switch ((integer_onep (high_result) * 4)
		      + (integer_onep (equal_result) * 2)
		      + integer_onep (low_result))
		{
		case 0:
		  /* Always false.  */
		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
		case 1:
		  code = LT_EXPR;
		  break;
		case 2:
		  code = EQ_EXPR;
		  break;
		case 3:
		  code = LE_EXPR;
		  break;
		case 4:
		  code = GT_EXPR;
		  break;
		case 5:
		  code = NE_EXPR;
		  break;
		case 6:
		  code = GE_EXPR;
		  break;
		case 7:
		  /* Always true.  */
		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
		}

	      if (save_p)
		{
		  tem = save_expr (build2 (code, type, cval1, cval2));
		  SET_EXPR_LOCATION (tem, loc);
		  return tem;
		}
	      return fold_build2_loc (loc, code, type, cval1, cval2);
	    }
	}
    }

  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (loc, code, type, arg0, arg1);
      if (tem != NULL_TREE)
	return tem;
    }

  /* Fold ~X op ~Y as Y op X.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == BIT_NOT_EXPR)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, code, type,
			      fold_convert_loc (loc, cmp_type,
						TREE_OPERAND (arg1, 0)),
			      TREE_OPERAND (arg0, 0));
    }

  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == INTEGER_CST)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, swap_tree_comparison (code), type,
			      TREE_OPERAND (arg0, 0),
			      fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
					       fold_convert_loc (loc, cmp_type, arg1)));
    }

  return NULL_TREE;
}
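/* E.g. "~x == 5" becomes "x == ~5", i.e. "x == -6" in two's complement:
   bitwise NOT is an involution and is order-reversing, which is why the
   inequality cases use the swapped comparison.  */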
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (location_t loc, tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
      ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2_loc (loc, PLUS_EXPR, itype,
			 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
			 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
  return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
			  build_zero_cst (itype));
}
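/* The identity used above: for z = a + bi,
   z * conj (z) = (a + bi) * (a - bi) = a*a + b*b + 0i,
   which is why the imaginary part of the result is the zero constant.  */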
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
   power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
   guarantees that P and N have the same least significant log2(M) bits.
   N is not otherwise constrained.  In particular, N is not normalized to
   0 <= N < M as is common.  In general, the precise value of P is unknown.
   M is chosen as large as possible such that constant N can be determined.

   Returns M and sets *RESIDUE to N.

   If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
   account.  This is not always possible due to PR 35705.  */

static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
				 bool allow_func_align)
{
  enum tree_code code;

  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      unsigned int bitalign;
      bitalign = get_object_alignment_1 (TREE_OPERAND (expr, 0), residue);
      *residue /= BITS_PER_UNIT;
      return bitalign / BITS_PER_UNIT;
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue,
						 allow_func_align);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
	{
	  *residue += TREE_INT_CST_LOW (op1);
	  return modulus;
	}
      else if (inner_code == MULT_EXPR)
	{
	  op1 = TREE_OPERAND (op1, 1);
	  if (TREE_CODE (op1) == INTEGER_CST)
	    {
	      unsigned HOST_WIDE_INT align;

	      /* Compute the greatest power-of-2 divisor of op1.  */
	      align = TREE_INT_CST_LOW (op1);
	      align &= -align;

	      /* If align is non-zero and less than *modulus, replace
		 *modulus with align.  If align is 0, then either op1 is 0
		 or the greatest power-of-2 divisor of op1 doesn't fit in an
		 unsigned HOST_WIDE_INT.  In either case, no additional
		 constraint is imposed.  */
	      if (align)
		modulus = MIN (modulus, align);

	      return modulus;
	    }
	}
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
}
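/* In the MULT_EXPR case above, "align &= -align" extracts the lowest set
   bit, e.g. 24 & -24 == 8, the largest power of two dividing 24; so a
   16-byte-aligned base offset by a multiple of 24 keeps modulus 8.  */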
/* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
   CONSTRUCTOR ARG into array ELTS and return true if successful.  */

static bool
vec_cst_ctor_to_array (tree arg, tree *elts)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;

  if (TREE_CODE (arg) == VECTOR_CST)
    {
      tree t;

      for (i = 0, t = TREE_VECTOR_CST_ELTS (arg);
	   i < nelts && t; i++, t = TREE_CHAIN (t))
	elts[i] = TREE_VALUE (t);
      if (t)
	return false;
    }
  else if (TREE_CODE (arg) == CONSTRUCTOR)
    {
      constructor_elt *elt;

      FOR_EACH_VEC_ELT (constructor_elt, CONSTRUCTOR_ELTS (arg), i, elt)
	if (i >= nelts)
	  return false;
	else
	  elts[i] = elt->value;
    }
  else
    return false;
  for (; i < nelts; i++)
    elts[i]
      = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);

  return true;
}
/* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
   selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
   NULL_TREE otherwise.  */

static tree
fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
  tree *elts;
  bool need_ctor = false;

  gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
	      && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
  if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
      || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
    return NULL_TREE;

  elts = XALLOCAVEC (tree, nelts * 3);
  if (!vec_cst_ctor_to_array (arg0, elts)
      || !vec_cst_ctor_to_array (arg1, elts + nelts))
    return NULL_TREE;

  for (i = 0; i < nelts; i++)
    {
      if (!CONSTANT_CLASS_P (elts[sel[i]]))
	need_ctor = true;
      elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
    }

  if (need_ctor)
    {
      VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, nelts);
      for (i = 0; i < nelts; i++)
	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
      return build_constructor (type, v);
    }
  else
    {
      tree vals = NULL_TREE;
      for (i = 0; i < nelts; i++)
	vals = tree_cons (NULL_TREE, elts[3 * nelts - i - 1], vals);
      return build_vector (type, vals);
    }
}
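/* Selector semantics: index i < nelts picks element i of ARG0 and
   i >= nelts picks element i - nelts of ARG1; e.g. with nelts == 4,
   sel = {0, 4, 1, 5} interleaves the low halves of the two vectors.  */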
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  LOC is the location of the resulting expression.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

tree
fold_binary_loc (location_t loc,
		 enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;
  bool strict_overflow_p;

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 2
	      && op0 != NULL_TREE
	      && op1 != NULL_TREE);

  arg0 = op0;
  arg1 = op1;

  /* Strip any conversions that don't change the mode.  This is
     safe for every expression, except for a comparison expression
     because its signedness is derived from its operands.  So, in
     the latter case, only strip conversions that don't change the
     signedness.  MIN_EXPR/MAX_EXPR also need signedness of arguments
     preserved.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     cases, the appropriate type conversions should be put back in
     the tree that will get out of the constant folder.  */

  if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
    {
      STRIP_SIGN_NOPS (arg0);
      STRIP_SIGN_NOPS (arg1);
    }
  else
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }

  /* Note that TREE_CONSTANT isn't enough: static var addresses are
     constant but we can't do arithmetic on them.  */
  if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
    {
      if (kind == tcc_binary)
	{
	  /* Make sure type and arg0 have the same saturating flag.  */
	  gcc_assert (TYPE_SATURATING (type)
		      == TYPE_SATURATING (TREE_TYPE (arg0)));
	  tem = const_binop (code, arg0, arg1);
	}
      else if (kind == tcc_comparison)
	tem = fold_relational_const (code, type, arg0, arg1);
      else
	tem = NULL_TREE;

      if (tem != NULL_TREE)
	{
	  if (TREE_TYPE (tem) != type)
	    tem = fold_convert_loc (loc, type, tem);
	  return tem;
	}
    }
  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, code, type, op1, op0);

  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && ((truth_value_p (TREE_CODE (arg0))
	   && (truth_value_p (TREE_CODE (arg1))
	       || (TREE_CODE (arg1) == BIT_AND_EXPR
		   && integer_onep (TREE_OPERAND (arg1, 1)))))
	  || (truth_value_p (TREE_CODE (arg1))
	      && (truth_value_p (TREE_CODE (arg0))
		  || (TREE_CODE (arg0) == BIT_AND_EXPR
		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
			     : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
			     : TRUTH_XOR_EXPR,
			     boolean_type_node,
			     fold_convert_loc (loc, boolean_type_node, arg0),
			     fold_convert_loc (loc, boolean_type_node, arg1));

      if (code == EQ_EXPR)
	tem = invert_truthvalue_loc (loc, tem);

      return fold_convert_loc (loc, type, tem);
    }
  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	{
	  tem = fold_build2_loc (loc, code, type,
				 fold_convert_loc (loc, TREE_TYPE (op0),
						   TREE_OPERAND (arg0, 1)), op1);
	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
			     tem);
	}
      if (TREE_CODE (arg1) == COMPOUND_EXPR
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_build2_loc (loc, code, type, op0,
				 fold_convert_loc (loc, TREE_TYPE (op1),
						   TREE_OPERAND (arg1, 1)));
	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
			     tem);
	}

      if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
	{
	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
						     arg0, arg1,
						     /*cond_first_p=*/1);
	  if (tem != NULL_TREE)
	    return tem;
	}

      if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
	{
	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
						     arg1, arg0,
						     /*cond_first_p=*/0);
	  if (tem != NULL_TREE)
	    return tem;
	}
    }

  switch (code)
    {
    case MEM_REF:
9816 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9817 if (TREE_CODE (arg0
) == ADDR_EXPR
9818 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == MEM_REF
)
9820 tree iref
= TREE_OPERAND (arg0
, 0);
9821 return fold_build2 (MEM_REF
, type
,
9822 TREE_OPERAND (iref
, 0),
9823 int_const_binop (PLUS_EXPR
, arg1
,
9824 TREE_OPERAND (iref
, 1)));
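	  /* For example, MEM[&MEM[p, 4], 8] becomes MEM[p, 12]: the two
	     constant offsets are summed by int_const_binop above.  */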
	  /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2].  */
	  if (TREE_CODE (arg0) == ADDR_EXPR
	      && handled_component_p (TREE_OPERAND (arg0, 0)))
	    {
	      tree base;
	      HOST_WIDE_INT coffset;
	      base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
						    &coffset);
	      if (!base)
		return NULL_TREE;
	      return fold_build2 (MEM_REF, type,
				  build_fold_addr_expr (base),
				  int_const_binop (PLUS_EXPR, arg1,
						   size_int (coffset)));
	    }

	  return NULL_TREE;
	case POINTER_PLUS_EXPR:
	  /* 0 +p index -> (type)index */
	  if (integer_zerop (arg0))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

	  /* PTR +p 0 -> PTR */
	  if (integer_zerop (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this. */
	  if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
	      && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
	    return fold_convert_loc (loc, type,
				     fold_build2_loc (loc, PLUS_EXPR, sizetype,
						      fold_convert_loc (loc, sizetype,
									arg1),
						      fold_convert_loc (loc, sizetype,
									arg0)));

	  /* (PTR +p B) +p A -> PTR +p (B + A) */
	  if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	    {
	      tree inner;
	      tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
	      tree arg00 = TREE_OPERAND (arg0, 0);
	      inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
				       arg01, fold_convert_loc (loc, sizetype, arg1));
	      return fold_convert_loc (loc, type,
				       fold_build_pointer_plus_loc (loc,
								    arg00, inner));
	    }

	  /* PTR_CST +p CST -> CST1 */
	  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
	    return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
				    fold_convert_loc (loc, type, arg1));

	  /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
	     of the array.  The loop optimizer sometimes produces this type
	     of expression.  */
	  if (TREE_CODE (arg0) == ADDR_EXPR)
	    {
	      tem = try_move_mult_to_index (loc, arg0,
					    fold_convert_loc (loc,
							      sizetype, arg1));
	      if (tem)
		return fold_convert_loc (loc, type, tem);
	    }

	  return NULL_TREE;

	case PLUS_EXPR:
	  /* A + (-B) -> A - B */
	  if (TREE_CODE (arg1) == NEGATE_EXPR)
	    return fold_build2_loc (loc, MINUS_EXPR, type,
				    fold_convert_loc (loc, type, arg0),
				    fold_convert_loc (loc, type,
						      TREE_OPERAND (arg1, 0)));
	  /* (-A) + B -> B - A */
	  if (TREE_CODE (arg0) == NEGATE_EXPR
	      && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
	    return fold_build2_loc (loc, MINUS_EXPR, type,
				    fold_convert_loc (loc, type, arg1),
				    fold_convert_loc (loc, type,
						      TREE_OPERAND (arg0, 0)));
	  if (INTEGRAL_TYPE_P (type))
	    {
	      /* Convert ~A + 1 to -A.  */
	      if (TREE_CODE (arg0) == BIT_NOT_EXPR
		  && integer_onep (arg1))
		return fold_build1_loc (loc, NEGATE_EXPR, type,
					fold_convert_loc (loc, type,
							  TREE_OPERAND (arg0, 0)));

	      /* ~X + X is -1.  */
	      if (TREE_CODE (arg0) == BIT_NOT_EXPR
		  && !TYPE_OVERFLOW_TRAPS (type))
		{
		  tree tem = TREE_OPERAND (arg0, 0);

		  STRIP_NOPS (tem);
		  if (operand_equal_p (tem, arg1, 0))
		    {
		      t1 = build_int_cst_type (type, -1);
		      return omit_one_operand_loc (loc, type, t1, arg1);
		    }
		}

	      /* X + ~X is -1.  */
	      if (TREE_CODE (arg1) == BIT_NOT_EXPR
		  && !TYPE_OVERFLOW_TRAPS (type))
		{
		  tree tem = TREE_OPERAND (arg1, 0);

		  STRIP_NOPS (tem);
		  if (operand_equal_p (arg0, tem, 0))
		    {
		      t1 = build_int_cst_type (type, -1);
		      return omit_one_operand_loc (loc, type, t1, arg0);
		    }
		}

	      /* X + (X / CST) * -CST is X % CST.  */
	      if (TREE_CODE (arg1) == MULT_EXPR
		  && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
		  && operand_equal_p (arg0,
				      TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
		{
		  tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
		  tree cst1 = TREE_OPERAND (arg1, 1);
		  tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
					      cst1, cst0);
		  if (sum && integer_zerop (sum))
		    return fold_convert_loc (loc, type,
					     fold_build2_loc (loc, TRUNC_MOD_EXPR,
							      TREE_TYPE (arg0), arg0,
							      cst0));
		}
	    }
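	  /* For instance, the integer folds above rewrite ~a + 1 as -a and
	     x + (x / 4) * -4 as x % 4.  */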
	  /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
	     same or one.  Make sure type is not saturating.
	     fold_plusminus_mult_expr will re-associate.  */
	  if ((TREE_CODE (arg0) == MULT_EXPR
	       || TREE_CODE (arg1) == MULT_EXPR)
	      && !TYPE_SATURATING (type)
	      && (!FLOAT_TYPE_P (type) || flag_associative_math))
	    {
	      tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
	      if (tem)
		return tem;
	    }

	  if (! FLOAT_TYPE_P (type))
	    {
	      if (integer_zerop (arg1))
		return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
	      /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
		 with a constant, and the two constants have no bits in common,
		 we should treat this as a BIT_IOR_EXPR since this may produce more
		 simplifications.  */
	      if (TREE_CODE (arg0) == BIT_AND_EXPR
		  && TREE_CODE (arg1) == BIT_AND_EXPR
		  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
		  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
		  && integer_zerop (const_binop (BIT_AND_EXPR,
						 TREE_OPERAND (arg0, 1),
						 TREE_OPERAND (arg1, 1))))
		{
		  code = BIT_IOR_EXPR;
		  goto bit_ior;
		}
	      /* Reassociate (plus (plus (mult) (foo)) (mult)) as
		 (plus (plus (mult) (mult)) (foo)) so that we can
		 take advantage of the factoring cases below.  */
	      if (TYPE_OVERFLOW_WRAPS (type)
		  && (((TREE_CODE (arg0) == PLUS_EXPR
			|| TREE_CODE (arg0) == MINUS_EXPR)
		       && TREE_CODE (arg1) == MULT_EXPR)
		      || ((TREE_CODE (arg1) == PLUS_EXPR
			   || TREE_CODE (arg1) == MINUS_EXPR)
			  && TREE_CODE (arg0) == MULT_EXPR)))
		{
		  tree parg0, parg1, parg, marg;
		  enum tree_code pcode;

		  if (TREE_CODE (arg1) == MULT_EXPR)
		    parg = arg0, marg = arg1;
		  else
		    parg = arg1, marg = arg0;
		  pcode = TREE_CODE (parg);
		  parg0 = TREE_OPERAND (parg, 0);
		  parg1 = TREE_OPERAND (parg, 1);
		  STRIP_NOPS (parg0);
		  STRIP_NOPS (parg1);

		  if (TREE_CODE (parg0) == MULT_EXPR
		      && TREE_CODE (parg1) != MULT_EXPR)
		    return fold_build2_loc (loc, pcode, type,
					    fold_build2_loc (loc, PLUS_EXPR, type,
							     fold_convert_loc (loc, type,
									       parg0),
							     fold_convert_loc (loc, type,
									       marg)),
					    fold_convert_loc (loc, type, parg1));
		  if (TREE_CODE (parg0) != MULT_EXPR
		      && TREE_CODE (parg1) == MULT_EXPR)
		    return
		      fold_build2_loc (loc, PLUS_EXPR, type,
				       fold_convert_loc (loc, type, parg0),
				       fold_build2_loc (loc, pcode, type,
							fold_convert_loc (loc, type, marg),
							fold_convert_loc (loc, type,
									  parg1)));
		}
	    }
	  else
	    {
	      /* See if ARG1 is zero and X + ARG1 reduces to X.  */
	      if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
		return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	      /* Likewise if the operands are reversed.  */
	      if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
		return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

	      /* Convert X + -C into X - C.  */
	      if (TREE_CODE (arg1) == REAL_CST
		  && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
		{
		  tem = fold_negate_const (arg1, type);
		  if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
		    return fold_build2_loc (loc, MINUS_EXPR, type,
					    fold_convert_loc (loc, type, arg0),
					    fold_convert_loc (loc, type, tem));
		}
	      /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
		 to __complex__ ( x, y ).  This is not the same for SNaNs or
		 if signed zeros are involved.  */
	      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
		  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
		  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
		{
		  tree rtype = TREE_TYPE (TREE_TYPE (arg0));
		  tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
		  tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
		  bool arg0rz = false, arg0iz = false;
		  if ((arg0r && (arg0rz = real_zerop (arg0r)))
		      || (arg0i && (arg0iz = real_zerop (arg0i))))
		    {
		      tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
		      tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
		      if (arg0rz && arg1i && real_zerop (arg1i))
			{
			  tree rp = arg1r ? arg1r
				    : build1 (REALPART_EXPR, rtype, arg1);
			  tree ip = arg0i ? arg0i
				    : build1 (IMAGPART_EXPR, rtype, arg0);
			  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
			}
		      else if (arg0iz && arg1r && real_zerop (arg1r))
			{
			  tree rp = arg0r ? arg0r
				    : build1 (REALPART_EXPR, rtype, arg0);
			  tree ip = arg1i ? arg1i
				    : build1 (IMAGPART_EXPR, rtype, arg1);
			  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
			}
		    }
		}
	      if (flag_unsafe_math_optimizations
		  && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
		  && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
		  && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
		return tem;

	      /* Convert x+x into x*2.0.  */
	      if (operand_equal_p (arg0, arg1, 0)
		  && SCALAR_FLOAT_TYPE_P (type))
		return fold_build2_loc (loc, MULT_EXPR, type, arg0,
					build_real (type, dconst2));
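	      /* For example, a float expression x + x becomes x * 2.0 here;
		 doubling is exact in binary floating point, so no
		 -fassociative-math guard is needed for this fold.  */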
	      /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
		 We associate floats only if the user has specified
		 -fassociative-math.  */
	      if (flag_associative_math
		  && TREE_CODE (arg1) == PLUS_EXPR
		  && TREE_CODE (arg0) != MULT_EXPR)
		{
		  tree tree10 = TREE_OPERAND (arg1, 0);
		  tree tree11 = TREE_OPERAND (arg1, 1);
		  if (TREE_CODE (tree11) == MULT_EXPR
		      && TREE_CODE (tree10) == MULT_EXPR)
		    {
		      tree tree0;
		      tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
		      return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
		    }
		}
	      /* Convert (b*c + d*e) + a into b*c + (d*e + a).
		 We associate floats only if the user has specified
		 -fassociative-math.  */
	      if (flag_associative_math
		  && TREE_CODE (arg0) == PLUS_EXPR
		  && TREE_CODE (arg1) != MULT_EXPR)
		{
		  tree tree00 = TREE_OPERAND (arg0, 0);
		  tree tree01 = TREE_OPERAND (arg0, 1);
		  if (TREE_CODE (tree01) == MULT_EXPR
		      && TREE_CODE (tree00) == MULT_EXPR)
		    {
		      tree tree0;
		      tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
		      return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
		    }
		}
	    }
	bit_rotate:
	  /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
	     is a rotate of A by C1 bits.  */
	  /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
	     is a rotate of A by B bits.  */
	  {
	    enum tree_code code0, code1;
	    tree rtype;
	    code0 = TREE_CODE (arg0);
	    code1 = TREE_CODE (arg1);
	    if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
		 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
		&& operand_equal_p (TREE_OPERAND (arg0, 0),
				    TREE_OPERAND (arg1, 0), 0)
		&& (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
		    TYPE_UNSIGNED (rtype))
		/* Only create rotates in complete modes.  Other cases are not
		   expanded properly.  */
		&& TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
	      {
		tree tree01, tree11;
		enum tree_code code01, code11;

		tree01 = TREE_OPERAND (arg0, 1);
		tree11 = TREE_OPERAND (arg1, 1);
		STRIP_NOPS (tree01);
		STRIP_NOPS (tree11);
		code01 = TREE_CODE (tree01);
		code11 = TREE_CODE (tree11);
		if (code01 == INTEGER_CST
		    && code11 == INTEGER_CST
		    && TREE_INT_CST_HIGH (tree01) == 0
		    && TREE_INT_CST_HIGH (tree11) == 0
		    && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
			== TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
		  {
		    tem = build2_loc (loc, LROTATE_EXPR,
				      TREE_TYPE (TREE_OPERAND (arg0, 0)),
				      TREE_OPERAND (arg0, 0),
				      code0 == LSHIFT_EXPR ? tree01 : tree11);
		    return fold_convert_loc (loc, type, tem);
		  }
		else if (code11 == MINUS_EXPR)
		  {
		    tree tree110, tree111;
		    tree110 = TREE_OPERAND (tree11, 0);
		    tree111 = TREE_OPERAND (tree11, 1);
		    STRIP_NOPS (tree110);
		    STRIP_NOPS (tree111);
		    if (TREE_CODE (tree110) == INTEGER_CST
			&& 0 == compare_tree_int (tree110,
						  TYPE_PRECISION
						  (TREE_TYPE (TREE_OPERAND
							      (arg0, 0))))
			&& operand_equal_p (tree01, tree111, 0))
		      return
			fold_convert_loc (loc, type,
					  build2 ((code0 == LSHIFT_EXPR
						   ? LROTATE_EXPR
						   : RROTATE_EXPR),
						  TREE_TYPE (TREE_OPERAND (arg0, 0)),
						  TREE_OPERAND (arg0, 0), tree01));
		  }
		else if (code01 == MINUS_EXPR)
		  {
		    tree tree010, tree011;
		    tree010 = TREE_OPERAND (tree01, 0);
		    tree011 = TREE_OPERAND (tree01, 1);
		    STRIP_NOPS (tree010);
		    STRIP_NOPS (tree011);
		    if (TREE_CODE (tree010) == INTEGER_CST
			&& 0 == compare_tree_int (tree010,
						  TYPE_PRECISION
						  (TREE_TYPE (TREE_OPERAND
							      (arg0, 0))))
			&& operand_equal_p (tree11, tree011, 0))
		      return fold_convert_loc
			(loc, type,
			 build2 ((code0 != LSHIFT_EXPR
				  ? LROTATE_EXPR
				  : RROTATE_EXPR),
				 TREE_TYPE (TREE_OPERAND (arg0, 0)),
				 TREE_OPERAND (arg0, 0), tree11));
		  }
	      }
	  }
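	  /* For example, with a 32-bit unsigned x, both (x << 3) + (x >> 29)
	     and (x << n) + (x >> (32 - n)) match the patterns above and are
	     collapsed into a single rotate of x.  */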
	associate:
	  /* In most languages, can't associate operations on floats through
	     parentheses.  Rather than remember where the parentheses were, we
	     don't associate floats at all, unless the user has specified
	     -fassociative-math.
	     And, we need to make sure type is not saturating.  */

	  if ((! FLOAT_TYPE_P (type) || flag_associative_math)
	      && !TYPE_SATURATING (type))
	    {
	      tree var0, con0, lit0, minus_lit0;
	      tree var1, con1, lit1, minus_lit1;
	      bool ok = true;

	      /* Split both trees into variables, constants, and literals.  Then
		 associate each group together, the constants with literals,
		 then the result with variables.  This increases the chances of
		 literals being recombined later and of generating relocatable
		 expressions for the sum of a constant and literal.  */
	      var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
	      var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
				 code == MINUS_EXPR);

	      /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
	      if (code == MINUS_EXPR)
		code = PLUS_EXPR;

	      /* With undefined overflow we can only associate constants with one
		 variable, and constants whose association doesn't overflow.  */
	      if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
		  || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
		{
		  if (var0 && var1)
		    {
		      tree tmp0 = var0;
		      tree tmp1 = var1;

		      if (TREE_CODE (tmp0) == NEGATE_EXPR)
			tmp0 = TREE_OPERAND (tmp0, 0);
		      if (TREE_CODE (tmp1) == NEGATE_EXPR)
			tmp1 = TREE_OPERAND (tmp1, 0);
		      /* The only case we can still associate with two variables
			 is if they are the same, modulo negation.  */
		      if (!operand_equal_p (tmp0, tmp1, 0))
			ok = false;
		    }

		  if (ok && lit0 && lit1)
		    {
		      tree tmp0 = fold_convert (type, lit0);
		      tree tmp1 = fold_convert (type, lit1);

		      if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
			  && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
			ok = false;
		    }
		}

	      /* Only do something if we found more than two objects.  Otherwise,
		 nothing has changed and we risk infinite recursion.  */
	      if (ok
		  && (2 < ((var0 != 0) + (var1 != 0)
			   + (con0 != 0) + (con1 != 0)
			   + (lit0 != 0) + (lit1 != 0)
			   + (minus_lit0 != 0) + (minus_lit1 != 0))))
		{
		  var0 = associate_trees (loc, var0, var1, code, type);
		  con0 = associate_trees (loc, con0, con1, code, type);
		  lit0 = associate_trees (loc, lit0, lit1, code, type);
		  minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);

		  /* Preserve the MINUS_EXPR if the negative part of the literal is
		     greater than the positive part.  Otherwise, the multiplicative
		     folding code (i.e. extract_muldiv) may be fooled in case
		     unsigned constants are subtracted, like in the following
		     example: ((X*2 + 4) - 8U)/2.  */
		  if (minus_lit0 && lit0)
		    {
		      if (TREE_CODE (lit0) == INTEGER_CST
			  && TREE_CODE (minus_lit0) == INTEGER_CST
			  && tree_int_cst_lt (lit0, minus_lit0))
			{
			  minus_lit0 = associate_trees (loc, minus_lit0, lit0,
							MINUS_EXPR, type);
			  lit0 = 0;
			}
		      else
			{
			  lit0 = associate_trees (loc, lit0, minus_lit0,
						  MINUS_EXPR, type);
			  minus_lit0 = 0;
			}
		    }
		  if (minus_lit0)
		    {
		      if (con0 == 0)
			return
			  fold_convert_loc (loc, type,
					    associate_trees (loc, var0, minus_lit0,
							     MINUS_EXPR, type));
		      else
			{
			  con0 = associate_trees (loc, con0, minus_lit0,
						  MINUS_EXPR, type);
			  return
			    fold_convert_loc (loc, type,
					      associate_trees (loc, var0, con0,
							       PLUS_EXPR, type));
			}
		    }

		  con0 = associate_trees (loc, con0, lit0, code, type);
		  return
		    fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
								  code, type));
		}
	    }
	  return NULL_TREE;

	case MINUS_EXPR:
	  /* Pointer simplifications for subtraction, simple reassociations. */
	  if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
	    {
	      /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
	      if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
		  && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
		{
		  tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
		  tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
		  tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
		  tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
		  return fold_build2_loc (loc, PLUS_EXPR, type,
					  fold_build2_loc (loc, MINUS_EXPR, type,
							   arg00, arg10),
					  fold_build2_loc (loc, MINUS_EXPR, type,
							   arg01, arg11));
		}
	      /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1
		 simplifies.  */
	      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
		{
		  tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
		  tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
		  tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
					      fold_convert_loc (loc, type, arg1));
		  if (tmp)
		    return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
		}
	    }
	  /* A - (-B) -> A + B */
	  if (TREE_CODE (arg1) == NEGATE_EXPR)
	    return fold_build2_loc (loc, PLUS_EXPR, type, op0,
				    fold_convert_loc (loc, type,
						      TREE_OPERAND (arg1, 0)));
	  /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR
	      && (FLOAT_TYPE_P (type)
		  || INTEGRAL_TYPE_P (type))
	      && negate_expr_p (arg1)
	      && reorder_operands_p (arg0, arg1))
	    return fold_build2_loc (loc, MINUS_EXPR, type,
				    fold_convert_loc (loc, type,
						      negate_expr (arg1)),
				    fold_convert_loc (loc, type,
						      TREE_OPERAND (arg0, 0)));
	  /* Convert -A - 1 to ~A.  */
	  if (INTEGRAL_TYPE_P (type)
	      && TREE_CODE (arg0) == NEGATE_EXPR
	      && integer_onep (arg1)
	      && !TYPE_OVERFLOW_TRAPS (type))
	    return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				    fold_convert_loc (loc, type,
						      TREE_OPERAND (arg0, 0)));

	  /* Convert -1 - A to ~A.  */
	  if (INTEGRAL_TYPE_P (type)
	      && integer_all_onesp (arg0))
	    return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);

	  /* X - (X / CST) * CST is X % CST.  */
	  if (INTEGRAL_TYPE_P (type)
	      && TREE_CODE (arg1) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
	      && operand_equal_p (arg0,
				  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
	      && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
				  TREE_OPERAND (arg1, 1), 0))
	    return
	      fold_convert_loc (loc, type,
				fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
						 arg0, TREE_OPERAND (arg1, 1)));
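	  /* For instance, x - (x / 8) * 8 matches the pattern above and is
	     rewritten as x % 8.  */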
	  if (! FLOAT_TYPE_P (type))
	    {
	      if (integer_zerop (arg0))
		return negate_expr (fold_convert_loc (loc, type, arg1));
	      if (integer_zerop (arg1))
		return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	      /* Fold A - (A & B) into ~B & A.  */
	      if (!TREE_SIDE_EFFECTS (arg0)
		  && TREE_CODE (arg1) == BIT_AND_EXPR)
		{
		  if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
		    {
		      tree arg10 = fold_convert_loc (loc, type,
						     TREE_OPERAND (arg1, 0));
		      return fold_build2_loc (loc, BIT_AND_EXPR, type,
					      fold_build1_loc (loc, BIT_NOT_EXPR,
							       type, arg10),
					      fold_convert_loc (loc, type, arg0));
		    }
		  if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
		    {
		      tree arg11 = fold_convert_loc (loc,
						     type, TREE_OPERAND (arg1, 1));
		      return fold_build2_loc (loc, BIT_AND_EXPR, type,
					      fold_build1_loc (loc, BIT_NOT_EXPR,
							       type, arg11),
					      fold_convert_loc (loc, type, arg0));
		    }
		}

	      /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
		 any power of 2 minus 1.  */
	      if (TREE_CODE (arg0) == BIT_AND_EXPR
		  && TREE_CODE (arg1) == BIT_AND_EXPR
		  && operand_equal_p (TREE_OPERAND (arg0, 0),
				      TREE_OPERAND (arg1, 0), 0))
		{
		  tree mask0 = TREE_OPERAND (arg0, 1);
		  tree mask1 = TREE_OPERAND (arg1, 1);
		  tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);

		  if (operand_equal_p (tem, mask1, 0))
		    {
		      tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
					     TREE_OPERAND (arg0, 0), mask1);
		      return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
		    }
		}
	    }
	  /* See if ARG1 is zero and X - ARG1 reduces to X.  */
	  else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
	     ARG0 is zero and X + ARG0 reduces to X, since that would mean
	     (-ARG1 + ARG0) reduces to -ARG1.  */
	  else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
	    return negate_expr (fold_convert_loc (loc, type, arg1));

	  /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
	     __complex__ ( x, -y ).  This is not the same for SNaNs or if
	     signed zeros are involved.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	      tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
	      tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
	      bool arg0rz = false, arg0iz = false;
	      if ((arg0r && (arg0rz = real_zerop (arg0r)))
		  || (arg0i && (arg0iz = real_zerop (arg0i))))
		{
		  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
		  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
		  if (arg0rz && arg1i && real_zerop (arg1i))
		    {
		      tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
						 arg1r ? arg1r
						 : build1 (REALPART_EXPR, rtype, arg1));
		      tree ip = arg0i ? arg0i
				: build1 (IMAGPART_EXPR, rtype, arg0);
		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		    }
		  else if (arg0iz && arg1r && real_zerop (arg1r))
		    {
		      tree rp = arg0r ? arg0r
				: build1 (REALPART_EXPR, rtype, arg0);
		      tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
						 arg1i ? arg1i
						 : build1 (IMAGPART_EXPR, rtype, arg1));
		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
		    }
		}
	    }
	  /* Fold &x - &x.  This can happen from &x.foo - &x.
	     This is unsafe for certain floats even in non-IEEE formats.
	     In IEEE, it is unsafe because it does wrong for NaNs.
	     Also note that operand_equal_p is always false if an operand
	     is volatile.  */

	  if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
	      && operand_equal_p (arg0, arg1, 0))
	    return build_zero_cst (type);

	  /* A - B -> A + (-B) if B is easily negatable.  */
	  if (negate_expr_p (arg1)
	      && ((FLOAT_TYPE_P (type)
		   /* Avoid this transformation if B is a positive REAL_CST.  */
		   && (TREE_CODE (arg1) != REAL_CST
		       || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
		  || INTEGRAL_TYPE_P (type)))
	    return fold_build2_loc (loc, PLUS_EXPR, type,
				    fold_convert_loc (loc, type, arg0),
				    fold_convert_loc (loc, type,
						      negate_expr (arg1)));

	  /* Try folding difference of addresses.  */
	  {
	    HOST_WIDE_INT diff;

	    if ((TREE_CODE (arg0) == ADDR_EXPR
		 || TREE_CODE (arg1) == ADDR_EXPR)
		&& ptr_difference_const (arg0, arg1, &diff))
	      return build_int_cst_type (type, diff);
	  }

	  /* Fold &a[i] - &a[j] to i-j.  */
	  if (TREE_CODE (arg0) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
	      && TREE_CODE (arg1) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
	    {
	      tree aref0 = TREE_OPERAND (arg0, 0);
	      tree aref1 = TREE_OPERAND (arg1, 0);
	      if (operand_equal_p (TREE_OPERAND (aref0, 0),
				   TREE_OPERAND (aref1, 0), 0))
		{
		  tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
		  tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
		  tree esz = array_ref_element_size (aref0);
		  tree diff = build2 (MINUS_EXPR, type, op0, op1);
		  return fold_build2_loc (loc, MULT_EXPR, type, diff,
					  fold_convert_loc (loc, type, esz));
		}
	    }
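	  /* For example, &a[9] - &a[2] is folded here to (9 - 2) * sizeof (a[0]),
	     the distance in bytes between the two elements.  */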
	  if (FLOAT_TYPE_P (type)
	      && flag_unsafe_math_optimizations
	      && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
	      && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
	      && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
	    return tem;

	  /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
	     same or one.  Make sure type is not saturating.
	     fold_plusminus_mult_expr will re-associate.  */
	  if ((TREE_CODE (arg0) == MULT_EXPR
	       || TREE_CODE (arg1) == MULT_EXPR)
	      && !TYPE_SATURATING (type)
	      && (!FLOAT_TYPE_P (type) || flag_associative_math))
	    {
	      tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
	      if (tem)
		return tem;
	    }

	  goto associate;

	case MULT_EXPR:
	  /* (-A) * (-B) -> A * B  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	    return fold_build2_loc (loc, MULT_EXPR, type,
				    fold_convert_loc (loc, type,
						      TREE_OPERAND (arg0, 0)),
				    fold_convert_loc (loc, type,
						      negate_expr (arg1)));
	  if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	    return fold_build2_loc (loc, MULT_EXPR, type,
				    fold_convert_loc (loc, type,
						      negate_expr (arg0)),
				    fold_convert_loc (loc, type,
						      TREE_OPERAND (arg1, 0)));
	  if (! FLOAT_TYPE_P (type))
	    {
	      if (integer_zerop (arg1))
		return omit_one_operand_loc (loc, type, arg1, arg0);
	      if (integer_onep (arg1))
		return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
	      /* Transform x * -1 into -x.  Make sure to do the negation
		 on the original operand with conversions not stripped
		 because we can only strip non-sign-changing conversions.  */
	      if (integer_all_onesp (arg1))
		return fold_convert_loc (loc, type, negate_expr (op0));
	      /* Transform x * -C into -x * C if x is easily negatable.  */
	      if (TREE_CODE (arg1) == INTEGER_CST
		  && tree_int_cst_sgn (arg1) == -1
		  && negate_expr_p (arg0)
		  && (tem = negate_expr (arg1)) != arg1
		  && !TREE_OVERFLOW (tem))
		return fold_build2_loc (loc, MULT_EXPR, type,
					fold_convert_loc (loc, type,
							  negate_expr (arg0)),
					tem);

	      /* (a * (1 << b)) is (a << b)  */
	      if (TREE_CODE (arg1) == LSHIFT_EXPR
		  && integer_onep (TREE_OPERAND (arg1, 0)))
		return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
					TREE_OPERAND (arg1, 1));
	      if (TREE_CODE (arg0) == LSHIFT_EXPR
		  && integer_onep (TREE_OPERAND (arg0, 0)))
		return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
					TREE_OPERAND (arg0, 1));

	      /* (A + A) * C -> A * 2 * C  */
	      if (TREE_CODE (arg0) == PLUS_EXPR
		  && TREE_CODE (arg1) == INTEGER_CST
		  && operand_equal_p (TREE_OPERAND (arg0, 0),
				      TREE_OPERAND (arg0, 1), 0))
		return fold_build2_loc (loc, MULT_EXPR, type,
					omit_one_operand_loc (loc, type,
							      TREE_OPERAND (arg0, 0),
							      TREE_OPERAND (arg0, 1)),
					fold_build2_loc (loc, MULT_EXPR, type,
							 build_int_cst (type, 2), arg1));
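	      /* For instance, (x + x) * 5 matches the (A + A) * C pattern above
		 and becomes x * 10 once the two constant factors are folded
		 together.  */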
	      strict_overflow_p = false;
	      if (TREE_CODE (arg1) == INTEGER_CST
		  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
						 &strict_overflow_p)))
		{
		  if (strict_overflow_p)
		    fold_overflow_warning (("assuming signed overflow does not "
					    "occur when simplifying "
					    "multiplication"),
					   WARN_STRICT_OVERFLOW_MISC);
		  return fold_convert_loc (loc, type, tem);
		}

	      /* Optimize z * conj(z) for integer complex numbers.  */
	      if (TREE_CODE (arg0) == CONJ_EXPR
		  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
		return fold_mult_zconjz (loc, type, arg1);
	      if (TREE_CODE (arg1) == CONJ_EXPR
		  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
		return fold_mult_zconjz (loc, type, arg0);
	    }
	  else
	    {
	      /* Maybe fold x * 0 to 0.  The expressions aren't the same
		 when x is NaN, since x * 0 is also NaN.  Nor are they the
		 same in modes with signed zeros, since multiplying a
		 negative value by 0 gives -0, not +0.  */
	      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
		  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
		  && real_zerop (arg1))
		return omit_one_operand_loc (loc, type, arg1, arg0);
	      /* In IEEE floating point, x*1 is not equivalent to x for snans.
		 Likewise for complex arithmetic with signed zeros.  */
	      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
		  && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
		      || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
		  && real_onep (arg1))
		return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	      /* Transform x * -1.0 into -x.  */
	      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
		  && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
		      || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
		  && real_minus_onep (arg1))
		return fold_convert_loc (loc, type, negate_expr (arg0));

	      /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
		 the result for floating point types due to rounding so it is applied
		 only if -fassociative-math was specified.  */
	      if (flag_associative_math
		  && TREE_CODE (arg0) == RDIV_EXPR
		  && TREE_CODE (arg1) == REAL_CST
		  && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
		{
		  tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
					  arg1);
		  if (tem)
		    return fold_build2_loc (loc, RDIV_EXPR, type, tem,
					    TREE_OPERAND (arg0, 1));
		}

	      /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
	      if (operand_equal_p (arg0, arg1, 0))
		{
		  tree tem = fold_strip_sign_ops (arg0);
		  if (tem != NULL_TREE)
		    {
		      tem = fold_convert_loc (loc, type, tem);
		      return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
		    }
		}

	      /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
		 This is not the same for NaNs or if signed zeros are
		 involved.  */
	      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
		  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
		  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
		  && TREE_CODE (arg1) == COMPLEX_CST
		  && real_zerop (TREE_REALPART (arg1)))
		{
		  tree rtype = TREE_TYPE (TREE_TYPE (arg0));
		  if (real_onep (TREE_IMAGPART (arg1)))
		    return
		      fold_build2_loc (loc, COMPLEX_EXPR, type,
				       negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
								     rtype, arg0)),
				       fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
		  else if (real_minus_onep (TREE_IMAGPART (arg1)))
		    return
		      fold_build2_loc (loc, COMPLEX_EXPR, type,
				       fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
				       negate_expr (fold_build1_loc (loc, REALPART_EXPR,
								     rtype, arg0)));
		}
	      /* Optimize z * conj(z) for floating point complex numbers.
		 Guarded by flag_unsafe_math_optimizations as non-finite
		 imaginary components don't produce scalar results.  */
	      if (flag_unsafe_math_optimizations
		  && TREE_CODE (arg0) == CONJ_EXPR
		  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
		return fold_mult_zconjz (loc, type, arg1);
	      if (flag_unsafe_math_optimizations
		  && TREE_CODE (arg1) == CONJ_EXPR
		  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
		return fold_mult_zconjz (loc, type, arg0);
	      if (flag_unsafe_math_optimizations)
		{
		  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
		  enum built_in_function fcode1 = builtin_mathfn_code (arg1);

		  /* Optimizations of root(...)*root(...).  */
		  if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
		    {
		      tree rootfn, arg;
		      tree arg00 = CALL_EXPR_ARG (arg0, 0);
		      tree arg10 = CALL_EXPR_ARG (arg1, 0);

		      /* Optimize sqrt(x)*sqrt(x) as x.  */
		      if (BUILTIN_SQRT_P (fcode0)
			  && operand_equal_p (arg00, arg10, 0)
			  && ! HONOR_SNANS (TYPE_MODE (type)))
			return arg00;

		      /* Optimize root(x)*root(y) as root(x*y).  */
		      rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
		      return build_call_expr_loc (loc, rootfn, 1, arg);
		    }

		  /* Optimize expN(x)*expN(y) as expN(x+y).  */
		  if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
		    {
		      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
						  CALL_EXPR_ARG (arg0, 0),
						  CALL_EXPR_ARG (arg1, 0));
		      return build_call_expr_loc (loc, expfn, 1, arg);
		    }
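		  /* For example, exp (x) * exp (y) becomes exp (x + y) and
		     sqrt (x) * sqrt (y) becomes sqrt (x * y); both folds rely
		     on -funsafe-math-optimizations being in effect.  */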
		  /* Optimizations of pow(...)*pow(...).  */
		  if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
		      || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
		      || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
		    {
		      tree arg00 = CALL_EXPR_ARG (arg0, 0);
		      tree arg01 = CALL_EXPR_ARG (arg0, 1);
		      tree arg10 = CALL_EXPR_ARG (arg1, 0);
		      tree arg11 = CALL_EXPR_ARG (arg1, 1);

		      /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
		      if (operand_equal_p (arg01, arg11, 0))
			{
			  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
			  tree arg = fold_build2_loc (loc, MULT_EXPR, type,
						      arg00, arg10);
			  return build_call_expr_loc (loc, powfn, 2, arg, arg01);
			}

		      /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
		      if (operand_equal_p (arg00, arg10, 0))
			{
			  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
			  tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
						      arg01, arg11);
			  return build_call_expr_loc (loc, powfn, 2, arg00, arg);
			}
		    }

		  /* Optimize tan(x)*cos(x) as sin(x).  */
		  if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
		       || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
		       || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
		       || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
		       || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
		       || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
		      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
					  CALL_EXPR_ARG (arg1, 0), 0))
		    {
		      tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

		      if (sinfn != NULL_TREE)
			return build_call_expr_loc (loc, sinfn, 1,
						    CALL_EXPR_ARG (arg0, 0));
		    }

		  /* Optimize x*pow(x,c) as pow(x,c+1).  */
		  if (fcode1 == BUILT_IN_POW
		      || fcode1 == BUILT_IN_POWF
		      || fcode1 == BUILT_IN_POWL)
		    {
		      tree arg10 = CALL_EXPR_ARG (arg1, 0);
		      tree arg11 = CALL_EXPR_ARG (arg1, 1);
		      if (TREE_CODE (arg11) == REAL_CST
			  && !TREE_OVERFLOW (arg11)
			  && operand_equal_p (arg0, arg10, 0))
			{
			  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
			  REAL_VALUE_TYPE c;
			  tree arg;

			  c = TREE_REAL_CST (arg11);
			  real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
			  arg = build_real (type, c);
			  return build_call_expr_loc (loc, powfn, 2, arg0, arg);
			}
		    }

		  /* Optimize pow(x,c)*x as pow(x,c+1).  */
		  if (fcode0 == BUILT_IN_POW
		      || fcode0 == BUILT_IN_POWF
		      || fcode0 == BUILT_IN_POWL)
		    {
		      tree arg00 = CALL_EXPR_ARG (arg0, 0);
		      tree arg01 = CALL_EXPR_ARG (arg0, 1);
		      if (TREE_CODE (arg01) == REAL_CST
			  && !TREE_OVERFLOW (arg01)
			  && operand_equal_p (arg1, arg00, 0))
			{
			  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
			  REAL_VALUE_TYPE c;
			  tree arg;

			  c = TREE_REAL_CST (arg01);
			  real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
			  arg = build_real (type, c);
			  return build_call_expr_loc (loc, powfn, 2, arg1, arg);
			}
		    }

		  /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x.  */
		  if (!in_gimple_form
		      && optimize
		      && operand_equal_p (arg0, arg1, 0))
		    {
		      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

		      if (powfn)
			{
			  tree arg = build_real (type, dconst2);
			  return build_call_expr_loc (loc, powfn, 2, arg0, arg);
			}
		    }
		}
	    }
	  goto associate;

	case BIT_IOR_EXPR:
	bit_ior:
	  if (integer_all_onesp (arg1))
	    return omit_one_operand_loc (loc, type, arg1, arg0);
	  if (integer_zerop (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
	  if (operand_equal_p (arg0, arg1, 0))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* ~X | X is -1.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    {
	      t1 = build_zero_cst (type);
	      t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	      return omit_one_operand_loc (loc, type, t1, arg1);
	    }

	  /* X | ~X is -1.  */
	  if (TREE_CODE (arg1) == BIT_NOT_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    {
	      t1 = build_zero_cst (type);
	      t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	      return omit_one_operand_loc (loc, type, t1, arg0);
	    }
	  /* Canonicalize (X & C1) | C2.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	    {
	      unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
	      int width = TYPE_PRECISION (type), w;
	      hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
	      lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
	      hi2 = TREE_INT_CST_HIGH (arg1);
	      lo2 = TREE_INT_CST_LOW (arg1);

	      /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
	      if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
		return omit_one_operand_loc (loc, type, arg1,
					     TREE_OPERAND (arg0, 0));

	      if (width > HOST_BITS_PER_WIDE_INT)
		{
		  mhi = (unsigned HOST_WIDE_INT) -1
			>> (2 * HOST_BITS_PER_WIDE_INT - width);
		  mlo = -1;
		}
	      else
		{
		  mhi = 0;
		  mlo = (unsigned HOST_WIDE_INT) -1
			>> (HOST_BITS_PER_WIDE_INT - width);
		}

	      /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
	      if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
		return fold_build2_loc (loc, BIT_IOR_EXPR, type,
					TREE_OPERAND (arg0, 0), arg1);

	      /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
		 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
		 mode which allows further optimizations.  */
	      hi1 &= mhi;
	      lo1 &= mlo;
	      hi2 &= mhi;
	      lo2 &= mlo;
	      hi3 = hi1 & ~hi2;
	      lo3 = lo1 & ~lo2;
	      for (w = BITS_PER_UNIT;
		   w <= width && w <= HOST_BITS_PER_WIDE_INT;
		   w <<= 1)
		{
		  unsigned HOST_WIDE_INT mask
		    = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
		  if (((lo1 | lo2) & mask) == mask
		      && (lo1 & ~mask) == 0 && hi1 == 0)
		    {
		      hi3 = 0;
		      lo3 = mask;
		      break;
		    }
		}
	      if (hi3 != hi1 || lo3 != lo1)
		return fold_build2_loc (loc, BIT_IOR_EXPR, type,
					fold_build2_loc (loc, BIT_AND_EXPR, type,
							 TREE_OPERAND (arg0, 0),
							 build_int_cst_wide (type,
									     lo3, hi3)),
					arg1);
	    }
	  /* (X & Y) | Y is (X, Y).  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	    return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
	  /* (X & Y) | X is (Y, X).  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	      && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	    return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
	  /* X | (X & Y) is (Y, X).  */
	  if (TREE_CODE (arg1) == BIT_AND_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
	      && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
	    return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
	  /* X | (Y & X) is (Y, X).  */
	  if (TREE_CODE (arg1) == BIT_AND_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	      && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	    return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
	  /* (X & ~Y) | (~X & Y) is X ^ Y */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR)
	    {
	      tree a0, a1, l0, l1, n0, n1;

	      a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	      a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

	      l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	      l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

	      n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
	      n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);

	      if ((operand_equal_p (n0, a0, 0)
		   && operand_equal_p (n1, a1, 0))
		  || (operand_equal_p (n0, a1, 0)
		      && operand_equal_p (n1, a0, 0)))
		return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
	    }

	  t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
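	  /* distribute_bit_expr handles the distributive folds, e.g.
	     (a & b) | (a & c) becomes a & (b | c).  */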
	  /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

	     This results in more efficient code for machines without a NAND
	     instruction.  Combine will canonicalize to the first form
	     which will allow use of NAND instructions provided by the
	     backend if they exist.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && TREE_CODE (arg1) == BIT_NOT_EXPR)
	    {
	      return
		fold_build1_loc (loc, BIT_NOT_EXPR, type,
				 build2 (BIT_AND_EXPR, type,
					 fold_convert_loc (loc, type,
							   TREE_OPERAND (arg0, 0)),
					 fold_convert_loc (loc, type,
							   TREE_OPERAND (arg1, 0))));
	    }

	  /* See if this can be simplified into a rotate first.  If that
	     is unsuccessful continue in the association code.  */
	  goto bit_rotate;

	case BIT_XOR_EXPR:
	  if (integer_zerop (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
	  if (integer_all_onesp (arg1))
	    return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
	  if (operand_equal_p (arg0, arg1, 0))
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

	  /* ~X ^ X is -1.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    {
	      t1 = build_zero_cst (type);
	      t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	      return omit_one_operand_loc (loc, type, t1, arg1);
	    }

	  /* X ^ ~X is -1.  */
	  if (TREE_CODE (arg1) == BIT_NOT_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    {
	      t1 = build_zero_cst (type);
	      t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	      return omit_one_operand_loc (loc, type, t1, arg0);
	    }
	  /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
	     with a constant, and the two constants have no bits in common,
	     we should treat this as a BIT_IOR_EXPR since this may produce more
	     simplifications.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	      && integer_zerop (const_binop (BIT_AND_EXPR,
					     TREE_OPERAND (arg0, 1),
					     TREE_OPERAND (arg1, 1))))
	    {
	      code = BIT_IOR_EXPR;
	      goto bit_ior;
	    }
	  /* (X | Y) ^ X -> Y & ~X.  */
	  if (TREE_CODE (arg0) == BIT_IOR_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    {
	      tree t2 = TREE_OPERAND (arg0, 1);
	      t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
				    arg1);
	      t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				    fold_convert_loc (loc, type, t2),
				    fold_convert_loc (loc, type, t1));
	      return t1;
	    }

	  /* (Y | X) ^ X -> Y & ~X.  */
	  if (TREE_CODE (arg0) == BIT_IOR_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	    {
	      tree t2 = TREE_OPERAND (arg0, 0);
	      t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
				    arg1);
	      t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				    fold_convert_loc (loc, type, t2),
				    fold_convert_loc (loc, type, t1));
	      return t1;
	    }

	  /* X ^ (X | Y) -> Y & ~X.  */
	  if (TREE_CODE (arg1) == BIT_IOR_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
	    {
	      tree t2 = TREE_OPERAND (arg1, 1);
	      t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
				    arg0);
	      t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				    fold_convert_loc (loc, type, t2),
				    fold_convert_loc (loc, type, t1));
	      return t1;
	    }

	  /* X ^ (Y | X) -> Y & ~X.  */
	  if (TREE_CODE (arg1) == BIT_IOR_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
	    {
	      tree t2 = TREE_OPERAND (arg1, 0);
	      t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
				    arg0);
	      t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
				    fold_convert_loc (loc, type, t2),
				    fold_convert_loc (loc, type, t1));
	      return t1;
	    }
	  /* Convert ~X ^ ~Y to X ^ Y.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && TREE_CODE (arg1) == BIT_NOT_EXPR)
	    return fold_build2_loc (loc, code, type,
				    fold_convert_loc (loc, type,
						      TREE_OPERAND (arg0, 0)),
				    fold_convert_loc (loc, type,
						      TREE_OPERAND (arg1, 0)));

	  /* Convert ~X ^ C to X ^ ~C.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && TREE_CODE (arg1) == INTEGER_CST)
	    return fold_build2_loc (loc, code, type,
				    fold_convert_loc (loc, type,
						      TREE_OPERAND (arg0, 0)),
				    fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));

	  /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && integer_onep (TREE_OPERAND (arg0, 1))
	      && integer_onep (arg1))
	    return fold_build2_loc (loc, EQ_EXPR, type, arg0,
				    build_int_cst (TREE_TYPE (arg0), 0));

	  /* Fold (X & Y) ^ Y as ~X & Y.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	    {
	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				      fold_convert_loc (loc, type, arg1));
	    }
	  /* Fold (X & Y) ^ X as ~Y & X.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	      && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	    {
	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				      fold_convert_loc (loc, type, arg1));
	    }
	  /* Fold X ^ (X & Y) as X & ~Y.  */
	  if (TREE_CODE (arg1) == BIT_AND_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    {
	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_convert_loc (loc, type, arg0),
				      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
	    }
	  /* Fold X ^ (Y & X) as ~Y & X.  */
	  if (TREE_CODE (arg1) == BIT_AND_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	      && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	    {
	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				      fold_convert_loc (loc, type, arg0));
	    }

	  /* See if this can be simplified into a rotate first.  If that
	     is unsuccessful continue in the association code.  */
	  goto bit_rotate;

	case BIT_AND_EXPR:
	  if (integer_all_onesp (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
	  if (integer_zerop (arg1))
	    return omit_one_operand_loc (loc, type, arg1, arg0);
	  if (operand_equal_p (arg0, arg1, 0))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* ~X & X, (X == 0) & X, and !X & X are always zero.  */
	  if ((TREE_CODE (arg0) == BIT_NOT_EXPR
	       || TREE_CODE (arg0) == TRUTH_NOT_EXPR
	       || (TREE_CODE (arg0) == EQ_EXPR
		   && integer_zerop (TREE_OPERAND (arg0, 1))))
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

	  /* X & ~X, X & (X == 0), and X & !X are always zero.  */
	  if ((TREE_CODE (arg1) == BIT_NOT_EXPR
	       || TREE_CODE (arg1) == TRUTH_NOT_EXPR
	       || (TREE_CODE (arg1) == EQ_EXPR
		   && integer_zerop (TREE_OPERAND (arg1, 1))))
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
	  /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
	  if (TREE_CODE (arg0) == BIT_IOR_EXPR
	      && TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	    {
	      tree tmp1 = fold_convert_loc (loc, type, arg1);
	      tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	      tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	      tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
	      tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
	      return
		fold_convert_loc (loc, type,
				  fold_build2_loc (loc, BIT_IOR_EXPR,
						   type, tmp2, tmp3));
	    }

	  /* (X | Y) & Y is (X, Y).  */
	  if (TREE_CODE (arg0) == BIT_IOR_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	    return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
	  /* (X | Y) & X is (Y, X).  */
	  if (TREE_CODE (arg0) == BIT_IOR_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	      && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	    return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
	  /* X & (X | Y) is (Y, X).  */
	  if (TREE_CODE (arg1) == BIT_IOR_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
	      && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
	    return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
	  /* X & (Y | X) is (Y, X).  */
	  if (TREE_CODE (arg1) == BIT_IOR_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	      && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	    return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
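	  /* For example, in (x | y) & y the conjunction can only keep bits of
	     y, so the result is y; omit_one_operand_loc preserves any side
	     effects of the discarded subexpression x.  */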
	  /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
	  if (TREE_CODE (arg0) == BIT_XOR_EXPR
	      && integer_onep (TREE_OPERAND (arg0, 1))
	      && integer_onep (arg1))
	    {
	      tem = TREE_OPERAND (arg0, 0);
	      return fold_build2_loc (loc, EQ_EXPR, type,
				      fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
						       build_int_cst (TREE_TYPE (tem), 1)),
				      build_int_cst (TREE_TYPE (tem), 0));
	    }
	  /* Fold ~X & 1 as (X & 1) == 0.  */
	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
	      && integer_onep (arg1))
	    {
	      tem = TREE_OPERAND (arg0, 0);
	      return fold_build2_loc (loc, EQ_EXPR, type,
				      fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
						       build_int_cst (TREE_TYPE (tem), 1)),
				      build_int_cst (TREE_TYPE (tem), 0));
	    }
	  /* Fold !X & 1 as X == 0.  */
	  if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	      && integer_onep (arg1))
	    {
	      tem = TREE_OPERAND (arg0, 0);
	      return fold_build2_loc (loc, EQ_EXPR, type, tem,
				      build_int_cst (TREE_TYPE (tem), 0));
	    }
	  /* Fold (X ^ Y) & Y as ~X & Y.  */
	  if (TREE_CODE (arg0) == BIT_XOR_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	    {
	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				      fold_convert_loc (loc, type, arg1));
	    }
	  /* Fold (X ^ Y) & X as ~Y & X.  */
	  if (TREE_CODE (arg0) == BIT_XOR_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	      && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	    {
	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				      fold_convert_loc (loc, type, arg1));
	    }
	  /* Fold X & (X ^ Y) as X & ~Y.  */
	  if (TREE_CODE (arg1) == BIT_XOR_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    {
	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_convert_loc (loc, type, arg0),
				      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
	    }
	  /* Fold X & (Y ^ X) as ~Y & X.  */
	  if (TREE_CODE (arg1) == BIT_XOR_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	      && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	    {
	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
				      fold_convert_loc (loc, type, arg0));
	    }
11360 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11361 ((A & N) + B) & M -> (A + B) & M
11362 Similarly if (N & M) == 0,
11363 ((A | N) + B) & M -> (A + B) & M
11364 and for - instead of + (or unary - instead of +)
11365 and/or ^ instead of |.
11366 If B is constant and (B & M) == 0, fold into A & M. */
      if (host_integerp (arg1, 1))
        {
          unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
          if (~cst1 && (cst1 & (cst1 + 1)) == 0
              && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
              && (TREE_CODE (arg0) == PLUS_EXPR
                  || TREE_CODE (arg0) == MINUS_EXPR
                  || TREE_CODE (arg0) == NEGATE_EXPR)
              && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
                  || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
            {
              tree pmop[2];
              int which = 0;
              unsigned HOST_WIDE_INT cst0;

              /* Now we know that arg0 is (C + D) or (C - D) or
                 -C and arg1 (M) is == (1LL << cst) - 1.
                 Store C into PMOP[0] and D into PMOP[1].  */
              pmop[0] = TREE_OPERAND (arg0, 0);
              pmop[1] = NULL;
              if (TREE_CODE (arg0) != NEGATE_EXPR)
                {
                  pmop[1] = TREE_OPERAND (arg0, 1);
                  which = 1;
                }

              if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
                  || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
                      & cst1) != cst1)
                which = -1;

              for (; which >= 0; which--)
                switch (TREE_CODE (pmop[which]))
                  {
                  case BIT_AND_EXPR:
                  case BIT_IOR_EXPR:
                  case BIT_XOR_EXPR:
                    if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
                        != INTEGER_CST)
                      break;
                    /* tree_low_cst not used, because we don't care about
                       the upper bits.  */
                    cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
                    cst0 &= cst1;
                    if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
                      {
                        if (cst0 != cst1)
                          break;
                      }
                    else if (cst0 != 0)
                      break;
                    /* If C or D is of the form (A & N) where
                       (N & M) == M, or of the form (A | N) or
                       (A ^ N) where (N & M) == 0, replace it with A.  */
                    pmop[which] = TREE_OPERAND (pmop[which], 0);
                    break;
                  case INTEGER_CST:
                    /* If C or D is a N where (N & M) == 0, it can be
                       omitted (assumed 0).  */
                    if ((TREE_CODE (arg0) == PLUS_EXPR
                         || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
                        && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
                      pmop[which] = NULL;
                    break;
                  default:
                    break;
                  }

              /* Only build anything new if we optimized one or both arguments
                 above.  */
              if (pmop[0] != TREE_OPERAND (arg0, 0)
                  || (TREE_CODE (arg0) != NEGATE_EXPR
                      && pmop[1] != TREE_OPERAND (arg0, 1)))
                {
                  tree utype = TREE_TYPE (arg0);
                  if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
                    {
                      /* Perform the operations in a type that has defined
                         overflow behavior.  */
                      utype = unsigned_type_for (TREE_TYPE (arg0));
                      if (pmop[0] != NULL)
                        pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
                      if (pmop[1] != NULL)
                        pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
                    }

                  if (TREE_CODE (arg0) == NEGATE_EXPR)
                    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
                  else if (TREE_CODE (arg0) == PLUS_EXPR)
                    {
                      if (pmop[0] != NULL && pmop[1] != NULL)
                        tem = fold_build2_loc (loc, PLUS_EXPR, utype,
                                               pmop[0], pmop[1]);
                      else if (pmop[0] != NULL)
                        tem = pmop[0];
                      else if (pmop[1] != NULL)
                        tem = pmop[1];
                      else
                        return build_int_cst (type, 0);
                    }
                  else if (pmop[0] == NULL)
                    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
                  else
                    tem = fold_build2_loc (loc, MINUS_EXPR, utype,
                                           pmop[0], pmop[1]);
                  /* TEM is now the new binary +, - or unary - replacement.  */
                  tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
                                         fold_convert_loc (loc, utype, arg1));
                  return fold_convert_loc (loc, type, tem);
                }
            }
        }
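      /* Illustrative sketch (not part of the original source; the operand
         names are hypothetical): with M == 0xff, i.e. M == (1 << 8) - 1,
         and N == 0x100, so (N & M) == 0, the fold above rewrites

           ((a | 0x100) + b) & 0xff   as   (a + b) & 0xff

         because every bit N contributes is masked away by M anyway.  */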
      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
          && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
        {
          unsigned int prec
            = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

          if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
              && (~TREE_INT_CST_LOW (arg1)
                  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
            return
              fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
        }
      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

         This results in more efficient code for machines without a NOR
         instruction.  Combine will canonicalize to the first form
         which will allow use of NOR instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return fold_build1_loc (loc, BIT_NOT_EXPR, type,
                              build2 (BIT_IOR_EXPR, type,
                                      fold_convert_loc (loc, type,
                                                        TREE_OPERAND (arg0, 0)),
                                      fold_convert_loc (loc, type,
                                                        TREE_OPERAND (arg1, 0))));
        }
      /* If arg0 is derived from the address of an object or function, we may
         be able to fold this expression using the object or function's
         alignment.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
        {
          unsigned HOST_WIDE_INT modulus, residue;
          unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);

          modulus = get_pointer_modulus_and_residue (arg0, &residue,
                                                     integer_onep (arg1));

          /* This works because modulus is a power of 2.  If this weren't the
             case, we'd have to replace it by its greatest power-of-2
             divisor: modulus & -modulus.  */
          if (low < modulus)
            return build_int_cst (type, residue & low);
        }
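      /* Illustrative sketch (hypothetical declaration, not from the
         original source): given

           static int v __attribute__ ((aligned (8)));

         the modulus/residue computed above lets an expression such as
         ((intptr_t) &v) & 7 fold to the constant 0.  */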
      /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
         (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
         if the new mask might be further optimized.  */
      if ((TREE_CODE (arg0) == LSHIFT_EXPR
           || TREE_CODE (arg0) == RSHIFT_EXPR)
          && host_integerp (TREE_OPERAND (arg0, 1), 1)
          && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
          && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
             < TYPE_PRECISION (TREE_TYPE (arg0))
          && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
          && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
        {
          unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
          unsigned HOST_WIDE_INT mask
            = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
          unsigned HOST_WIDE_INT newmask, zerobits = 0;
          tree shift_type = TREE_TYPE (arg0);

          if (TREE_CODE (arg0) == LSHIFT_EXPR)
            zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
          else if (TREE_CODE (arg0) == RSHIFT_EXPR
                   && TYPE_PRECISION (TREE_TYPE (arg0))
                      == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
            {
              unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
              tree arg00 = TREE_OPERAND (arg0, 0);
              /* See if more bits can be proven as zero because of
                 zero extension.  */
              if (TREE_CODE (arg00) == NOP_EXPR
                  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
                {
                  tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
                  if (TYPE_PRECISION (inner_type)
                      == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
                      && TYPE_PRECISION (inner_type) < prec)
                    {
                      prec = TYPE_PRECISION (inner_type);
                      /* See if we can shorten the right shift.  */
                      if (shiftc < prec)
                        shift_type = inner_type;
                    }
                }
              zerobits = ~(unsigned HOST_WIDE_INT) 0;
              zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
              zerobits <<= prec - shiftc;
              /* For arithmetic shift if sign bit could be set, zerobits
                 can contain actually sign bits, so no transformation is
                 possible, unless MASK masks them all away.  In that
                 case the shift needs to be converted into logical shift.  */
              if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
                  && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
                {
                  if ((mask & zerobits) == 0)
                    shift_type = unsigned_type_for (TREE_TYPE (arg0));
                  else
                    zerobits = 0;
                }
            }

          /* ((X << 16) & 0xff00) is (X, 0).  */
          if ((mask & zerobits) == mask)
            return omit_one_operand_loc (loc, type,
                                         build_int_cst (type, 0), arg0);

          newmask = mask | zerobits;
          if (newmask != mask && (newmask & (newmask + 1)) == 0)
            {
              unsigned int prec;

              /* Only do the transformation if NEWMASK is some integer
                 mode's mask.  */
              for (prec = BITS_PER_UNIT;
                   prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
                if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
                  break;
              if (prec < HOST_BITS_PER_WIDE_INT
                  || newmask == ~(unsigned HOST_WIDE_INT) 0)
                {
                  tree newmaskt;

                  if (shift_type != TREE_TYPE (arg0))
                    {
                      tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
                                             fold_convert_loc (loc, shift_type,
                                                               TREE_OPERAND (arg0, 0)),
                                             TREE_OPERAND (arg0, 1));
                      tem = fold_convert_loc (loc, type, tem);
                    }
                  else
                    tem = op0;
                  newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
                  if (!tree_int_cst_equal (newmaskt, arg1))
                    return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                            tem, newmaskt);
                }
            }
        }
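      /* Illustrative sketch (hypothetical operands, not from the original
         source): for a 32-bit unsigned a, the new-mask computation above
         widens the mask in

           (a << 8) & 0xffffff00

         to 0xffffffff (all ones for the type), after which later folding
         can drop the now-redundant BIT_AND entirely.  */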
      goto associate;

    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
         of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
          && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
          && real_zerop (arg1))
        return NULL_TREE;

      /* Optimize A / A to 1.0 if we don't care about
         NaNs or Infinities.  Skip the transformation
         for non-real operands.  */
      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
          && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
          && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
          && operand_equal_p (arg0, arg1, 0))
        {
          tree r = build_real (TREE_TYPE (arg0), dconst1);

          return omit_two_operands_loc (loc, type, r, arg0, arg1);
        }
      /* The complex version of the above A / A optimization.  */
      if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
          && operand_equal_p (arg0, arg1, 0))
        {
          tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
          if (! HONOR_NANS (TYPE_MODE (elem_type))
              && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
            {
              tree r = build_real (elem_type, dconst1);
              /* omit_two_operands will call fold_convert for us.  */
              return omit_two_operands_loc (loc, type, r, arg0, arg1);
            }
        }
      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2_loc (loc, RDIV_EXPR, type,
                            TREE_OPERAND (arg0, 0),
                            negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2_loc (loc, RDIV_EXPR, type,
                            negate_expr (arg0),
                            TREE_OPERAND (arg1, 0));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_onep (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_minus_onep (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type,
                                                  negate_expr (arg0)));
      /* If ARG1 is a constant, we can convert this to a multiply by the
         reciprocal.  This does not have the same rounding properties,
         so only do this if -freciprocal-math.  We can actually
         always safely do it if ARG1 is a power of two, but it's hard to
         tell if it is or not in a portable manner.  */
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (flag_reciprocal_math
              && 0 != (tem = const_binop (code, build_real (type, dconst1),
                                          arg1)))
            return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
          /* Find the reciprocal if optimizing and the result is exact.  */
          if (optimize)
            {
              REAL_VALUE_TYPE r;
              r = TREE_REAL_CST (arg1);
              if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
                {
                  tem = build_real (type, r);
                  return fold_build2_loc (loc, MULT_EXPR, type,
                                      fold_convert_loc (loc, type, arg0), tem);
                }
            }
        }
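      /* Illustrative sketch (hypothetical operands): with
         -freciprocal-math, x / 5.0 becomes x * (1.0 / 5.0), i.e. x * 0.2;
         and for a divisor whose reciprocal is exactly representable, such
         as x / 2.0, the exact_real_inverse path above yields x * 0.5
         whenever we are optimizing, without needing -freciprocal-math.  */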
      /* Convert A/B/C to A/(B*C).  */
      if (flag_reciprocal_math
          && TREE_CODE (arg0) == RDIV_EXPR)
        return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
                            fold_build2_loc (loc, MULT_EXPR, type,
                                         TREE_OPERAND (arg0, 1), arg1));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_reciprocal_math
          && TREE_CODE (arg1) == RDIV_EXPR)
        return fold_build2_loc (loc, MULT_EXPR, type,
                            fold_build2_loc (loc, RDIV_EXPR, type, arg0,
                                         TREE_OPERAND (arg1, 0)),
                            TREE_OPERAND (arg1, 1));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_reciprocal_math
          && TREE_CODE (arg1) == MULT_EXPR
          && TREE_CODE (arg0) == REAL_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
        {
          tree tem = const_binop (RDIV_EXPR, arg0,
                                  TREE_OPERAND (arg1, 1));
          if (tem)
            return fold_build2_loc (loc, RDIV_EXPR, type, tem,
                                TREE_OPERAND (arg1, 0));
        }
      if (flag_unsafe_math_optimizations)
        {
          enum built_in_function fcode0 = builtin_mathfn_code (arg0);
          enum built_in_function fcode1 = builtin_mathfn_code (arg1);

          /* Optimize sin(x)/cos(x) as tan(x).  */
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
              && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                  CALL_EXPR_ARG (arg1, 0), 0))
            {
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

              if (tanfn != NULL_TREE)
                return build_call_expr_loc (loc, tanfn, 1,
                                            CALL_EXPR_ARG (arg0, 0));
            }

          /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
          if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
               || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
               || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
              && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                  CALL_EXPR_ARG (arg1, 0), 0))
            {
              tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

              if (tanfn != NULL_TREE)
                {
                  tree tmp = build_call_expr_loc (loc, tanfn, 1,
                                              CALL_EXPR_ARG (arg0, 0));
                  return fold_build2_loc (loc, RDIV_EXPR, type,
                                      build_real (type, dconst1), tmp);
                }
            }

          /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
             NaNs or Infinities.  */
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg1, 0);

              if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
                  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
                  && operand_equal_p (arg00, arg01, 0))
                {
                  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

                  if (cosfn != NULL_TREE)
                    return build_call_expr_loc (loc, cosfn, 1, arg00);
                }
            }

          /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
             NaNs or Infinities.  */
          if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
               || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
               || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg1, 0);

              if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
                  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
                  && operand_equal_p (arg00, arg01, 0))
                {
                  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

                  if (cosfn != NULL_TREE)
                    {
                      tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
                      return fold_build2_loc (loc, RDIV_EXPR, type,
                                          build_real (type, dconst1),
                                          tmp);
                    }
                }
            }

          /* Optimize pow(x,c)/x as pow(x,c-1).  */
          if (fcode0 == BUILT_IN_POW
              || fcode0 == BUILT_IN_POWF
              || fcode0 == BUILT_IN_POWL)
            {
              tree arg00 = CALL_EXPR_ARG (arg0, 0);
              tree arg01 = CALL_EXPR_ARG (arg0, 1);
              if (TREE_CODE (arg01) == REAL_CST
                  && !TREE_OVERFLOW (arg01)
                  && operand_equal_p (arg1, arg00, 0))
                {
                  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  REAL_VALUE_TYPE c;
                  tree arg;

                  c = TREE_REAL_CST (arg01);
                  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
                  arg = build_real (type, c);
                  return build_call_expr_loc (loc, powfn, 2, arg1, arg);
                }
            }

          /* Optimize a/root(b/c) into a*root(c/b).  */
          if (BUILTIN_ROOT_P (fcode1))
            {
              tree rootarg = CALL_EXPR_ARG (arg1, 0);

              if (TREE_CODE (rootarg) == RDIV_EXPR)
                {
                  tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
                  tree b = TREE_OPERAND (rootarg, 0);
                  tree c = TREE_OPERAND (rootarg, 1);

                  tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);

                  tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
                  return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
                }
            }

          /* Optimize x/expN(y) into x*expN(-y).  */
          if (BUILTIN_EXPONENT_P (fcode1))
            {
              tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
              tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
              arg1 = build_call_expr_loc (loc,
                                      expfn, 1,
                                      fold_convert_loc (loc, type, arg));
              return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
            }

          /* Optimize x/pow(y,z) into x*pow(y,-z).  */
          if (fcode1 == BUILT_IN_POW
              || fcode1 == BUILT_IN_POWF
              || fcode1 == BUILT_IN_POWL)
            {
              tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
              tree arg10 = CALL_EXPR_ARG (arg1, 0);
              tree arg11 = CALL_EXPR_ARG (arg1, 1);
              tree neg11 = fold_convert_loc (loc, type,
                                             negate_expr (arg11));
              arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
              return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
            }
        }
      return NULL_TREE;
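      /* Illustrative sketch (hypothetical operands): under
         -funsafe-math-optimizations the folds above rewrite, e.g.,

           sin (a) / cos (a)   as   tan (a)
           x / exp (y)         as   x * exp (-y)
           pow (x, 3.0) / x    as   pow (x, 2.0)

         assuming the corresponding built-in is available for TYPE.  */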
    case TRUNC_DIV_EXPR:
      /* Optimize (X & (-A)) / A where A is a power of 2,
         to X >> log2(A) */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
          && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
        {
          tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
                                      arg1, TREE_OPERAND (arg0, 1));
          if (sum && integer_zerop (sum)) {
            unsigned long pow2;

            if (TREE_INT_CST_LOW (arg1))
              pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
            else
              pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
                      + HOST_BITS_PER_WIDE_INT;

            return fold_build2_loc (loc, RSHIFT_EXPR, type,
                          TREE_OPERAND (arg0, 0),
                          build_int_cst (integer_type_node, pow2));
          }
        }

      /* Fall thru */
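      /* Illustrative sketch (hypothetical operands): for signed x,

           (x & -16) / 16   folds to   x >> 4

         because 16 is a power of two and the AND with -16 guarantees the
         division is exact, so a plain arithmetic shift suffices.  */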
    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
         a power of 2, to A >> (N + log2(B)).  */
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == LSHIFT_EXPR
          && (TYPE_UNSIGNED (type)
              || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
        {
          tree sval = TREE_OPERAND (arg1, 0);
          if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
            {
              tree sh_cnt = TREE_OPERAND (arg1, 1);
              unsigned long pow2;

              if (TREE_INT_CST_LOW (sval))
                pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
              else
                pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
                       + HOST_BITS_PER_WIDE_INT;

              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying A / (B << N)"),
                                       WARN_STRICT_OVERFLOW_MISC);

              sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
                                    sh_cnt,
                                    build_int_cst (TREE_TYPE (sh_cnt),
                                                   pow2));
              return fold_build2_loc (loc, RSHIFT_EXPR, type,
                                  fold_convert_loc (loc, type, arg0), sh_cnt);
            }
        }

      /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
         TRUNC_DIV_EXPR.  Rewrite into the latter in this case.  */
      if (INTEGRAL_TYPE_P (type)
          && TYPE_UNSIGNED (type)
          && code == FLOOR_DIV_EXPR)
        return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);

      /* Fall thru */
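      /* Illustrative sketch (hypothetical operands): for unsigned a,

           a / (4 << n)   folds to   a >> (n + 2)

         since 4 is a power of two and log2 (4) == 2.  */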
    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
        return NULL_TREE;
      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
          && TREE_INT_CST_HIGH (arg1) == -1)
        return fold_convert_loc (loc, type, negate_expr (arg0));

      /* Convert -A / -B to A / B when the type is signed and overflow is
         undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          && TREE_CODE (arg0) == NEGATE_EXPR
          && negate_expr_p (arg1))
        {
          if (INTEGRAL_TYPE_P (type))
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when distributing negation across "
                                    "division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_build2_loc (loc, code, type,
                              fold_convert_loc (loc, type,
                                                TREE_OPERAND (arg0, 0)),
                              fold_convert_loc (loc, type,
                                                negate_expr (arg1)));
        }
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
          && TREE_CODE (arg1) == NEGATE_EXPR
          && negate_expr_p (arg0))
        {
          if (INTEGRAL_TYPE_P (type))
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when distributing negation across "
                                    "division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_build2_loc (loc, code, type,
                              fold_convert_loc (loc, type,
                                                negate_expr (arg0)),
                              fold_convert_loc (loc, type,
                                                TREE_OPERAND (arg1, 0)));
        }

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
         operation, EXACT_DIV_EXPR.

         Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
         At one time others generated faster code, it's not clear if they do
         after the last round to changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
          && multiple_of_p (type, arg0, arg1))
        return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                         &strict_overflow_p)))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying division"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert_loc (loc, type, tem);
        }

      return NULL_TREE;
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % 1 is always zero, but be sure to preserve any side
         effects in X.  */
      if (integer_onep (arg1))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % 0, return X % 0 unchanged so that we can get the
         proper warnings and errors.  */
      if (integer_zerop (arg1))
        return NULL_TREE;

      /* 0 % X is always zero, but be sure to preserve any side
         effects in X.  Place this after checking for X == 0.  */
      if (integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
          && TREE_INT_CST_HIGH (arg1) == -1)
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
          && !TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == INTEGER_CST
          && !TREE_OVERFLOW (arg1)
          && TREE_INT_CST_HIGH (arg1) < 0
          && !TYPE_OVERFLOW_TRAPS (type)
          /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
          && !sign_bit_p (arg1, arg1))
        return fold_build2_loc (loc, code, type,
                            fold_convert_loc (loc, type, arg0),
                            fold_convert_loc (loc, type,
                                              negate_expr (arg1)));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
          && !TYPE_UNSIGNED (type)
          && TREE_CODE (arg1) == NEGATE_EXPR
          && !TYPE_OVERFLOW_TRAPS (type))
        return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
                            fold_convert_loc (loc, type,
                                              TREE_OPERAND (arg1, 0)));

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                         &strict_overflow_p)))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying modulus"),
                                   WARN_STRICT_OVERFLOW_MISC);
          return fold_convert_loc (loc, type, tem);
        }

      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
         i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
          && (TYPE_UNSIGNED (type)
              || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
        {
          tree c = arg1;
          /* Also optimize A % (C << N)  where C is a power of 2,
             to A & ((C << N) - 1).  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR)
            c = TREE_OPERAND (arg1, 0);

          if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
            {
              tree mask
                = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
                                   build_int_cst (TREE_TYPE (arg1), 1));
              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying "
                                        "X % (power of two)"),
                                       WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_convert_loc (loc, type, arg0),
                                  fold_convert_loc (loc, type, mask));
            }
        }

      return NULL_TREE;
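      /* Illustrative sketch (hypothetical operands): for unsigned x,

           x % 8        folds to   x & 7
           x % (2 << n) folds to   x & ((2 << n) - 1)

         since both divisors are (shifted) powers of two.  */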
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
          && tree_expr_nonnegative_p (arg1))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Since negative shift count is not well-defined,
         don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
        return NULL_TREE;

      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
      if (TREE_CODE (op0) == code && host_integerp (arg1, false)
          && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
          && host_integerp (TREE_OPERAND (arg0, 1), false)
          && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
        {
          HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
                               + TREE_INT_CST_LOW (arg1));

          /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
             being well defined.  */
          if (low >= TYPE_PRECISION (type))
            {
              if (code == LROTATE_EXPR || code == RROTATE_EXPR)
                low = low % TYPE_PRECISION (type);
              else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
                return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
                                             TREE_OPERAND (arg0, 0));
              else
                low = TYPE_PRECISION (type) - 1;
            }

          return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                              build_int_cst (type, low));
        }
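      /* Illustrative sketch (hypothetical operands): for a 32-bit x,

           (x << 3) << 5   folds to   x << 8

         while (x << 20) << 20 folds to the constant 0, because the
         combined count reaches the precision of the type.  */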
      /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
         into x & ((unsigned)-1 >> c) for unsigned types.  */
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
           || (TYPE_UNSIGNED (type)
               && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
          && host_integerp (arg1, false)
          && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
          && host_integerp (TREE_OPERAND (arg0, 1), false)
          && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
        {
          HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
          HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);

          if (low0 == low1)
            {
              tree arg00;
              tree lshift;

              arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

              lshift = build_int_cst (type, -1);
              lshift = int_const_binop (code, lshift, arg1);

              return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
            }
        }

      /* Rewrite an LROTATE_EXPR by a constant into an
         RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
        {
          tree tem = build_int_cst (TREE_TYPE (arg1),
                                    TYPE_PRECISION (type));
          tem = const_binop (MINUS_EXPR, tem, arg1);
          return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
        }

      /* If we have a rotate of a bit operation with the rotate count and
         the second operand of the bit operation both constant,
         permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && (TREE_CODE (arg0) == BIT_AND_EXPR
              || TREE_CODE (arg0) == BIT_IOR_EXPR
              || TREE_CODE (arg0) == BIT_XOR_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2_loc (loc, TREE_CODE (arg0), type,
                            fold_build2_loc (loc, code, type,
                                         TREE_OPERAND (arg0, 0), arg1),
                            fold_build2_loc (loc, code, type,
                                         TREE_OPERAND (arg0, 1), arg1));

      /* Two consecutive rotates adding up to the precision of the
         type can be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == RROTATE_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (arg1) == 0
          && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
          && ((TREE_INT_CST_LOW (arg1)
               + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
              == (unsigned int) TYPE_PRECISION (type)))
        return TREE_OPERAND (arg0, 0);

      /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
              (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
         if the latter can be further optimized.  */
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree mask = fold_build2_loc (loc, code, type,
                                   fold_convert_loc (loc, type,
                                                     TREE_OPERAND (arg0, 1)),
                                   arg1);
          tree shift = fold_build2_loc (loc, code, type,
                                    fold_convert_loc (loc, type,
                                                      TREE_OPERAND (arg0, 0)),
                                    arg1);
          tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
          if (tem)
            return tem;
        }

      return NULL_TREE;
    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
          && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
      if (tem)
        return tem;
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
          && TYPE_MAX_VALUE (type)
          && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
      if (tem)
        return tem;
      goto associate;
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
        return fold_convert_loc (loc, type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
         must be evaluated.  */
      if (integer_zerop (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
         case will be handled here.  */
      if (integer_zerop (arg0))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
         means A >= Y && A != MAX, but in this case we know that
         A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
          && !TREE_SIDE_EFFECTS (arg1))
        {
          tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
          if (tem && !operand_equal_p (tem, arg0, 0))
            return fold_build2_loc (loc, code, type, tem, arg1);

          tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
          if (tem && !operand_equal_p (tem, arg1, 0))
            return fold_build2_loc (loc, code, type, arg0, tem);
        }

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
          != NULL_TREE)
        return tem;

      return NULL_TREE;
:
12333 /* Note that the operands of this must be ints
12334 and their values must be 0 or true.
12335 ("true" is a fixed value perhaps depending on the language.) */
12336 /* If first arg is constant true, return it. */
12337 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
12338 return fold_convert_loc (loc
, type
, arg0
);
12339 case TRUTH_OR_EXPR
:
12340 /* If either arg is constant zero, drop it. */
12341 if (TREE_CODE (arg0
) == INTEGER_CST
&& integer_zerop (arg0
))
12342 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg1
));
12343 if (TREE_CODE (arg1
) == INTEGER_CST
&& integer_zerop (arg1
)
12344 /* Preserve sequence points. */
12345 && (code
!= TRUTH_ORIF_EXPR
|| ! TREE_SIDE_EFFECTS (arg0
)))
12346 return non_lvalue_loc (loc
, fold_convert_loc (loc
, type
, arg0
));
12347 /* If second arg is constant true, result is true, but we must
12348 evaluate first arg. */
12349 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
))
12350 return omit_one_operand_loc (loc
, type
, arg1
, arg0
);
12351 /* Likewise for first arg, but note this only occurs here for
12353 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
12354 return omit_one_operand_loc (loc
, type
, arg0
, arg1
);
12356 /* !X || X is always true. */
12357 if (TREE_CODE (arg0
) == TRUTH_NOT_EXPR
12358 && operand_equal_p (TREE_OPERAND (arg0
, 0), arg1
, 0))
12359 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg1
);
12360 /* X || !X is always true. */
12361 if (TREE_CODE (arg1
) == TRUTH_NOT_EXPR
12362 && operand_equal_p (arg0
, TREE_OPERAND (arg1
, 0), 0))
12363 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg0
);
12365 /* (X && !Y) || (!X && Y) is X ^ Y */
12366 if (TREE_CODE (arg0
) == TRUTH_AND_EXPR
12367 && TREE_CODE (arg1
) == TRUTH_AND_EXPR
)
12369 tree a0
, a1
, l0
, l1
, n0
, n1
;
12371 a0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 0));
12372 a1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg1
, 1));
12374 l0
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 0));
12375 l1
= fold_convert_loc (loc
, type
, TREE_OPERAND (arg0
, 1));
12377 n0
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l0
);
12378 n1
= fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
, l1
);
12380 if ((operand_equal_p (n0
, a0
, 0)
12381 && operand_equal_p (n1
, a1
, 0))
12382 || (operand_equal_p (n0
, a1
, 0)
12383 && operand_equal_p (n1
, a0
, 0)))
12384 return fold_build2_loc (loc
, TRUTH_XOR_EXPR
, type
, l0
, n1
);
12387 if ((tem
= fold_truth_andor (loc
, code
, type
, arg0
, arg1
, op0
, op1
))
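      /* Illustrative sketch (hypothetical operands): for truth values
         a and b,

           (a && !b) || (!a && b)   folds to   a ^ b

         via the TRUTH_XOR_EXPR match above.  */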
    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
        {
          /* Only call invert_truthvalue if operand is a truth value.  */
          if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
            tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
          else
            tem = invert_truthvalue_loc (loc, arg0);
          return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
        }
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      return NULL_TREE;
    case EQ_EXPR:
    case NE_EXPR:
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
        return tem;

      /* bool_var != 0 becomes bool_var. */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
          && code == NE_EXPR)
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var == 1 becomes bool_var. */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
          && code == EQ_EXPR)
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var != 1 becomes !bool_var. */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
          && code == NE_EXPR)
        return fold_convert_loc (loc, type,
                             fold_build1_loc (loc, TRUTH_NOT_EXPR,
                                          TREE_TYPE (arg0), arg0));

      /* bool_var == 0 becomes !bool_var. */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
          && code == EQ_EXPR)
        return fold_convert_loc (loc, type,
                             fold_build1_loc (loc, TRUTH_NOT_EXPR,
                                          TREE_TYPE (arg0), arg0));

      /* !exp != 0 becomes !exp */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
          && code == NE_EXPR)
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If this is an equality comparison of the address of two non-weak,
         unaliased symbols neither of which are extern (since we do not
         have access to attributes for externs), then we know the result.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
          && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
          && ! lookup_attribute ("alias",
                                 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
          && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
          && TREE_CODE (arg1) == ADDR_EXPR
          && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
          && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
          && ! lookup_attribute ("alias",
                                 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
          && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
        {
          /* We know that we're looking at the address of two
             non-weak, unaliased, static _DECL nodes.

             It is both wasteful and incorrect to call operand_equal_p
             to compare the two ADDR_EXPR nodes.  It is wasteful in that
             all we need to do is test pointer equality for the arguments
             to the two ADDR_EXPR nodes.  It is incorrect to use
             operand_equal_p as that function is NOT equivalent to a
             C equality test.  It can in fact return false for two
             objects which would test as equal using the C equality
             operator.  */
          bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
          return constant_boolean_node (equal
                                        ? code == EQ_EXPR : code != EQ_EXPR,
                                        type);
        }
      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
         a MINUS_EXPR of a constant, we can convert it into a comparison with
         a revised constant as long as no overflow occurs.  */
      if (TREE_CODE (arg1) == INTEGER_CST
          && (TREE_CODE (arg0) == PLUS_EXPR
              || TREE_CODE (arg0) == MINUS_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
                                      ? MINUS_EXPR : PLUS_EXPR,
                                      fold_convert_loc (loc, TREE_TYPE (arg0),
                                                        arg1),
                                      TREE_OPERAND (arg0, 1)))
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a NEGATE_EXPR.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
                                                        arg1)))
          && TREE_CODE (tem) == INTEGER_CST
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a BIT_XOR_EXPR;  X ^ C1 == C2 is X == (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                            fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
                                         fold_convert_loc (loc,
                                                           TREE_TYPE (arg0),
                                                           arg1),
                                         TREE_OPERAND (arg0, 1)));
      /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR
           || TREE_CODE (arg0) == POINTER_PLUS_EXPR
           || TREE_CODE (arg0) == MINUS_EXPR)
          && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
                                                                        0)),
                              arg1, 0)
          && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
              || POINTER_TYPE_P (TREE_TYPE (arg0))))
        {
          tree val = TREE_OPERAND (arg0, 1);
          return omit_two_operands_loc (loc, type,
                                        fold_build2_loc (loc, code, type,
                                                         val,
                                                         build_int_cst (TREE_TYPE (val),
                                                                        0)),
                                        TREE_OPERAND (arg0, 0), arg1);
        }
      /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
          && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
                                                                        1)),
                              arg1, 0)
          && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
        {
          return omit_two_operands_loc (loc, type,
                                        code == NE_EXPR
                                        ? boolean_true_node : boolean_false_node,
                                        TREE_OPERAND (arg0, 1), arg1);
        }

      /* If we have X - Y == 0, we can convert that to X == Y and similarly
         for !=.  Don't do this for ordered comparisons due to overflow.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
          && integer_zerop (arg1))
        return fold_build2_loc (loc, code, type,
                            TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      if (TREE_CODE (arg0) == ABS_EXPR
          && (integer_zerop (arg1) || real_zerop (arg1)))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
      /* If this is an EQ or NE comparison with zero and ARG0 is
         (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
         two operations, but the latter can be done in one less insn
         on machines that have only two-operand insns or on which a
         constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_zerop (arg1))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          if (TREE_CODE (arg00) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg00, 0)))
            {
              tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
                                      arg01, TREE_OPERAND (arg00, 1));
              tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
                                 build_int_cst (TREE_TYPE (arg0), 1));
              return fold_build2_loc (loc, code, type,
                                  fold_convert_loc (loc, TREE_TYPE (arg1), tem),
                                  arg1);
            }
          else if (TREE_CODE (arg01) == LSHIFT_EXPR
                   && integer_onep (TREE_OPERAND (arg01, 0)))
            {
              tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
                                      arg00, TREE_OPERAND (arg01, 1));
              tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
                                 build_int_cst (TREE_TYPE (arg0), 1));
              return fold_build2_loc (loc, code, type,
                                  fold_convert_loc (loc, TREE_TYPE (arg1), tem),
                                  arg1);
            }
        }
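      /* Illustrative sketch (hypothetical operands):

           ((1 << n) & flags) != 0   becomes   ((flags >> n) & 1) != 0

         which needs one less instruction on two-operand machines.  */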
      /* If this is an NE or EQ comparison of zero against the result of a
         signed MOD operation whose second operand is a power of 2, make
         the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
          && !TYPE_UNSIGNED (TREE_TYPE (arg0))
          && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
              || TREE_CODE (arg0) == CEIL_MOD_EXPR
              || TREE_CODE (arg0) == FLOOR_MOD_EXPR
              || TREE_CODE (arg0) == ROUND_MOD_EXPR)
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
        {
          tree newtype = unsigned_type_for (TREE_TYPE (arg0));
          tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
                                     fold_convert_loc (loc, newtype,
                                                       TREE_OPERAND (arg0, 0)),
                                     fold_convert_loc (loc, newtype,
                                                       TREE_OPERAND (arg0, 1)));

          return fold_build2_loc (loc, code, type, newmod,
                              fold_convert_loc (loc, newtype, arg1));
        }
      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
         C1 is a valid shift constant, and C2 is a power of two, i.e.
         a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
             == INTEGER_CST
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && integer_zerop (arg1))
        {
          tree itype = TREE_TYPE (arg0);
          unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
          tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);

          /* Check for a valid shift count.  */
          if (TREE_INT_CST_HIGH (arg001) == 0
              && TREE_INT_CST_LOW (arg001) < prec)
            {
              tree arg01 = TREE_OPERAND (arg0, 1);
              tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
              unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
              /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
                 can be rewritten as (X & (C2 << C1)) != 0.  */
              if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
                {
                  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
                  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
                  return fold_build2_loc (loc, code, type, tem,
                                      fold_convert_loc (loc, itype, arg1));
                }
              /* Otherwise, for signed (arithmetic) shifts,
                 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
                 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
              else if (!TYPE_UNSIGNED (itype))
                return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
                                    arg000, build_int_cst (itype, 0));
              /* Otherwise, of unsigned (logical) shifts,
                 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
                 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
              else
                return omit_one_operand_loc (loc, type,
                                         code == EQ_EXPR ? integer_one_node
                                                         : integer_zero_node,
                                         arg000);
            }
        }
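      /* Illustrative sketch (hypothetical operands): for unsigned x,

           ((x >> 2) & 4) != 0   becomes   (x & 16) != 0

         since the shifted mask 4 << 2 == 16 does not overflow.  */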
      /* If we have (A & C) == C where C is a power of 2, convert this into
         (A & C) != 0.  Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
                            arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
                                                integer_zero_node));

      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
         bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
      if (tem)
        return tem;
      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
         Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
                                   TREE_TYPE (TREE_OPERAND (arg0, 1)),
                                   TREE_OPERAND (arg0, 1));
          tree dandnotc
            = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
                               fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
                               notc);
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
          if (integer_nonzerop (dandnotc))
            return omit_one_operand_loc (loc, type, rslt, arg0);
        }

      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
         Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
          tree candnotd
            = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
                               TREE_OPERAND (arg0, 1),
                               fold_convert_loc (loc, TREE_TYPE (arg0), notd));
          tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
          if (integer_nonzerop (candnotd))
            return omit_one_operand_loc (loc, type, rslt, arg0);
        }
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if ((TREE_CODE (arg0) == COMPONENT_REF
           || TREE_CODE (arg0) == BIT_FIELD_REF)
          /* Handle the constant case even without -O
             to make sure the warnings are given.  */
          && (optimize || TREE_CODE (arg1) == INTEGER_CST))
        {
          t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
          if (t1)
            return t1;
        }
      /* Optimize comparisons of strlen vs zero to a compare of the
         first character of the string vs zero.  To wit,
                strlen(ptr) == 0   =>  *ptr == 0
                strlen(ptr) != 0   =>  *ptr != 0
         Other cases should reduce to one of these two (or a constant)
         due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
          && integer_zerop (arg1))
        {
          tree fndecl = get_callee_fndecl (arg0);

          if (fndecl
              && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
              && call_expr_nargs (arg0) == 1
              && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
            {
              tree iref = build_fold_indirect_ref_loc (loc,
                                                       CALL_EXPR_ARG (arg0, 0));
              return fold_build2_loc (loc, code, type, iref,
                                  build_int_cst (TREE_TYPE (iref), 0));
            }
        }
      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
         of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
      if (TREE_CODE (arg0) == RSHIFT_EXPR
          && integer_zerop (arg1)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree itype = TREE_TYPE (arg00);
          if (TREE_INT_CST_HIGH (arg01) == 0
              && TREE_INT_CST_LOW (arg01)
                 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
            {
              if (TYPE_UNSIGNED (itype))
                {
                  itype = signed_type_for (itype);
                  arg00 = fold_convert_loc (loc, itype, arg00);
                }
              return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                                  type, arg00, build_int_cst (itype, 0));
            }
        }
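      /* Illustrative sketch (hypothetical operands): for a 32-bit int x,

           (x >> 31) != 0   folds to   x < 0
           (x >> 31) == 0   folds to   x >= 0

         because the shift count equals the precision minus one.  */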
      /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
      if (integer_zerop (arg1)
          && TREE_CODE (arg0) == BIT_XOR_EXPR)
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                            TREE_OPERAND (arg0, 1));

      /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                            build_int_cst (TREE_TYPE (arg0), 0));
      /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
                            build_int_cst (TREE_TYPE (arg0), 0));

      /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
                            fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
                                         TREE_OPERAND (arg0, 1), arg1));

      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
         (X & C) == 0 when C is a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
        {
          tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
                             TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
                             TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
                              type, tem,
                              fold_convert_loc (loc, TREE_TYPE (arg0),
                                                arg1));
        }

      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
         constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
                              arg00, build_int_cst (TREE_TYPE (arg00), 0));
        }

      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
         when is C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
          && integer_zerop (arg1)
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
        {
          tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
          tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
                             arg000, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
                              tem, build_int_cst (TREE_TYPE (tem), 0));
        }

      if (integer_zerop (arg1)
          && tree_expr_nonzero_p (arg0))
        {
          tree res = constant_boolean_node (code==NE_EXPR, type);
          return omit_one_operand_loc (loc, type, res, arg0);
        }

      /* Fold -X op -Y as X op Y, where op is eq/ne.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2_loc (loc, code, type,
                            TREE_OPERAND (arg0, 0),
                            fold_convert_loc (loc, TREE_TYPE (arg0),
                                              TREE_OPERAND (arg1, 0)));
      /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg10 = TREE_OPERAND (arg1, 0);
          tree arg11 = TREE_OPERAND (arg1, 1);
          tree itype = TREE_TYPE (arg0);

          if (operand_equal_p (arg01, arg11, 0))
            return fold_build2_loc (loc, code, type,
                                fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                             fold_build2_loc (loc,
                                                          BIT_XOR_EXPR, itype,
                                                          arg00, arg10),
                                             arg01),
                                build_int_cst (itype, 0));

          if (operand_equal_p (arg01, arg10, 0))
            return fold_build2_loc (loc, code, type,
                                fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                             fold_build2_loc (loc,
                                                          BIT_XOR_EXPR, itype,
                                                          arg00, arg11),
                                             arg01),
                                build_int_cst (itype, 0));

          if (operand_equal_p (arg00, arg11, 0))
            return fold_build2_loc (loc, code, type,
                                fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                             fold_build2_loc (loc,
                                                          BIT_XOR_EXPR, itype,
                                                          arg01, arg10),
                                             arg00),
                                build_int_cst (itype, 0));

          if (operand_equal_p (arg00, arg10, 0))
            return fold_build2_loc (loc, code, type,
                                fold_build2_loc (loc, BIT_AND_EXPR, itype,
                                             fold_build2_loc (loc,
                                                          BIT_XOR_EXPR, itype,
                                                          arg01, arg11),
                                             arg00),
                                build_int_cst (itype, 0));
        }
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && TREE_CODE (arg1) == BIT_XOR_EXPR)
        {
          tree arg00 = TREE_OPERAND (arg0, 0);
          tree arg01 = TREE_OPERAND (arg0, 1);
          tree arg10 = TREE_OPERAND (arg1, 0);
          tree arg11 = TREE_OPERAND (arg1, 1);
          tree itype = TREE_TYPE (arg0);

          /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
             operand_equal_p guarantees no side-effects so we don't need
             to use omit_one_operand on Z.  */
          if (operand_equal_p (arg01, arg11, 0))
            return fold_build2_loc (loc, code, type, arg00,
                                fold_convert_loc (loc, TREE_TYPE (arg00),
                                                  arg10));
          if (operand_equal_p (arg01, arg10, 0))
            return fold_build2_loc (loc, code, type, arg00,
                                fold_convert_loc (loc, TREE_TYPE (arg00),
                                                  arg11));
          if (operand_equal_p (arg00, arg11, 0))
            return fold_build2_loc (loc, code, type, arg01,
                                fold_convert_loc (loc, TREE_TYPE (arg01),
                                                  arg10));
          if (operand_equal_p (arg00, arg10, 0))
            return fold_build2_loc (loc, code, type, arg01,
                                fold_convert_loc (loc, TREE_TYPE (arg01),
                                                  arg11));

          /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
          if (TREE_CODE (arg01) == INTEGER_CST
              && TREE_CODE (arg11) == INTEGER_CST)
            {
              tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
                                 fold_convert_loc (loc, itype, arg11));
              tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
              return fold_build2_loc (loc, code, type, tem,
                                  fold_convert_loc (loc, itype, arg10));
            }
        }
      /* Attempt to simplify equality/inequality comparisons of complex
         values.  Only lower the comparison if the result is known or
         can be simplified to a single scalar comparison.  */
      if ((TREE_CODE (arg0) == COMPLEX_EXPR
           || TREE_CODE (arg0) == COMPLEX_CST)
          && (TREE_CODE (arg1) == COMPLEX_EXPR
              || TREE_CODE (arg1) == COMPLEX_CST))
        {
          tree real0, imag0, real1, imag1;
          tree rcond, icond;

          if (TREE_CODE (arg0) == COMPLEX_EXPR)
            {
              real0 = TREE_OPERAND (arg0, 0);
              imag0 = TREE_OPERAND (arg0, 1);
            }
          else
            {
              real0 = TREE_REALPART (arg0);
              imag0 = TREE_IMAGPART (arg0);
            }

          if (TREE_CODE (arg1) == COMPLEX_EXPR)
            {
              real1 = TREE_OPERAND (arg1, 0);
              imag1 = TREE_OPERAND (arg1, 1);
            }
          else
            {
              real1 = TREE_REALPART (arg1);
              imag1 = TREE_IMAGPART (arg1);
            }

          rcond = fold_binary_loc (loc, code, type, real0, real1);
          if (rcond && TREE_CODE (rcond) == INTEGER_CST)
            {
              if (integer_zerop (rcond))
                {
                  if (code == EQ_EXPR)
                    return omit_two_operands_loc (loc, type, boolean_false_node,
                                              imag0, imag1);
                  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
                }
              else
                {
                  if (code == NE_EXPR)
                    return omit_two_operands_loc (loc, type, boolean_true_node,
                                              imag0, imag1);
                  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
                }
            }

          icond = fold_binary_loc (loc, code, type, imag0, imag1);
          if (icond && TREE_CODE (icond) == INTEGER_CST)
            {
              if (integer_zerop (icond))
                {
                  if (code == EQ_EXPR)
                    return omit_two_operands_loc (loc, type, boolean_false_node,
                                              real0, real1);
                  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
                }
              else
                {
                  if (code == NE_EXPR)
                    return omit_two_operands_loc (loc, type, boolean_true_node,
                                              real0, real1);
                  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
                }
            }
        }

      return NULL_TREE;
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;

      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	       && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
	      || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  enum tree_code code0 = TREE_CODE (arg0);
	  int is_positive;

	  if (TREE_CODE (arg01) == REAL_CST)
	    is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
	  else
	    is_positive = tree_int_cst_sgn (arg01);

	  /* (X - c) > X becomes false.  */
	  if (code == GT_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that (X - c) > X "
					"is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Likewise (X + c) < X becomes false.  */
	  if (code == LT_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) < X is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Convert (X - c) <= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == LE_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X - c) <= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  /* Convert (X + c) >= X to true.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == GE_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) >= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  if (TREE_CODE (arg01) == INTEGER_CST)
	    {
	      /* Convert X + c > X and X - c < X to true for integers.  */
	      if (code == GT_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) > X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      if (code == LT_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) < X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      /* Convert X + c <= X and X - c >= X to false for integers.  */
	      if (code == LE_EXPR
		  && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) <= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}

	      if (code == GE_EXPR
		  && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) >= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}
	    }
	}
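      /* Worked example of the transforms above, assuming a signed int x
	 for which overflow is undefined (-fstrict-overflow); the constant
	 1 is an arbitrary positive example value:

	   x - 1 >  x  -->  0 (false)
	   x + 1 <  x  -->  0 (false)
	   x + 1 >  x  -->  1 (true)
	   x - 1 >= x  -->  0 (false)

	 With -fwrapv none of these fire, since x + 1 may wrap around.  */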
      /* Comparisons with the highest or lowest possible integer of
	 the specified precision will have known values.  */
      {
	tree arg1_type = TREE_TYPE (arg1);
	unsigned int width = TYPE_PRECISION (arg1_type);

	if (TREE_CODE (arg1) == INTEGER_CST
	    && width <= 2 * HOST_BITS_PER_WIDE_INT
	    && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
	  {
	    HOST_WIDE_INT signed_max_hi;
	    unsigned HOST_WIDE_INT signed_max_lo;
	    unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;

	    if (width <= HOST_BITS_PER_WIDE_INT)
	      {
		signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		signed_max_hi = 0;
		max_hi = 0;

		if (TYPE_UNSIGNED (arg1_type))
		  {
		    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_lo = 0;
		    min_hi = 0;
		  }
		else
		  {
		    max_lo = signed_max_lo;
		    min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
		    min_hi = -1;
		  }
	      }
	    else
	      {
		width -= HOST_BITS_PER_WIDE_INT;
		signed_max_lo = -1;
		signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		max_lo = -1;
		min_lo = 0;

		if (TYPE_UNSIGNED (arg1_type))
		  {
		    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_hi = 0;
		  }
		else
		  {
		    max_hi = signed_max_hi;
		    min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
		  }
	      }

	    if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
		&& TREE_INT_CST_LOW (arg1) == max_lo)
	      switch (code)
		{
		case GT_EXPR:
		  return omit_one_operand_loc (loc, type, integer_zero_node,
					       arg0);

		case GE_EXPR:
		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

		case LE_EXPR:
		  return omit_one_operand_loc (loc, type, integer_one_node,
					       arg0);

		case LT_EXPR:
		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

		/* The GE_EXPR and LT_EXPR cases above are not normally
		   reached because of previous transformations.  */

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == max_hi
		     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
	      switch (code)
		{
		case GT_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, EQ_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		case LE_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, NE_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo)
	      switch (code)
		{
		case LT_EXPR:
		  return omit_one_operand_loc (loc, type, integer_zero_node,
					       arg0);

		case LE_EXPR:
		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

		case GE_EXPR:
		  return omit_one_operand_loc (loc, type, integer_one_node,
					       arg0);

		case GT_EXPR:
		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

		default:
		  break;
		}

	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
	      switch (code)
		{
		case GE_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
		  return fold_build2_loc (loc, NE_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		case LT_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
		  return fold_build2_loc (loc, EQ_EXPR, type,
					  fold_convert_loc (loc,
							    TREE_TYPE (arg1),
							    arg0),
					  arg1);
		default:
		  break;
		}

	    else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
		     && TREE_INT_CST_LOW (arg1) == signed_max_lo
		     && TYPE_UNSIGNED (arg1_type)
		     /* We will flip the signedness of the comparison operator
			associated with the mode of arg1, so the sign bit is
			specified by this mode.  Check that arg1 is the signed
			max associated with this sign bit.  */
		     && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
		     /* signed_type does not work on pointer types.  */
		     && INTEGRAL_TYPE_P (arg1_type))
	      {
		/* The following case also applies to X < signed_max+1
		   and X >= signed_max+1 because previous transformations.  */
		if (code == LE_EXPR || code == GT_EXPR)
		  {
		    tree st;
		    st = signed_type_for (TREE_TYPE (arg1));
		    return fold_build2_loc (loc,
					    code == LE_EXPR ? GE_EXPR : LT_EXPR,
					    type,
					    fold_convert_loc (loc, st, arg0),
					    build_int_cst (st, 0));
		  }
	      }
	  }
      }
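      /* Example of the limit-value folds above, for a 32-bit signed int x
	 and unsigned int u (values are illustrative):

	   x >  INT_MAX      -->  0
	   x >= INT_MAX      -->  x == INT_MAX
	   x <  INT_MIN      -->  0
	   u <= UINT_MAX - 1 -->  u != UINT_MAX

	 and, from the sign-flipping case, u > INT_MAX (the signed max seen
	 as unsigned) becomes (int) u < 0, a bare sign-bit test.  */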
      /* If we are comparing an ABS_EXPR with a constant, we can
	 convert all the cases into explicit comparisons, but they may
	 well not be faster than doing the ABS and one comparison.
	 But ABS (X) <= C is a range comparison, which becomes a subtraction
	 and a comparison, and is probably faster.  */
      if (code == LE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == ABS_EXPR
	  && ! TREE_SIDE_EFFECTS (arg0)
	  && (0 != (tem = negate_expr (arg1)))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				build2 (GE_EXPR, type,
					TREE_OPERAND (arg0, 0), tem),
				build2 (LE_EXPR, type,
					TREE_OPERAND (arg0, 0), arg1));
      /* Convert ABS_EXPR<x> >= 0 to true.  */
      strict_overflow_p = false;
      if (code == GE_EXPR
	  && (integer_zerop (arg1)
	      || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
		  && real_zerop (arg1)))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
	}

      /* Convert ABS_EXPR<x> < 0 to false.  */
      strict_overflow_p = false;
      if (code == LT_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
	}
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
	 and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TREE_CODE (arg1) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (arg1, 0)))
	return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			   build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
				   TREE_OPERAND (arg1, 1)),
			   build_int_cst (TREE_TYPE (arg0), 0));

      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && CONVERT_EXPR_P (arg1)
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
	{
	  tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
			TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
	  return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			     fold_convert_loc (loc, TREE_TYPE (arg0), tem),
			     build_int_cst (TREE_TYPE (arg0), 0));
	}

      return NULL_TREE;
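      /* Sketch of the shift-comparison rewrite above, for an unsigned x
	 and a variable shift count y (example expressions only):

	   x <  (1U << y)  -->  (x >> y) == 0
	   x >= (1U << y)  -->  (x >> y) != 0

	 This avoids materializing the power-of-two constant at run time.  */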
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
	{
	  t1 = fold_relational_const (code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
	}

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand_loc (loc, type, t1, arg1);
	}

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand_loc (loc, type, t1, arg0);
	}

      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
	  && !flag_trapping_math
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (0, type);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
	tree targ0 = strip_float_extensions (arg0);
	tree targ1 = strip_float_extensions (arg1);
	tree newtype = TREE_TYPE (targ0);

	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	  newtype = TREE_TYPE (targ1);

	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, newtype, targ0),
				  fold_convert_loc (loc, newtype, targ1));
      }

      return NULL_TREE;
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
	 nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
				 : fold_convert_loc (loc, type, arg1);
      return pedantic_non_lvalue_loc (loc, tem);

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg0) == REAL_CST
	   && TREE_CODE (arg1) == REAL_CST)
	  || (TREE_CODE (arg0) == INTEGER_CST
	      && TREE_CODE (arg1) == INTEGER_CST))
	return build_complex (type, arg0, arg1);
      if (TREE_CODE (arg0) == REALPART_EXPR
	  && TREE_CODE (arg1) == IMAGPART_EXPR
	  && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
	  && operand_equal_p (TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg1, 0));
      return NULL_TREE;

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();
    case VEC_EXTRACT_EVEN_EXPR:
    case VEC_EXTRACT_ODD_EXPR:
    case VEC_INTERLEAVE_HIGH_EXPR:
    case VEC_INTERLEAVE_LOW_EXPR:
      if ((TREE_CODE (arg0) == VECTOR_CST
	   || TREE_CODE (arg0) == CONSTRUCTOR)
	  && (TREE_CODE (arg1) == VECTOR_CST
	      || TREE_CODE (arg1) == CONSTRUCTOR))
	{
	  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	  unsigned char *sel = XALLOCAVEC (unsigned char, nelts);

	  for (i = 0; i < nelts; i++)
	    switch (code)
	      {
	      case VEC_EXTRACT_EVEN_EXPR:
		sel[i] = i * 2;
		break;
	      case VEC_EXTRACT_ODD_EXPR:
		sel[i] = i * 2 + 1;
		break;
	      case VEC_INTERLEAVE_HIGH_EXPR:
		sel[i] = (i + (BYTES_BIG_ENDIAN ? 0 : nelts)) / 2
			 + ((i & 1) ? nelts : 0);
		break;
	      case VEC_INTERLEAVE_LOW_EXPR:
		sel[i] = (i + (BYTES_BIG_ENDIAN ? nelts : 0)) / 2
			 + ((i & 1) ? nelts : 0);
		break;
	      default:
		gcc_unreachable ();
	      }

	  return fold_vec_perm (type, arg0, arg1, sel);
	}
      return NULL_TREE;
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts, vals = NULL_TREE;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
	if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg0, elts)
	    || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
	  return NULL_TREE;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
					  ? NOP_EXPR : FIX_TRUNC_EXPR,
					  TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	for (i = 0; i < nelts; i++)
	  vals = tree_cons (NULL_TREE, elts[nelts - i - 1], vals);
	return build_vector (type, vals);
      }
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts, vals = NULL_TREE;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 4);
	if (!vec_cst_ctor_to_array (arg0, elts)
	    || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
	  return NULL_TREE;

	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_WIDEN_MULT_LO_EXPR))
	  elts += nelts;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[i]);
	    elts[i + nelts * 2]
	      = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
				    elts[i + nelts * 2]);
	    if (elts[i] == NULL_TREE || elts[i + nelts * 2] == NULL_TREE)
	      return NULL_TREE;
	    elts[i] = const_binop (MULT_EXPR, elts[i], elts[i + nelts * 2]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	for (i = 0; i < nelts; i++)
	  vals = tree_cons (NULL_TREE, elts[nelts - i - 1], vals);
	return build_vector (type, vals);
      }

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
   a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
   of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;

    case GOTO_EXPR:
      *walk_subtrees = 0;

      /* ... fall through ...  */

    default:
      return NULL_TREE;
    }
}

/* Return whether the sub-tree ST contains a label which is accessible from
   outside the sub-tree.  */

static bool
contains_label_p (tree st)
{
  return
   (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
}
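/* Usage sketch (hypothetical caller): contains_label_p lets the COND_EXPR
   folding below discard a dead arm only when that is safe, e.g.

     if (!contains_label_p (dead_arm))
       ... drop dead_arm ...

   since a label inside the dead arm may still be reached by a goto from
   outside it.  */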
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary_loc (location_t loc, enum tree_code code, tree type,
		  tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 3);

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any cases,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  if (op2)
    {
      arg2 = op2;
      STRIP_NOPS (arg2);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
	    if (field == arg1)
	      return value;
	}
      return NULL_TREE;

    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
	 so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
	  tem = integer_zerop (arg0) ? op2 : op1;
	  /* Only optimize constant conditions when the selected branch
	     has the same type as the COND_EXPR.  This avoids optimizing
	     away "c ? x : throw", where the throw has a void type.
	     Avoid throwing away that operand which contains label.  */
	  if ((!TREE_SIDE_EFFECTS (unused_op)
	       || !contains_label_p (unused_op))
	      && (! VOID_TYPE_P (TREE_TYPE (tem))
		  || VOID_TYPE_P (type)))
	    return pedantic_non_lvalue_loc (loc, tem);
	  return NULL_TREE;
	}
      if (operand_equal_p (arg1, op2, 0))
	return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);

      /* If we have A op B ? A : C, we may be able to convert this to a
	 simpler expression, depending on the operation and the values
	 of B and C.  Signed zeros prevent all of these transformations,
	 for reasons given above each one.

	 Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     arg1, TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
	{
	  tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
	  if (tem)
	    return tem;
	}

      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     op2,
					     TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  tem = fold_truth_not_expr (loc0, arg0);
	  if (tem && COMPARISON_CLASS_P (tem))
	    {
	      tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
	      if (tem)
		return tem;
	    }
	}

      /* If the second operand is simpler than the third, swap them
	 since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
	  && tree_swap_operands_p (op1, op2, false))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = fold_truth_not_expr (loc0, arg0);
	  if (tem)
	    return fold_build3_loc (loc, code, type, tem, op2, op1);
	}

      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (op1)
	  && integer_zerop (op2)
	  /* If we try to convert OP0 to our type, the
	     call to fold will try to move the conversion inside
	     a COND, which will recurse.  In that case, the COND_EXPR
	     is probably the best choice, so leave it alone.  */
	  && type == TREE_TYPE (arg0))
	return pedantic_non_lvalue_loc (loc, arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
	 over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
	  && integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0)))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  invert_truthvalue_loc (loc,
										 arg0)));
      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
	{
	  /* sign_bit_p only checks ARG1 bits within A's precision.
	     If <sign bit of A> has wider type than A, bits outside
	     of A's precision in <sign bit of A> need to be checked.
	     If they are all 0, this optimization needs to be done
	     in unsigned A's type, if they are all 1 in signed A's type,
	     otherwise this can't be done.  */
	  if (TYPE_PRECISION (TREE_TYPE (tem))
	      < TYPE_PRECISION (TREE_TYPE (arg1))
	      && TYPE_PRECISION (TREE_TYPE (tem))
		 < TYPE_PRECISION (type))
	    {
	      unsigned HOST_WIDE_INT mask_lo;
	      HOST_WIDE_INT mask_hi;
	      int inner_width, outer_width;
	      tree tem_type;

	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
	      if (outer_width > TYPE_PRECISION (type))
		outer_width = TYPE_PRECISION (type);

	      if (outer_width > HOST_BITS_PER_WIDE_INT)
		{
		  mask_hi = ((unsigned HOST_WIDE_INT) -1
			     >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
		  mask_lo = -1;
		}
	      else
		{
		  mask_hi = 0;
		  mask_lo = ((unsigned HOST_WIDE_INT) -1
			     >> (HOST_BITS_PER_WIDE_INT - outer_width));
		}
	      if (inner_width > HOST_BITS_PER_WIDE_INT)
		{
		  mask_hi &= ~((unsigned HOST_WIDE_INT) -1
			       >> (HOST_BITS_PER_WIDE_INT - inner_width));
		  mask_lo = 0;
		}
	      else
		mask_lo &= ~((unsigned HOST_WIDE_INT) -1
			     >> (HOST_BITS_PER_WIDE_INT - inner_width));

	      if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
		  && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
		{
		  tem_type = signed_type_for (TREE_TYPE (tem));
		  tem = fold_convert_loc (loc, tem_type, tem);
		}
	      else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
		       && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
		{
		  tem_type = unsigned_type_for (TREE_TYPE (tem));
		  tem = fold_convert_loc (loc, tem_type, tem);
		}
	      else
		tem = NULL_TREE;
	    }

	  if (tem)
	    return
	      fold_convert_loc (loc, type,
				fold_build2_loc (loc, BIT_AND_EXPR,
						 TREE_TYPE (tem), tem,
						 fold_convert_loc (loc,
								   TREE_TYPE (tem),
								   arg1)));
	}
      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
	 already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && integer_pow2p (arg1))
	{
	  tree tem = TREE_OPERAND (arg0, 0);
	  STRIP_NOPS (tem);
	  if (TREE_CODE (tem) == RSHIFT_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
	      && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
		 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
	    return fold_build2_loc (loc, BIT_AND_EXPR, type,
				    TREE_OPERAND (tem, 0), arg1);
	}

      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
	 is probably obsolete because the first operand should be a
	 truth value (that's why we have the two cases above), but let's
	 leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
	  && TREE_CODE (arg0) == NE_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_pow2p (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      arg1, OEP_ONLY_CONST))
	return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type,
							  TREE_OPERAND (arg0, 0)));

      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				fold_convert_loc (loc, type, arg0),
				arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_truth_not_expr (loc0, arg0);
	  if (tem)
	    return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
				    fold_convert_loc (loc, type, tem),
				    arg1);
	}

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	{
	  location_t loc0 = expr_location_or (arg0, loc);
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = fold_truth_not_expr (loc0, arg0);
	  if (tem)
	    return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				    fold_convert_loc (loc, type, tem),
				    op2);
	}

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (integer_onep (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
				fold_convert_loc (loc, type, arg0),
				op2);

      return NULL_TREE;
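      /* Summary example of the truth-value rewrites above, for truth
	 values a and b (illustrative source forms):

	   a ? 1 : 0  -->  a
	   a ? 0 : 1  -->  !a
	   a ? b : 0  -->  a && b
	   a ? b : 1  -->  !a || b
	   a ? 0 : b  -->  !a && b
	   a ? 1 : b  -->  a || b

	 The inverted forms are only produced when !a is easily built.  */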
    case CALL_EXPR:
      /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
	 of fold_ternary on them.  */
      gcc_unreachable ();

    case BIT_FIELD_REF:
      if ((TREE_CODE (arg0) == VECTOR_CST
	   || TREE_CODE (arg0) == CONSTRUCTOR)
	  && type == TREE_TYPE (TREE_TYPE (arg0)))
	{
	  unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
	  unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);

	  if (width != 0
	      && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
	      && (idx % width) == 0
	      && (idx = idx / width)
		 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
	    {
	      if (TREE_CODE (arg0) == VECTOR_CST)
		{
		  tree elements = TREE_VECTOR_CST_ELTS (arg0);
		  while (idx-- > 0 && elements)
		    elements = TREE_CHAIN (elements);
		  if (elements)
		    return TREE_VALUE (elements);
		}
	      else if (idx < CONSTRUCTOR_NELTS (arg0))
		return CONSTRUCTOR_ELT (arg0, idx)->value;
	      return build_zero_cst (type);
	    }
	}

      /* A bit-field-ref that referenced the full argument can be stripped.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	  && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
	  && integer_zerop (op2))
	return fold_convert_loc (loc, type, arg0);

      return NULL_TREE;
    case FMA_EXPR:
      /* For integers we can decompose the FMA if possible.  */
      if (TREE_CODE (arg0) == INTEGER_CST
	  && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, PLUS_EXPR, type,
				const_binop (MULT_EXPR, arg0, arg1), arg2);
      if (integer_zerop (arg2))
	return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);

      return fold_fma (loc, type, arg0, arg1, arg2);
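      /* Example of the FMA_EXPR decomposition above (constants are
	 illustrative): FMA (4, 5, c) folds to 4 * 5 + c, i.e. 20 + c,
	 and FMA (a, b, 0) folds to a * b.  Anything else is left to
	 fold_fma.  */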
    case VEC_PERM_EXPR:
      if (TREE_CODE (arg2) == VECTOR_CST)
	{
	  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	  unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
	  tree t;
	  bool need_mask_canon = false;

	  gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)));
	  for (i = 0, t = TREE_VECTOR_CST_ELTS (arg2);
	       i < nelts && t; i++, t = TREE_CHAIN (t))
	    {
	      if (TREE_CODE (TREE_VALUE (t)) != INTEGER_CST)
		return NULL_TREE;

	      sel[i] = TREE_INT_CST_LOW (TREE_VALUE (t)) & (2 * nelts - 1);
	      if (TREE_INT_CST_HIGH (TREE_VALUE (t))
		  || ((unsigned HOST_WIDE_INT)
		      TREE_INT_CST_LOW (TREE_VALUE (t)) != sel[i]))
		need_mask_canon = true;
	    }
	  if (t)
	    return NULL_TREE;
	  for (; i < nelts; i++)
	    sel[i] = 0;

	  if ((TREE_CODE (arg0) == VECTOR_CST
	       || TREE_CODE (arg0) == CONSTRUCTOR)
	      && (TREE_CODE (arg1) == VECTOR_CST
		  || TREE_CODE (arg1) == CONSTRUCTOR))
	    {
	      t = fold_vec_perm (type, arg0, arg1, sel);
	      if (t != NULL_TREE)
		return t;
	    }

	  if (need_mask_canon && arg2 == op2)
	    {
	      tree list = NULL_TREE, eltype = TREE_TYPE (TREE_TYPE (arg2));
	      for (i = 0; i < nelts; i++)
		list = tree_cons (NULL_TREE,
				  build_int_cst (eltype, sel[nelts - i - 1]),
				  list);
	      t = build_vector (TREE_TYPE (arg2), list);
	      return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, t);
	    }
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;
  location_t loc = EXPR_LOCATION (expr);

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  /* CALL_EXPR-like objects with variable numbers of operands are
     treated specially.  */
  if (kind == tcc_vl_exp)
    {
      if (code == CALL_EXPR)
	{
	  tem = fold_call_expr (loc, expr, false);
	  return tem ? tem : expr;
	}
      return expr;
    }

  if (IS_EXPR_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  op0 = TREE_OPERAND (t, 0);
	  tem = fold_unary_loc (loc, code, type, op0);
	  return tem ? tem : expr;
	case 2:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  tem = fold_binary_loc (loc, code, type, op0, op1);
	  return tem ? tem : expr;
	case 3:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  op2 = TREE_OPERAND (t, 2);
	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
	  return tem ? tem : expr;
	default:
	  break;
	}
    }

  switch (code)
    {
    case ARRAY_REF:
      {
	tree op0 = TREE_OPERAND (t, 0);
	tree op1 = TREE_OPERAND (t, 1);

	if (TREE_CODE (op1) == INTEGER_CST
	    && TREE_CODE (op0) == CONSTRUCTOR
	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
	  {
	    VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
	    unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
	    unsigned HOST_WIDE_INT begin = 0;

	    /* Find a matching index by means of a binary search.  */
	    while (begin != end)
	      {
		unsigned HOST_WIDE_INT middle = (begin + end) / 2;
		tree index = VEC_index (constructor_elt, elts, middle)->index;

		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_lt (index, op1))
		  begin = middle + 1;
		else if (TREE_CODE (index) == INTEGER_CST
			 && tree_int_cst_lt (op1, index))
		  end = middle;
		else if (TREE_CODE (index) == RANGE_EXPR
			 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
		  begin = middle + 1;
		else if (TREE_CODE (index) == RANGE_EXPR
			 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
		  end = middle;
		else
		  return VEC_index (constructor_elt, elts, middle)->value;
	      }
	  }

	return t;
      }

    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
static void fold_check_failed (const_tree, const_tree);
void print_fold_checksum (const_tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}

void
print_fold_checksum (const_tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_delete (ht);
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}

static void
fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}

static void
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
{
  void **slot;
  enum tree_code code;
  union tree_node buf;
  int i, len;

 recursive_label:
  if (expr == NULL)
    return;
  slot = (void **) htab_find_slot (ht, expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = CONST_CAST_TREE (expr);
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
      expr = (tree) &buf;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr)
	       || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)
	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
	       || TYPE_NEXT_VARIANT (expr)))
    {
      /* Allow these fields to be modified.  */
      tree tmp;
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = tmp = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
      TYPE_POINTER_TO (tmp) = NULL;
      TYPE_REFERENCE_TO (tmp) = NULL;
      TYPE_NEXT_VARIANT (tmp) = NULL;
      if (TYPE_CACHED_VALUES_P (tmp))
	{
	  TYPE_CACHED_VALUES_P (tmp) = 0;
	  TYPE_CACHED_VALUES (tmp) = NULL;
	}
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST
      && code != SSA_NAME
      && CODE_CONTAINS_STRUCT (code, TS_COMMON))
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  expr = TREE_CHAIN (expr);
	  goto recursive_label;
	  break;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
    case tcc_vl_exp:
      len = TREE_OPERAND_LENGTH (expr);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
	{
	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
	}
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
	fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);

      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
	{
	  fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
	  fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
	}
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}

/* Helper function for outputting the checksum of a tree T.  When
   debugging with gdb, you can "define mynext" to be "next" followed
   by "call debug_fold_checksum (op0)", then just trace down till the
   outputs differ.  */

DEBUG_FUNCTION void
debug_fold_checksum (const_tree t)
{
  int i;
  unsigned char checksum[16];
  struct md5_ctx ctx;
  htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);

  md5_init_ctx (&ctx);
  fold_checksum_tree (t, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_empty (ht);

  for (i = 0; i < 16; i++)
    fprintf (stderr, "%d ", checksum[i]);

  fprintf (stderr, "\n");
}

#endif
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  LOC is the location of the resulting expression.
   Return a folded expression if successful.  Otherwise, return a tree
   expression with code CODE of type TYPE with an operand OP0.  */

tree
fold_build1_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);
#endif

  tem = fold_unary_loc (loc, code, type, op0);
  if (!tem)
    tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting
   expression.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with operands
   OP0 and OP1.  */

tree
fold_build2_stat_loc (location_t loc,
		      enum tree_code code, tree type, tree op0, tree op1
		      MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_after_op0[16],
                checksum_after_op1[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);
#endif

  tem = fold_binary_loc (loc, code, type, op0, op1);
  if (!tem)
    tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_delete (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}
/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
		      tree op0, tree op1, tree op2 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_before_op2[16],
                checksum_after_op0[16],
                checksum_after_op1[16],
                checksum_after_op2[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  htab_empty (ht);
#endif

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_empty (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op2);
  htab_delete (ht);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
   arguments in ARGARRAY, and a null static chain.
   Return a folded expression if successful.  Otherwise, return a CALL_EXPR
   of type TYPE from the given operands as constructed by build_call_array.  */

tree
fold_build_call_array_loc (location_t loc, tree type, tree fn,
			   int nargs, tree *argarray)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_fn[16],
                checksum_before_arglist[16],
                checksum_after_fn[16],
                checksum_after_arglist[16];
  struct md5_ctx ctx;
  htab_t ht;
  int i;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_fn);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_arglist);
  htab_empty (ht);
#endif

  tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_fn);
  htab_empty (ht);

  if (memcmp (checksum_before_fn, checksum_after_fn, 16))
    fold_check_failed (fn, tem);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_arglist);
  htab_delete (ht);

  if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
    fold_check_failed (NULL_TREE, tem);
#endif
  return tem;
}
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;

tree
fold_build1_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1_loc (loc, code, type, op);

  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2_loc (loc, code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

tree
fold_build3_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op0, tree op1, tree op2)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build3_loc (loc, code, type, op0, op1, op2);

  END_FOLD_INIT;
  return result;
}

tree
fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
				       int nargs, tree *argarray)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
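/* Usage sketch (hypothetical front-end code): when folding a static
   initializer such as

     static double d = 1.0 / 3.0;

   a caller would use fold_build2_initializer_loc rather than
   fold_build2_loc, so the division is folded at compile time even when
   -ftrapping-math or -frounding-math would otherwise force fold to
   preserve a run-time evaluation.  */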
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determined it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

int
multiple_of_p (tree type, const_tree top, const_tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
	return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
	      > TREE_INT_CST_LOW (op1)
	      && TREE_INT_CST_HIGH (op1) == 0
	      && 0 != (t1 = fold_convert (type,
					  const_binop (LSHIFT_EXPR,
						       size_one_node,
						       op1)))
	      && !TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case COND_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
	  || integer_zerop (bottom)
	  || (TYPE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
					     top, bottom));

    default:
      return 0;
    }
}
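/* Example of multiple_of_p (example trees only): with

     top = SAVE_EXPR (i) * SAVE_EXPR (j * 8), bottom = 8

   the MULT_EXPR case succeeds, because its second factor contains the
   constant multiplier 8; whereas top = i + 4, bottom = 8 fails, since
   the PLUS_EXPR case requires *both* operands to be multiples.  */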
/* Return true if CODE or TYPE is known to be non-negative.  */

static bool
tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
{
  if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
      && truth_value_p (code))
    /* Truth values evaluate to 0 or 1, which is nonnegative unless we
       have a signed:1 type (where the value is -1 and 0).  */
    return true;

  return false;
}
/* Return true if (CODE OP0) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
	 ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!INTEGRAL_TYPE_P (type))
	return true;
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  *strict_overflow_p = true;
	  return true;
	}
      break;

    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
					    strict_overflow_p);

    CASE_CONVERT:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (op0,
						    strict_overflow_p);
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      {
		if (TYPE_UNSIGNED (inner_type))
		  return true;
		return tree_expr_nonnegative_warnv_p (op0,
						      strict_overflow_p);
	      }
	  }
	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (op0,
						    strict_overflow_p);
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TYPE_UNSIGNED (inner_type);
	  }
      }
      break;

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				 tree op1, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type))
	return (tree_expr_nonnegative_warnv_p (op0,
					       strict_overflow_p)
		&& tree_expr_nonnegative_warnv_p (op1,
						  strict_overflow_p));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == NOP_EXPR
	  && TREE_CODE (op1) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    {
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (type);
	    }
	}
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (type))
	{
	  /* x * x for floating point x is always non-negative.  */
	  if (operand_equal_p (op0, op1, 0))
	    return true;
	  return (tree_expr_nonnegative_warnv_p (op0,
						 strict_overflow_p)
		  && tree_expr_nonnegative_warnv_p (op1,
						    strict_overflow_p));
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
	  && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
	{
	  tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
	    ? TREE_TYPE (TREE_OPERAND (op0, 0))
	    : TREE_TYPE (op0);
	  tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
	    ? TREE_TYPE (TREE_OPERAND (op1, 0))
	    : TREE_TYPE (op1);

	  bool unsigned0 = TYPE_UNSIGNED (inner0);
	  bool unsigned1 = TYPE_UNSIGNED (inner1);

	  if (TREE_CODE (op0) == INTEGER_CST)
	    unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;

	  if (TREE_CODE (op1) == INTEGER_CST)
	    unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;

	  if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
	      && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
	    {
	      unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
		? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
		: TYPE_PRECISION (inner0);

	      unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
		? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
		: TYPE_PRECISION (inner1);

	      return precision0 + precision1 < TYPE_PRECISION (type);
	    }
	}
      return false;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
					     strict_overflow_p)
	      || tree_expr_nonnegative_warnv_p (op1,
						strict_overflow_p));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (op1,
						strict_overflow_p));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
					    strict_overflow_p);
    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
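/* Example of the widening rules above, assuming 8-bit unsigned chars
   a and b promoted to a 32-bit int (example types only):

     (int) a + (int) b  is nonnegative: both operands are zero-extended
			and at least 2 bits narrower than int;
     (int) a * (int) b  is nonnegative: 8 + 8 < 32 bits.

   For a float x, x * x is always nonnegative whatever the sign of x.  */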
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
						strict_overflow_p));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t));
    }
  /* We don't know sign of `t', so be conservative and return false.  */
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_call_nonnegative_warnv_p (tree type, tree fndecl,
			       tree arg0, tree arg1, bool *strict_overflow_p)
{
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
	CASE_FLT_FN (BUILT_IN_ACOS):
	CASE_FLT_FN (BUILT_IN_ACOSH):
	CASE_FLT_FN (BUILT_IN_CABS):
	CASE_FLT_FN (BUILT_IN_COSH):
	CASE_FLT_FN (BUILT_IN_ERFC):
	CASE_FLT_FN (BUILT_IN_EXP):
	CASE_FLT_FN (BUILT_IN_EXP10):
	CASE_FLT_FN (BUILT_IN_EXP2):
	CASE_FLT_FN (BUILT_IN_FABS):
	CASE_FLT_FN (BUILT_IN_FDIM):
	CASE_FLT_FN (BUILT_IN_HYPOT):
	CASE_FLT_FN (BUILT_IN_POW10):
	CASE_INT_FN (BUILT_IN_FFS):
	CASE_INT_FN (BUILT_IN_PARITY):
	CASE_INT_FN (BUILT_IN_POPCOUNT):
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
	/* Always true.  */
	return true;

	CASE_FLT_FN (BUILT_IN_SQRT):
	/* sqrt(-0.0) is -0.0.  */
	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	  return true;
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_ASINH):
	CASE_FLT_FN (BUILT_IN_ATAN):
	CASE_FLT_FN (BUILT_IN_ATANH):
	CASE_FLT_FN (BUILT_IN_CBRT):
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_ERF):
	CASE_FLT_FN (BUILT_IN_EXPM1):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_FMOD):
	CASE_FLT_FN (BUILT_IN_FREXP):
	CASE_FLT_FN (BUILT_IN_ICEIL):
	CASE_FLT_FN (BUILT_IN_IFLOOR):
	CASE_FLT_FN (BUILT_IN_IRINT):
	CASE_FLT_FN (BUILT_IN_IROUND):
	CASE_FLT_FN (BUILT_IN_LCEIL):
	CASE_FLT_FN (BUILT_IN_LDEXP):
	CASE_FLT_FN (BUILT_IN_LFLOOR):
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	CASE_FLT_FN (BUILT_IN_LLRINT):
	CASE_FLT_FN (BUILT_IN_LLROUND):
	CASE_FLT_FN (BUILT_IN_LRINT):
	CASE_FLT_FN (BUILT_IN_LROUND):
	CASE_FLT_FN (BUILT_IN_MODF):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_SCALB):
	CASE_FLT_FN (BUILT_IN_SCALBLN):
	CASE_FLT_FN (BUILT_IN_SCALBN):
	CASE_FLT_FN (BUILT_IN_SIGNBIT):
	CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
	CASE_FLT_FN (BUILT_IN_SINH):
	CASE_FLT_FN (BUILT_IN_TANH):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	/* True if the 1st argument is nonnegative.  */
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_FMAX):
	/* True if the 1st OR 2nd arguments are nonnegative.  */
	return (tree_expr_nonnegative_warnv_p (arg0,
					       strict_overflow_p)
		|| (tree_expr_nonnegative_warnv_p (arg1,
						   strict_overflow_p)));

	CASE_FLT_FN (BUILT_IN_FMIN):
	/* True if the 1st AND 2nd arguments are nonnegative.  */
	return (tree_expr_nonnegative_warnv_p (arg0,
					       strict_overflow_p)
		&& (tree_expr_nonnegative_warnv_p (arg1,
						   strict_overflow_p)));

	CASE_FLT_FN (BUILT_IN_COPYSIGN):
	/* True if the 2nd argument is nonnegative.  */
	return tree_expr_nonnegative_warnv_p (arg1,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_POWI):
	/* True if the 1st argument is nonnegative or the second
	   argument is an even integer.  */
	if (TREE_CODE (arg1) == INTEGER_CST
	    && (TREE_INT_CST_LOW (arg1) & 1) == 0)
	  return true;
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_POW):
	/* True if the 1st argument is nonnegative or the second
	   argument is an even integer valued real.  */
	if (TREE_CODE (arg1) == REAL_CST)
	  {
	    REAL_VALUE_TYPE c;
	    HOST_WIDE_INT n;

	    c = TREE_REAL_CST (arg1);
	    n = real_to_integer (&c);
	    if ((n & 1) == 0)
	      {
		REAL_VALUE_TYPE cint;
		real_from_integer (&cint, VOIDmode, n,
				   n < 0 ? -1 : 0, 0);
		if (real_identical (&c, &cint))
		  return true;
	      }
	  }
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

      default:
	break;
      }
  return tree_simple_nonnegative_warnv_p (CALL_EXPR,
					  type);
}
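
/* Worked example: pow (x, 2.0) is nonnegative for every x because the
   exponent is an even integer-valued real: real_to_integer yields n = 2,
   (n & 1) == 0, and converting 2 back via real_from_integer reproduces
   2.0 exactly, so real_identical succeeds.  pow (x, 2.5) fails that
   round-trip and is nonnegative only if x is.  */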
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
	tree temp = TARGET_EXPR_SLOT (t);
	t = TARGET_EXPR_INITIAL (t);

	/* If the initializer is non-void, then it's a normal expression
	   that will be assigned to the slot.  */
	if (!VOID_TYPE_P (t))
	  return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

	/* Otherwise, the initializer sets the slot in some way.  One common
	   way is an assignment statement at the end of the initializer.  */
	while (1)
	  {
	    if (TREE_CODE (t) == BIND_EXPR)
	      t = expr_last (BIND_EXPR_BODY (t));
	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
		     || TREE_CODE (t) == TRY_CATCH_EXPR)
	      t = expr_last (TREE_OPERAND (t, 0));
	    else if (TREE_CODE (t) == STATEMENT_LIST)
	      t = expr_last (t);
	    else
	      break;
	  }
	if (TREE_CODE (t) == MODIFY_EXPR
	    && TREE_OPERAND (t, 0) == temp)
	  return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						strict_overflow_p);

	return false;
      }

    case CALL_EXPR:
      {
	tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
	tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;

	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
					      get_callee_fndecl (t),
					      arg0,
					      arg1,
					      strict_overflow_p);
      }

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					    strict_overflow_p);

    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
					    strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
					    strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t));
    }

  /* We don't know the sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code;

  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);

    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
    }
}
/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-negative"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
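
/* Usage sketch (hypothetical caller): a pass that must defer the
   diagnostic can use the _warnv_p variant directly instead of this
   wrapper:

     bool sub_strict_overflow_p = false;
     if (tree_expr_nonnegative_warnv_p (expr, &sub_strict_overflow_p)
	 && sub_strict_overflow_p)
       record_overflow_assumption ();

   where record_overflow_assumption stands in for whatever bookkeeping
   the caller needs; this is the pattern used throughout this file.  */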
/* Return true when (CODE OP0) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
			    bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
		&& tree_expr_nonzero_warnv_p (op0,
					      strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    default:
      break;
    }

  return false;
}
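
/* Worked example: the precision test in the NOP_EXPR case matters
   because narrowing can create zeros.  Widening preserves a nonzero
   value ((long) 256 != 0), but (unsigned char) 256 == 0 even though
   256 is nonzero.  */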
/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonzero_warnv_p (enum tree_code code,
			     tree type,
			     tree op0,
			     tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* With the presence of negative values it is hard
	     to say something.  */
	  sub_strict_overflow_p = false;
	  if (!tree_expr_nonnegative_warnv_p (op0,
					      &sub_strict_overflow_p)
	      || !tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	    return false;
	  /* One of operands must be positive and the other non-negative.  */
	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
	     overflows, on a twos-complement machine the sum of two
	     nonnegative numbers can never be zero.  */
	  return (tree_expr_nonzero_warnv_p (op0,
					     strict_overflow_p)
		  || tree_expr_nonzero_warnv_p (op1,
						strict_overflow_p));
	}
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  if (tree_expr_nonzero_warnv_p (op0,
					 strict_overflow_p)
	      && tree_expr_nonzero_warnv_p (op1,
					    strict_overflow_p))
	    {
	      *strict_overflow_p = true;
	      return true;
	    }
	}
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (op1,
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;

	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p))
	    return true;

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_warnv_p (op0,
						strict_overflow_p);
	}
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
					  &sub_strict_overflow_p)
	       && tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p)
	      || tree_expr_nonzero_warnv_p (op0,
					    strict_overflow_p));

    default:
      break;
    }

  return false;
}
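
/* Worked example: in the PLUS_EXPR case, two nonnegative signed 8-bit
   values each lie in [0, 127], so their sum lies in [0, 254] and can
   never hit the wrap-around point 256.  Hence the sum is zero only when
   both operands are, even when it overflows into a negative value
   (100 + 100 wraps to -56, which is still nonzero).  */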
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
	tree base = TREE_OPERAND (t, 0);

	if (!DECL_P (base))
	  base = get_base_address (base);

	if (!base)
	  return false;

	/* Weak declarations may link to NULL.  Other things may also be NULL
	   so protect with -fdelete-null-pointer-checks; but not variables
	   allocated on the stack.  */
	if (DECL_P (base)
	    && (flag_delete_null_pointer_checks
		|| (DECL_CONTEXT (base)
		    && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
		    && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
	  return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);

	/* Constants are never weak.  */
	if (CONSTANT_CLASS_P (base))
	  return true;

	return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    default:
      break;
    }

  return false;
}
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					strict_overflow_p);

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }

  return false;
}
/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-zero"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
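
/* Usage sketch (hypothetical operands): these helpers return either a
   constant or NULL_TREE, never a partially simplified tree.

     fold_binary_to_constant (PLUS_EXPR, integer_type_node,
			      build_int_cst (integer_type_node, 2),
			      build_int_cst (integer_type_node, 3))

   yields the INTEGER_CST 5, while the same call with a VAR_DECL as an
   operand yields NULL_TREE because "2 + x" does not fold to a
   constant.  */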
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop_loc (loc, index,
				     fold_convert_loc (loc, sizetype,
						       low_bound));

	  string = exp1;
	}

      if (string
	  && TYPE_MODE (TREE_TYPE (exp))
	     == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return build_int_cst_type (TREE_TYPE (exp),
				   (TREE_STRING_POINTER (string)
				    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
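
/* Worked example: for "abc"[1] the STRING_CST has length 4 (including
   the trailing NUL), the index 1 is an INTEGER_CST below that length,
   and the element type has a one-byte integer mode, so the reference
   folds to the character constant 'b' (98).  */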
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	double_int val = tree_to_double_int (arg0);
	int overflow = neg_double (val.low, val.high, &val.low, &val.high);

	t = force_fit_type_double (type, val, 1,
				   (overflow | TREE_OVERFLOW (arg0))
				   && !TYPE_UNSIGNED (type));
	break;
      }

    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
	FIXED_VALUE_TYPE f;
	bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
					    &(TREE_FIXED_CST (arg0)), NULL,
					    TYPE_SATURATING (type));
	t = build_fixed (type, f);
	/* Propagate overflow flags.  */
	if (overflow_p | TREE_OVERFLOW (arg0))
	  TREE_OVERFLOW (t) = 1;
	break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
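
/* Worked example: negating the 32-bit INTEGER_CST -2147483648 is not
   representable, since +2147483648 exceeds INT_MAX; neg_double reports
   the wrap and force_fit_type_double then sets TREE_OVERFLOW on the
   result for a signed TYPE.  */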
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	double_int val = tree_to_double_int (arg0);

	/* If the value is unsigned or non-negative, then the absolute value
	   is the same as the ordinary value.  */
	if (TYPE_UNSIGNED (type)
	    || !double_int_negative_p (val))
	  t = arg0;

	/* If the value is negative, then the absolute value is
	   its negation.  */
	else
	  {
	    int overflow;

	    overflow = neg_double (val.low, val.high, &val.low, &val.high);
	    t = force_fit_type_double (type, val, -1,
				       overflow | TREE_OVERFLOW (arg0));
	  }
      }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
	t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (const_tree arg0, tree type)
{
  double_int val;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  val = double_int_not (tree_to_double_int (arg0));
  return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
}
/* Given CODE, a relational operator, the target type, TYPE, and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
					  TREE_REALPART (op0),
					  TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
					  TREE_IMAGPART (op0),
					  TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
	return NULL_TREE;
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
	result = INT_CST_LT_UNSIGNED (op0, op1);
      else
	result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;

  return constant_boolean_node (result, type);
}
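
/* Worked example: to fold 2 >= 3 the code above sets invert = 1 and
   rewrites GE as LT, computes INT_CST_LT (2, 3) = 1, and flips it,
   yielding the boolean constant 0.  GT is instead handled by swapping
   the operands and doing LT.  */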
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside the
     return, or the right-hand side of the modify expression inside the
     return, has side effects.  If either of them has none, we don't need to
     wrap the expression in a cleanup point expression.  Note we don't check
     the left-hand side of the modify because it should always be a return
     decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

static tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);

      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype)
	       && (!in_gimple_form
		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (in_gimple_form
	      && TREE_CODE (min_val) != INTEGER_CST)
	    return NULL_TREE;
	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
			     NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op,
				  part_width, index);
	}
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
	{
	  tree op00type;
	  op00 = TREE_OPERAND (op00, 0);
	  op00type = TREE_TYPE (op00);

	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
	  if (TREE_CODE (op00type) == VECTOR_TYPE
	      && type == TREE_TYPE (op00type))
	    {
	      HOST_WIDE_INT offset = tree_low_cst (op01, 0);
	      tree part_width = TYPE_SIZE (type);
	      unsigned HOST_WIDE_INT part_widthi
		= tree_low_cst (part_width, 0) / BITS_PER_UNIT;
	      unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	      tree index = bitsize_int (indexi);

	      if (offset / part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
		return fold_build3_loc (loc,
					BIT_FIELD_REF, type, op00,
					part_width, index);
	    }
	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree size = TYPE_SIZE_UNIT (type);
	      if (tree_int_cst_equal (size, op01))
		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
	    }
	  /* ((foo *)&fooarray)[1] => fooarray[1] */
	  else if (TREE_CODE (op00type) == ARRAY_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree type_domain = TYPE_DOMAIN (op00type);
	      tree min_val = size_zero_node;
	      if (type_domain && TYPE_MIN_VALUE (type_domain))
		min_val = TYPE_MIN_VALUE (type_domain);
	      op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
				     TYPE_SIZE_UNIT (type));
	      op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
	      return build4_loc (loc, ARRAY_REF, type, op00, op01,
				 NULL_TREE, NULL_TREE);
	    }
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
	  && TREE_CODE (min_val) != INTEGER_CST)
	return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
			 NULL_TREE);
    }

  return NULL_TREE;
}
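
/* Worked example: for _Complex float z on a target where float is four
   bytes, ((float *) &z)[1] arrives here as *(&z p+ 4); the
   POINTER_PLUS_EXPR offset 4 equals TYPE_SIZE_UNIT (float), so the
   access folds to __imag__ z, just as offset 0 folds *(float *) &z to
   __real__ z through the ADDR_EXPR path.  */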
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}
/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  double_int val = tree_to_double_int (value);
	  bool overflow_p;

	  if ((val.low & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  val.low &= ~(divisor - 1);
	  val.low += divisor;
	  if (val.low == 0)
	    {
	      val.high++;
	      if (val.high == 0)
		overflow_p = true;
	    }

	  return force_fit_type_double (TREE_TYPE (value), val,
					-1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), -divisor);
	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
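
/* Worked example: for int a[10] on a target with 4-byte int, &a[5] and
   &a[2] share the core &a with bit positions 160 and 64; the difference
   (160 - 64) / 8 = 12 is stored in *DIFF and true is returned.  For
   &a[i] and &b[0] the cores differ, operand_equal_p fails, and the
   function returns false.  */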
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
				arg0 ? arg0 : TREE_OPERAND (exp, 0),
				arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
				arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3_loc (loc,
				COND_EXPR, TREE_TYPE (exp),
				TREE_OPERAND (exp, 0),
				arg0 ? arg0 : TREE_OPERAND (exp, 1),
				arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip copysign function call, return the 1st argument.  */
	    arg0 = CALL_EXPR_ARG (exp, 0);
	    arg1 = CALL_EXPR_ARG (exp, 1);
	    return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
		if (arg0)
		  return build_call_expr_loc (loc, get_callee_fndecl (exp),
					      1, arg0);