/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
#include "tree-flow.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
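
/* With COMPCODE_LT == 1, COMPCODE_EQ == 2, COMPCODE_GT == 4 and
   COMPCODE_UNORD == 8 acting as independent bits, the compound codes
   are plain bitwise combinations, for example

     COMPCODE_LE   == COMPCODE_LT | COMPCODE_EQ                    (3)
     COMPCODE_LTGT == COMPCODE_LT | COMPCODE_GT                    (5)
     COMPCODE_NE   == COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD  (13)

   so ANDing or ORing two codes directly models the conjunction or
   disjunction of the corresponding comparisons; see
   combine_comparisons below.  */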
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc != UNKNOWN_LOCATION ? tloc : loc;
}
/* Similar to protected_set_expr_location, but never modify x in place,
   if location can and needs to be set, unshare it.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
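
/* Worked example: with 8-bit two's complement values a = 0x70 and
   b = 0x70, the truncated sum is 0xE0.  Here a ^ b is 0, so ~(a ^ b)
   has the sign bit set, and a ^ sum is 0x90, which also has the sign
   bit set; their AND is negative and the macro reports the overflow.
   In this file the macro is applied to the high words of double_int
   values, as in the MINUS_EXPR case of int_const_binop below.  */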
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is according to operand two, that
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
  if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
      && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
    uns = false;

  quo = double_int_divmod (tree_to_double_int (arg1),
                           tree_to_double_int (arg2),
                           uns, code, &rem);

  if (double_int_zero_p (rem))
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
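
/* Usage sketch (hypothetical operands): dividing the sizetype
   constant 12 by 4 with EXACT_DIV_EXPR leaves remainder zero, so the
   constant 3 is returned; dividing 13 by 4 leaves remainder 1, so the
   result is NULL_TREE and the caller knows the division does not fold
   exactly.  */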
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}
/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
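
/* A hypothetical caller uses the deferral machinery like this:

     fold_defer_overflow_warnings ();
     tem = fold_build2 (PLUS_EXPR, type, op0, op1);
     ...
     fold_undefer_overflow_warnings (result_is_used, stmt,
                                     WARN_STRICT_OVERFLOW_MISC);

   Any warning queued via fold_overflow_warning while deferral is
   active is either issued or dropped at the undefer point.  */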
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

static bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
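
/* Worked example: for a 32-bit signed type, prec is 32 and val holds
   the low word of T.  If T is INT_MIN, val equals
   (unsigned HOST_WIDE_INT) 1 << 31, the final comparison fails, and
   the function returns false: negating INT_MIN would overflow.  Every
   other value compares unequal and can be negated safely.  */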
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
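
/* Worked example for the RSHIFT_EXPR case above: for a 32-bit int x,
   (int) x >> 31 is either 0 or -1, so its negation is either 0 or 1,
   which is exactly (unsigned) x >> 31 converted back to int.  The
   rewrite therefore replaces -((int)x >> 31) with
   (int)((unsigned)x >> 31) without changing any result.  */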
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
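
/* Decomposition example: for IN = x - 5 and CODE == PLUS_EXPR, the
   literal 5 is found in the subtracted operand, so it is stored in
   *MINUS_LITP rather than *LITP, *CONP is null, and the variable part
   x is returned.  The caller can then recombine the pieces with
   associate_trees below.  */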
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = double_int_ior (op1, op2);
      break;

    case BIT_XOR_EXPR:
      res = double_int_xor (op1, op2);
      break;

    case BIT_AND_EXPR:
      res = double_int_and (op1, op2);
      break;

    case RSHIFT_EXPR:
      res = double_int_rshift (op1, double_int_to_shwi (op2),
                               TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = double_int_lshift (op1, double_int_to_shwi (op2),
                               TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = double_int_rrotate (op1, double_int_to_shwi (op2),
                                TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = double_int_lrotate (op1, double_int_to_shwi (op2),
                                TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      overflow = add_double (op1.low, op1.high, op2.low, op2.high,
                             &res.low, &res.high);
      break;

    case MINUS_EXPR:
      neg_double (op2.low, op2.high, &res.low, &res.high);
      add_double (op1.low, op1.high, res.low, res.high,
                  &res.low, &res.high);
      overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
      break;

    case MULT_EXPR:
      overflow = mul_double (op1.low, op1.high, op2.low, op2.high,
                             &res.low, &res.high);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (double_int_zero_p (op2))
        return NULL_TREE;
      if (double_int_one_p (op2))
        {
          res = op1;
          break;
        }
      if (double_int_equal_p (op1, op2)
          && ! double_int_zero_p (op1))
        {
          res = double_int_one;
          break;
        }
      overflow = div_and_round_double (code, uns,
                                       op1.low, op1.high, op2.low, op2.high,
                                       &res.low, &res.high,
                                       &tmp.low, &tmp.high);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (double_int_zero_p (op2))
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       op1.low, op1.high, op2.low, op2.high,
                                       &tmp.low, &tmp.high,
                                       &res.low, &res.high);
      break;

    case MIN_EXPR:
      res = double_int_min (op1, op2, uns);
      break;

    case MAX_EXPR:
      res = double_int_max (op1, op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, 1,
                             ((!uns || is_sizetype) && overflow)
                             | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
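
/* Worked example for the CEIL_DIV_EXPR shortcut above: dividing 7 by
   4 first biases the dividend to 7 + (4 - 1) = 10, and the truncating
   division 10 / 4 then yields 2, which is ceil (7 / 4).  The general
   path instead calls div_and_round_double and discards the
   remainder.  */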
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);
  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fall through.  */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }
  if (TREE_CODE (arg1) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree elements1, elements2, list = NULL_TREE;

      if (TREE_CODE (arg2) != VECTOR_CST)
        return NULL_TREE;

      elements1 = TREE_VECTOR_CST_ELTS (arg1);
      elements2 = TREE_VECTOR_CST_ELTS (arg2);

      for (i = 0; i < count; i++)
        {
          tree elem1, elem2, elem;

          /* The trailing elements can be empty and should be treated as 0.  */
          if (!elements1)
            elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                        integer_zero_node);
          else
            {
              elem1 = TREE_VALUE (elements1);
              elements1 = TREE_CHAIN (elements1);
            }

          if (!elements2)
            elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                        integer_zero_node);
          else
            {
              elem2 = TREE_VALUE (elements2);
              elements2 = TREE_CHAIN (elements2);
            }

          elem = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elem == NULL_TREE)
            return NULL_TREE;

          list = tree_cons (NULL_TREE, elem, list);
        }
      return build_vector (type, nreverse (list));
    }
  return NULL_TREE;
}
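
/* Worked example for the straightforward complex division above:
   (3 + 4i) / (1 + 2i) computes t = 1*1 + 2*2 = 5, t1 = 3*1 + 4*2 = 11
   and t2 = 4*1 - 3*2 = -2, giving 11/5 + (-2/5)i, i.e. 2.2 - 0.4i in
   a floating-point type.  */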
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
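
/* Example (hypothetical operands): with sizetype constants ARG0 == 2
   and ARG1 == 5, ARG0 is the smaller, so we compute 5 - 2 == 3 in the
   unsigned type, convert the (known non-overflowing) result to
   ssizetype and negate it, producing -3 rather than the huge wrapped
   value an unsigned subtraction would give.  */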
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  enum machine_mode mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
    {
      temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
                                      HOST_BITS_PER_DOUBLE_INT,
                                      SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do this by adding 1 to temp when the fractional bits are not
     all zero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && double_int_negative_p (temp_trunc)
      && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
    temp = double_int_add (temp, double_int_one);

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
                             (double_int_negative_p (temp)
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
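
/* Usage sketch (hypothetical operands): converting the REAL_CST 1.5
   to integer_type_node with FIX_TRUNC_EXPR dispatches to
   fold_convert_const_int_from_real and yields the INTEGER_CST 1; an
   unhandled combination returns NULL_TREE so that callers can fall
   back to building an explicit conversion.  */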
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
              && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
          && (TYPE_ADDR_SPACE (TREE_TYPE (type))
              != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
        return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* Fall through.  */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        return fold_convert_loc (loc, type,
                                 fold_build1_loc (loc, REALPART_EXPR,
                                                  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == FIXED_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1_loc (loc, NOP_EXPR, type, arg);

        case FIXED_POINT_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
          || TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
          if (tem != NULL_TREE)
            goto fold_convert_exit;
        }

      switch (TREE_CODE (orig))
        {
        case FIXED_POINT_TYPE:
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case BOOLEAN_TYPE:
        case REAL_TYPE:
          return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert_loc (loc, type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
        case FIXED_POINT_TYPE:
          return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                  fold_convert_loc (loc, TREE_TYPE (type), arg),
                                  fold_convert_loc (loc, TREE_TYPE (type),
                                                    integer_zero_node));
        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 0));
                ipart = fold_convert_loc (loc, TREE_TYPE (type),
                                          TREE_OPERAND (arg, 1));
                return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
            ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
            return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
        return fold_build1_loc (loc, NOP_EXPR, type, arg);
      gcc_unreachable ();
    }
 fold_convert_exit:
  return protected_set_expr_location_unshare (tem, loc);
}
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
  {
  case VAR_DECL: case PARM_DECL: case RESULT_DECL: case LABEL_DECL:
  case FUNCTION_DECL: case SSA_NAME:

  case COMPONENT_REF: case MEM_REF: case INDIRECT_REF: case ARRAY_REF:
  case ARRAY_RANGE_REF: case BIT_FIELD_REF: case OBJ_TYPE_REF:

  case REALPART_EXPR: case IMAGPART_EXPR:
  case PREINCREMENT_EXPR:
  case PREDECREMENT_EXPR:
  case SAVE_EXPR:
  case TRY_CATCH_EXPR:
  case WITH_CLEANUP_EXPR:
  case COMPOUND_EXPR: case MODIFY_EXPR: case TARGET_EXPR:
  case COND_EXPR: case BIND_EXPR:
    break;

  default:
    /* Assume the worst for front-end tree codes.  */
    if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
      break;
    return false;
  }

  return true;
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue_loc (location_t loc, tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue_loc (location_t loc, tree x)
{
  if (pedantic_lvalues)
    return non_lvalue_loc (loc, x);

  return protected_set_expr_location_unshare (x, loc);
}
/* Given a tree comparison code, return the code that is the logical inverse.
   It is generally not safe to do this for floating-point comparisons, except
   for EQ_EXPR and NE_EXPR, so we return ERROR_MARK in this case.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR: case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR: case UNEQ_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (location_t loc,
                     enum tree_code code, enum tree_code lcode,
                     enum tree_code rcode, tree truth_type,
                     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  int compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
         which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
        compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
        compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
         under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
                   && (lcompcode != COMPCODE_EQ)
                   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
                   && (rcompcode != COMPCODE_EQ)
                   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
                  && (compcode != COMPCODE_EQ)
                  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
         such that the RHS, if evaluated, will never trap.  For
         example, in ORD (x, y) && (x < y), we evaluate the RHS only
         if neither x nor y is NaN.  (This is a mixed blessing: for
         example, the expression above will never trap, hence
         optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
          || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
        rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
         trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
          && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
        return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
        return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    {
      enum tree_code tcode;

      tcode = compcode_to_comparison ((enum comparison_code) compcode);
      return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
    }
}
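
/* For example, (x <= y) && (x >= y) combines as
   COMPCODE_LE & COMPCODE_GE == 3 & 6 == 2 == COMPCODE_EQ and folds to
   x == y, while (x < y) || (x == y) ORs to COMPCODE_LE and folds to
   x <= y, subject to the NaN and trapping checks above.  */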
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */

int
operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
      || TREE_TYPE (arg0) == error_mark_node
      || TREE_TYPE (arg1) == error_mark_node)
    return 0;

  /* Similar, if either does not have a type (like a released SSA name),
     they aren't equal.  */
  if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
    return 0;

  /* Check equality of integer constants before bailing out due to
     precision differences.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    return tree_int_cst_equal (arg0, arg1);

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  As pointers
     strictly don't have a signedness, require either two pointers or
     two non-pointers as well.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
      || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* We cannot consider pointers to different address space equal.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
      && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
          != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
    return 0;

  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
  if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
        return operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 1), flags)
               && operand_equal_p (TREE_OPERAND (arg0, 1),
                                   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
         Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.
     If we are taking an invariant address of two identical objects
     they are necessarily equal as well.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
          || (flags & OEP_CONSTANT_ADDRESS_OF)
          || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
        return tree_int_cst_equal (arg0, arg1);

      case FIXED_CST:
        return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
                                       TREE_FIXED_CST (arg1));

      case REAL_CST:
        if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
                                   TREE_REAL_CST (arg1)))
          return 1;

        if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
          {
            /* If we do not distinguish between signed and unsigned zero,
               consider them equal.  */
            if (real_zerop (arg0) && real_zerop (arg1))
              return 1;
          }
        return 0;

      case VECTOR_CST:
        {
          tree v1, v2;

          v1 = TREE_VECTOR_CST_ELTS (arg0);
          v2 = TREE_VECTOR_CST_ELTS (arg1);
          while (v1 && v2)
            {
              if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
                                    flags))
                return 0;
              v1 = TREE_CHAIN (v1);
              v2 = TREE_CHAIN (v2);
            }

          return v1 == v2;
        }

      case COMPLEX_CST:
        return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
                                 flags)
                && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
                                    flags));

      case STRING_CST:
        return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
                && ! memcmp (TREE_STRING_POINTER (arg0),
                             TREE_STRING_POINTER (arg1),
                             TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
        return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
                                TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
                                ? OEP_CONSTANT_ADDRESS_OF : 0);

      default:
        break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, then both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
                                    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)				\
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
        {
        CASE_CONVERT:
        case FIX_TRUNC_EXPR:
          if (TYPE_UNSIGNED (TREE_TYPE (arg0))
              != TYPE_UNSIGNED (TREE_TYPE (arg1)))
            return 0;
          break;
        default:
          break;
        }

      return OP_SAME (0);

    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
        return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 1), flags)
              && operand_equal_p (TREE_OPERAND (arg0, 1),
                                  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
         dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
          || TREE_SIDE_EFFECTS (arg1))
        return 0;

      switch (TREE_CODE (arg0))
        {
        case INDIRECT_REF:
        case REALPART_EXPR:
        case IMAGPART_EXPR:
          return OP_SAME (0);

        case MEM_REF:
          /* Require equal access sizes, and similar pointer types.
             We can have incomplete types for array references of
             variable-sized arrays from the Fortran frontend
             though.  */
          return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
                   || (TYPE_SIZE (TREE_TYPE (arg0))
                       && TYPE_SIZE (TREE_TYPE (arg1))
                       && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
                                           TYPE_SIZE (TREE_TYPE (arg1)),
                                           flags)))
                  && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
                      == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
                  && OP_SAME (0) && OP_SAME (1));

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          /* Operands 2 and 3 may be null.
             Compare the array index by value if it is constant first as we
             may have different types but same value here.  */
          return (OP_SAME (0)
                  && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
                                          TREE_OPERAND (arg1, 1))
                      || OP_SAME (1))
                  && OP_SAME_WITH_NULL (2)
                  && OP_SAME_WITH_NULL (3));

        case COMPONENT_REF:
          /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
             may be NULL when we're called to compare MEM_EXPRs.  */
          return OP_SAME_WITH_NULL (0)
                 && OP_SAME (1)
                 && OP_SAME_WITH_NULL (2);

        case BIT_FIELD_REF:
          return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

        default:
          return 0;
        }

    case tcc_expression:
      switch (TREE_CODE (arg0))
        {
        case ADDR_EXPR:
        case TRUTH_NOT_EXPR:
          return OP_SAME (0);

        case TRUTH_ANDIF_EXPR:
        case TRUTH_ORIF_EXPR:
          return OP_SAME (0) && OP_SAME (1);

        case FMA_EXPR:
        case WIDEN_MULT_PLUS_EXPR:
        case WIDEN_MULT_MINUS_EXPR:
          if (!OP_SAME (2))
            return 0;
          /* The multiplication operands are commutative.  */
          /* FALLTHRU */

        case TRUTH_AND_EXPR:
        case TRUTH_OR_EXPR:
        case TRUTH_XOR_EXPR:
          if (OP_SAME (0) && OP_SAME (1))
            return 1;

          /* Otherwise take into account this is a commutative operation.  */
          return (operand_equal_p (TREE_OPERAND (arg0, 0),
                                   TREE_OPERAND (arg1, 1), flags)
                  && operand_equal_p (TREE_OPERAND (arg0, 1),
                                      TREE_OPERAND (arg1, 0), flags));

        case COND_EXPR:
        case VEC_COND_EXPR:
        case DOT_PROD_EXPR:
          return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

        default:
          return 0;
        }

    case tcc_vl_exp:
      switch (TREE_CODE (arg0))
        {
        case CALL_EXPR:
          /* If the CALL_EXPRs call different functions, then they
             clearly cannot be equal.  */
          if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
                                 flags))
            return 0;

          {
            unsigned int cef = call_expr_flags (arg0);
            if (flags & OEP_PURE_SAME)
              cef &= ECF_CONST | ECF_PURE;
            else
              cef &= ECF_CONST;
            if (!cef)
              return 0;
          }

          /* Now see if all the arguments are the same.  */
          {
            const_call_expr_arg_iterator iter0, iter1;
            const_tree a0, a1;
            for (a0 = first_const_call_expr_arg (arg0, &iter0),
                   a1 = first_const_call_expr_arg (arg1, &iter1);
                 a0 && a1;
                 a0 = next_const_call_expr_arg (&iter0),
                   a1 = next_const_call_expr_arg (&iter1))
              if (! operand_equal_p (a0, a1, flags))
                return 0;

            /* If we get here and both argument lists are exhausted
               then the CALL_EXPRs are equal.  */
            return ! (a0 || a1);
          }
        default:
          return 0;
        }

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
              && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
              && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
              && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
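
/* Illustrative behavior (a sketch of typical inputs, not an exhaustive
   contract): under the rules above, "a + b" and "b + a" compare equal
   through the commutative case, while two identical trees containing a
   call such as "f () + 1" compare unequal, because operands with side
   effects only match via a shared SAVE_EXPR.  */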
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
         to match the longer operand.  */
      primarg1 = fold_convert (signed_or_unsigned_type_for
                               (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
        return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
               || code == COMPOUND_EXPR))
    tclass = tcc_binary;

  else if (tclass == tcc_expression && code == SAVE_EXPR
           && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
         too complex to handle.  */
      if (*cval1 || *cval2)
        return 0;

      tclass = tcc_unary;
      *save_p = 1;
    }

  switch (tclass)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
              && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
        return (twoval_comparison_p (TREE_OPERAND (arg, 0),
                                     cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                        cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 2),
                                        cval1, cval2, save_p));
      break;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
         the second operand, we know *CVAL1 can't be zero.  It must be that
         one side of the comparison is each of the values; test for the
         case where this isn't true by failing if the two operands
         are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
                           TREE_OPERAND (arg, 1), 0))
        return 0;

      if (*cval1 == 0)
        *cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
        ;
      else
        return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
        ;
      else
        return 0;

      return 1;

    default:
      break;
    }

  return 0;
}
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (location_t loc, tree arg, tree old0, tree new0,
            tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    tclass = tcc_binary;

  switch (tclass)
    {
    case tcc_unary:
      return fold_build1_loc (loc, code, type,
                              eval_subst (loc, TREE_OPERAND (arg, 0),
                                          old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2_loc (loc, code, type,
                              eval_subst (loc, TREE_OPERAND (arg, 0),
                                          old0, new0, old1, new1),
                              eval_subst (loc, TREE_OPERAND (arg, 1),
                                          old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
        {
        case SAVE_EXPR:
          return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
                             old1, new1);

        case COMPOUND_EXPR:
          return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
                             old1, new1);

        case COND_EXPR:
          return fold_build3_loc (loc, code, type,
                                  eval_subst (loc, TREE_OPERAND (arg, 0),
                                              old0, new0, old1, new1),
                                  eval_subst (loc, TREE_OPERAND (arg, 1),
                                              old0, new0, old1, new1),
                                  eval_subst (loc, TREE_OPERAND (arg, 2),
                                              old0, new0, old1, new1));
        default:
          break;
        }
      /* Fall through - ???  */

    case tcc_comparison:
      {
        tree arg0 = TREE_OPERAND (arg, 0);
        tree arg1 = TREE_OPERAND (arg, 1);

        /* We need to check both for exact equality and tree equality.  The
           former will be true if the operand has a side-effect.  In that
           case, we know the operand occurred exactly once.  */

        if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
          arg0 = new0;
        else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
          arg0 = new1;

        if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
          arg1 = new0;
        else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
          arg1 = new1;

        return fold_build2_loc (loc, code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement cast to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
                       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
                       fold_ignored_result (omitted), t);

  return non_lvalue_loc (loc, t);
}
/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
                               tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement cast to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
                       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
                       fold_ignored_result (omitted), t);

  return pedantic_non_lvalue_loc (loc, t);
}
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands_loc (location_t loc, tree type, tree result,
                       tree omitted1, tree omitted2)
{
  tree t = fold_convert_loc (loc, type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
}
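
/* For example, if OMITTED1 and OMITTED2 are calls f () and g () and the
   folded RESULT is 0, the tree built is equivalent to "(f (), (g (), 0))",
   preserving both calls with OMITTED1 evaluated first; if neither call
   had side effects the result would simply be 0.  */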
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

static tree
fold_truth_not_expr (location_t loc, tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  location_t loc1, loc2;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
          && flag_trapping_math
          && code != ORDERED_EXPR && code != UNORDERED_EXPR
          && code != NE_EXPR && code != EQ_EXPR)
        return NULL_TREE;

      code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
      if (code == ERROR_MARK)
        return NULL_TREE;

      return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
                         TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_OR_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_AND_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
         unless the second operand is a TRUTH_NOT_EXPR in which case our
         result is the XOR of the first operand with the inside of the
         negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
        return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
                           TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
        return build2_loc (loc, TRUTH_XOR_EXPR, type,
                           invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
                           TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ORIF_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
                         invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
        tree arg1 = TREE_OPERAND (arg, 1);
        tree arg2 = TREE_OPERAND (arg, 2);

        loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
        loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);

        /* A COND_EXPR may have a throw as one operand, which
           then has void type.  Just leave void operands
           as they are.  */
        return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
                           VOID_TYPE_P (TREE_TYPE (arg1))
                           ? arg1 : invert_truthvalue_loc (loc1, arg1),
                           VOID_TYPE_P (TREE_TYPE (arg2))
                           ? arg2 : invert_truthvalue_loc (loc2, arg2));
      }

    case COMPOUND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, COMPOUND_EXPR, type,
                         TREE_OPERAND (arg, 0),
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));

    CASE_CONVERT:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
        return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

      /* ... fall through ...  */

    case FLOAT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, TREE_CODE (arg), type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
        return NULL_TREE;
      return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, CLEANUP_POINT_EXPR, type,
                         invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    default:
      return NULL_TREE;
    }
}
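
/* For example, !(a && b) becomes !a || !b, !(a < b) on integers becomes
   a >= b, and a floating-point !(x < y) under -ftrapping-math is left
   alone: we return NULL_TREE and invert_truthvalue_loc below wraps a
   TRUTH_NOT_EXPR instead.  */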
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
invert_truthvalue_loc (location_t loc, tree arg)
{
  tree tem;

  if (TREE_CODE (arg) == ERROR_MARK)
    return arg;

  tem = fold_truth_not_expr (loc, arg);
  if (!tem)
    tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);

  return tem;
}
/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
	(A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (location_t loc, enum tree_code code, tree type,
                     tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
          && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  common = fold_convert_loc (loc, type, common);
  left = fold_convert_loc (loc, type, left);
  right = fold_convert_loc (loc, type, right);
  return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
                          fold_build2_loc (loc, code, type, left, right));
}
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */

static tree
distribute_real_division (location_t loc, enum tree_code code, tree type,
                          tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
                          TREE_OPERAND (arg1, 1), 0))
    return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
                            fold_build2_loc (loc, code, type,
                                             TREE_OPERAND (arg0, 0),
                                             TREE_OPERAND (arg1, 0)),
                            TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
                       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
        real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
        real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2_loc (loc, MULT_EXPR, type,
                              TREE_OPERAND (arg0, 0),
                              build_real (type, r0));
    }

  return NULL_TREE;
}
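
/* For example, x/2.0 - x/4.0 becomes x * 0.25 by the second pattern,
   with 1/2.0 - 1/4.0 evaluated at compile time, and a/c + b/c becomes
   (a + b)/c by the first; both reassociate floating-point operations,
   hence the "unsafe" caveat above.  */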
/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (location_t loc, tree inner, tree type,
                    HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
{
  tree result, bftype;

  if (bitpos == 0)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
           || POINTER_TYPE_P (TREE_TYPE (inner)))
          && host_integerp (size, 0)
          && tree_low_cst (size, 0) == bitsize)
        return fold_convert_loc (loc, type, inner);
    }

  bftype = type;
  if (TYPE_PRECISION (bftype) != bitsize
      || TYPE_UNSIGNED (bftype) == !unsignedp)
    bftype = build_nonstandard_integer_type (bitsize, 0);

  result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
                       size_int (bitsize), bitsize_int (bitpos));

  if (bftype != type)
    result = fold_convert_loc (loc, type, result);

  return result;
}
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (location_t loc, enum tree_code code,
                            tree compare_type, tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
                                &lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
         sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
                                    &runsignedp, &rvolatilep, false);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
          || lunsignedp != runsignedp || offset != 0
          || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
        return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  if (lvolatilep
      && GET_MODE_BITSIZE (lmode) > 0
      && flag_strict_volatile_bitfields > 0)
    nmode = lmode;
  else
    nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
                           const_p ? TYPE_ALIGN (TREE_TYPE (linner))
                           : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
                                  TYPE_ALIGN (TREE_TYPE (rinner))),
                           word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst_type (unsigned_type, -1);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
  mask = const_binop (RSHIFT_EXPR, mask,
                      size_int (nbitsize - lbitsize - lbitpos));

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return fold_build2_loc (loc, code, compare_type,
                            fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                                             make_bit_field_ref (loc, linner,
                                                                 unsigned_type,
                                                                 nbitsize,
                                                                 nbitpos, 1),
                                             mask),
                            fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                                             make_bit_field_ref (loc, rinner,
                                                                 unsigned_type,
                                                                 nbitsize,
                                                                 nbitpos, 1),
                                             mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
                                        fold_convert_loc (loc,
                                                          unsigned_type, rhs),
                                        size_int (lbitsize))))
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR,
                              fold_convert_loc (loc, signed_type, rhs),
                              size_int (lbitsize - 1));
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
        {
          warning (0, "comparison is always %d due to width of bit-field",
                   code == NE_EXPR);
          return constant_boolean_node (code == NE_EXPR, compare_type);
        }
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = const_binop (BIT_AND_EXPR,
                     const_binop (LSHIFT_EXPR,
                                  fold_convert_loc (loc, unsigned_type, rhs),
                                  size_int (lbitpos)),
                     mask);

  lhs = build2_loc (loc, code, compare_type,
                    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
  return lhs;
}
/* Subroutine for fold_truth_andor_1: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
                        HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
                        int *punsignedp, int *pvolatilep,
                        tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (CONVERT_EXPR_P (exp)
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
        return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
                               punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst_type (unsigned_type, -1);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
                            fold_convert_loc (loc, unsigned_type, and_mask),
                            mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bits of its type.  */

static int
all_ones_mask_p (const_tree mask, int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_cst_type (signed_type_for (type), -1);

  return
    tree_int_cst_equal (mask,
                        const_binop (RSHIFT_EXPR,
                                     const_binop (LSHIFT_EXPR, tmask,
                                                  size_int (precision - size)),
                                     size_int (precision - size)));
}
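
/* For example, with SIZE == 3 this accepts a mask of 7 (binary 111)
   but rejects 5 (binary 101) and 15 (binary 1111): the mask must be
   exactly the SIZE low-order bits, all set.  */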
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, const_tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
                 >> (2 * HOST_BITS_PER_WIDE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
                 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
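
/* For example, in a 16-bit type the only accepted constant is 0x8000;
   and if EXP is a NOP_EXPR widening a narrower operand, the constant is
   re-tested against the narrower width, catching sign bits hidden
   behind an extension.  */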
/* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (const_tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
          || TREE_CODE (exp) == SSA_NAME
          || (DECL_P (exp)
              && ! TREE_ADDRESSABLE (exp)
              && ! TREE_THIS_VOLATILE (exp)
              && ! DECL_NONLOCAL (exp)
              /* Don't regard global variables as simple.  They may be
                 allocated in ways unknown to the compiler (shared memory,
                 #pragma weak, etc).  */
              && ! TREE_PUBLIC (exp)
              && ! DECL_EXTERNAL (exp)
              /* Loading a static variable is unduly expensive, but global
                 registers aren't expensive.  */
              && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}
/* Subroutine for fold_truth_andor: determine if an operand is simple enough
   to be evaluated unconditionally.
   In addition to simple_operand_p, we assume that comparisons, conversions,
   and logic-not operations are simple, if their operands are simple, too.  */

static bool
simple_operand_p_2 (tree exp)
{
  enum tree_code code;

  if (TREE_SIDE_EFFECTS (exp)
      || tree_could_trap_p (exp))
    return false;

  while (CONVERT_EXPR_P (exp))
    exp = TREE_OPERAND (exp, 0);

  code = TREE_CODE (exp);

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return (simple_operand_p (TREE_OPERAND (exp, 0))
            && simple_operand_p (TREE_OPERAND (exp, 1)));

  if (code == TRUTH_NOT_EXPR)
    return simple_operand_p_2 (TREE_OPERAND (exp, 0));

  return simple_operand_p (exp);
}
/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
	X == 2 || X == 3 || X == 4 || X == 5
   and
	X >= 2 && X <= 5
   are converted to
	(unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
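
/* As a worked example in this notation, the chain
     X == 2 || X == 3 || X == 4 || X == 5
   is the union of + [2, 2], + [3, 3], + [4, 4] and + [5, 5], which
   merges to + [2, 5]; build_range_check below then emits it as the
   single test (unsigned) (X - 2) <= 3.  */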
/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
             tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
                         arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore make the transformation of any unbounded range with
     the value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);

  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
/* Helper routine for make_range.  Perform one step for it, return
   new expression if the loop should continue or NULL_TREE if it should
   stop.  */

tree
make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
                 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
                 bool *strict_overflow_p)
{
  tree arg0_type = TREE_TYPE (arg0);
  tree n_low, n_high, low = *p_low, high = *p_high;
  int in_p = *p_in_p, n_in_p;

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      *p_in_p = ! in_p;
      return arg0;

    case EQ_EXPR: case NE_EXPR:
    case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
      /* We can only do something if the range is testing for zero
         and if the second operand is an integer constant.  Note that
         saying something is "in" the range we make is done by
         complementing IN_P since it will set in the initial case of
         being not equal to zero; "out" is leaving it alone.  */
      if (low == NULL_TREE || high == NULL_TREE
          || ! integer_zerop (low) || ! integer_zerop (high)
          || TREE_CODE (arg1) != INTEGER_CST)
        return NULL_TREE;

      switch (code)
        {
        case NE_EXPR:  /* - [c, c]  */
          low = high = arg1;
          break;
        case EQ_EXPR:  /* + [c, c]  */
          in_p = ! in_p, low = high = arg1;
          break;
        case GT_EXPR:  /* - [-, c] */
          low = 0, high = arg1;
          break;
        case GE_EXPR:  /* + [c, -] */
          in_p = ! in_p, low = arg1, high = 0;
          break;
        case LT_EXPR:  /* - [c, -] */
          low = arg1, high = 0;
          break;
        case LE_EXPR:  /* + [-, c] */
          in_p = ! in_p, low = 0, high = arg1;
          break;
        default:
          gcc_unreachable ();
        }

      /* If this is an unsigned comparison, we also know that EXP is
         greater than or equal to zero.  We base the range tests we make
         on that fact, so we record it here so we can parse existing
         range tests.  We test arg0_type since often the return type
         of, e.g. EQ_EXPR, is boolean.  */
      if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
        {
          if (! merge_ranges (&n_in_p, &n_low, &n_high,
                              in_p, low, high, 1,
                              build_int_cst (arg0_type, 0),
                              NULL_TREE))
            return NULL_TREE;

          in_p = n_in_p, low = n_low, high = n_high;

          /* If the high bound is missing, but we have a nonzero low
             bound, reverse the range so it goes from zero to the low bound
             minus 1.  */
          if (high == 0 && low && ! integer_zerop (low))
            {
              in_p = ! in_p;
              high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
                                  integer_one_node, 0);
              low = build_int_cst (arg0_type, 0);
            }
        }

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;

    case NEGATE_EXPR:
      /* (-x) IN [a,b] -> x in [-b, -a]  */
      n_low = range_binop (MINUS_EXPR, exp_type,
                           build_int_cst (exp_type, 0),
                           0, high, 1);
      n_high = range_binop (MINUS_EXPR, exp_type,
                            build_int_cst (exp_type, 0),
                            0, low, 0);
      if (n_high != 0 && TREE_OVERFLOW (n_high))
        return NULL_TREE;
      goto normalize;

    case BIT_NOT_EXPR:
      /* ~ X -> -X - 1  */
      return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
                         build_int_cst (exp_type, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
      if (TREE_CODE (arg1) != INTEGER_CST)
        return NULL_TREE;

      /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
         move a constant to the other side.  */
      if (!TYPE_UNSIGNED (arg0_type)
          && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
        return NULL_TREE;

      /* If EXP is signed, any overflow in the computation is undefined,
         so we don't worry about it so long as our computations on
         the bounds don't overflow.  For unsigned, overflow is defined
         and this is exactly the right thing.  */
      n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                           arg0_type, low, 0, arg1, 0);
      n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
                            arg0_type, high, 1, arg1, 0);
      if ((n_low != 0 && TREE_OVERFLOW (n_low))
          || (n_high != 0 && TREE_OVERFLOW (n_high)))
        return NULL_TREE;

      if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
        *strict_overflow_p = true;

      normalize:
        /* Check for an unsigned range which has wrapped around the maximum
           value thus making n_high < n_low, and normalize it.  */
        if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
          {
            low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
                               integer_one_node, 0);
            high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
                                integer_one_node, 0);

            /* If the range is of the form +/- [ x+1, x ], we won't
               be able to normalize it.  But then, it represents the
               whole range or the empty set, so make it
               +/- [ -, - ].  */
            if (tree_int_cst_equal (n_low, low)
                && tree_int_cst_equal (n_high, high))
              low = high = 0;
            else
              in_p = ! in_p;
          }
        else
          low = n_low, high = n_high;

        *p_low = low;
        *p_high = high;
        *p_in_p = in_p;
        return arg0;

    CASE_CONVERT:
    case NON_LVALUE_EXPR:
      if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
        return NULL_TREE;

      if (! INTEGRAL_TYPE_P (arg0_type)
          || (low != 0 && ! int_fits_type_p (low, arg0_type))
          || (high != 0 && ! int_fits_type_p (high, arg0_type)))
        return NULL_TREE;

      n_low = low, n_high = high;

      if (n_low != 0)
        n_low = fold_convert_loc (loc, arg0_type, n_low);

      if (n_high != 0)
        n_high = fold_convert_loc (loc, arg0_type, n_high);

      /* If we're converting arg0 from an unsigned type, to exp,
         a signed type, we will be doing the comparison as unsigned.
         The tests above have already verified that LOW and HIGH
         are both positive.

         So we have to ensure that we will handle large unsigned
         values the same way that the current signed bounds treat
         negative values.  */

      if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
        {
          tree high_positive;
          tree equiv_type;
          /* For fixed-point modes, we need to pass the saturating flag
             as the 2nd parameter.  */
          if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
            equiv_type
              = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
                                                TYPE_SATURATING (arg0_type));
          else
            equiv_type
              = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);

          /* A range without an upper bound is, naturally, unbounded.
             Since convert would have cropped a very large value, use
             the max value for the destination type.  */
          high_positive
            = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
              : TYPE_MAX_VALUE (arg0_type);

          if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
            high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
                                             fold_convert_loc (loc, arg0_type,
                                                               high_positive),
                                             build_int_cst (arg0_type, 1));

          /* If the low bound is specified, "and" the range with the
             range for which the original unsigned value will be
             positive.  */
          if (low != 0)
            {
              if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
                                  1, fold_convert_loc (loc, arg0_type,
                                                       integer_zero_node),
                                  high_positive))
                return NULL_TREE;

              in_p = (n_in_p == in_p);
            }
          else
            {
              /* Otherwise, "or" the range with the range of the input
                 that will be interpreted as negative.  */
              if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
                                  1, fold_convert_loc (loc, arg0_type,
                                                       integer_zero_node),
                                  high_positive))
                return NULL_TREE;

              in_p = (in_p != n_in_p);
            }
        }

      *p_low = n_low;
      *p_high = n_high;
      *p_in_p = in_p;
      return arg0;

    default:
      return NULL_TREE;
    }
}
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */

tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
            bool *strict_overflow_p)
{
  enum tree_code code;
  tree arg0, arg1 = NULL_TREE;
  tree exp_type, nexp;
  int in_p;
  tree low, high;
  location_t loc = EXPR_LOCATION (exp);

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);
      arg0 = NULL_TREE;

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
        {
          if (TREE_OPERAND_LENGTH (exp) > 0)
            arg0 = TREE_OPERAND (exp, 0);
          if (TREE_CODE_CLASS (code) == tcc_binary
              || TREE_CODE_CLASS (code) == tcc_comparison
              || (TREE_CODE_CLASS (code) == tcc_expression
                  && TREE_OPERAND_LENGTH (exp) > 1))
            arg1 = TREE_OPERAND (exp, 1);
        }
      if (arg0 == NULL_TREE)
        break;

      nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
                              &high, &in_p, strict_overflow_p);
      if (nexp == NULL_TREE)
        break;
      exp = nexp;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
                                                 exp, 0, low, 0))
                      && integer_onep (range_binop (LE_EXPR, integer_type_node,
                                                    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
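
/* For example, given EXP = "x > 4" this returns "x" with *PIN_P == 0,
   no low bound and a high bound of 4: being outside [-, 4] is exactly
   being greater than 4.  */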
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of,
   depending on IN_P) the range.  Return 0 if the test couldn't be created.  */

tree
build_range_check (location_t loc, tree type, tree exp, int in_p,
                   tree low, tree high)
{
  tree etype = TREE_TYPE (exp), value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (! in_p)
    {
      value = build_range_check (loc, type, exp, 1, low, high);
      if (value != 0)
        return invert_truthvalue_loc (loc, value);

      return 0;
    }

  if (low == 0 && high == 0)
    return build_int_cst (type, 1);

  if (low == 0)
    return fold_build2_loc (loc, LE_EXPR, type, exp,
                            fold_convert_loc (loc, etype, high));

  if (high == 0)
    return fold_build2_loc (loc, GE_EXPR, type, exp,
                            fold_convert_loc (loc, etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2_loc (loc, EQ_EXPR, type, exp,
                            fold_convert_loc (loc, etype, low));

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
        {
          etype = unsigned_type_for (etype);
          high = fold_convert_loc (loc, etype, high);
          exp = fold_convert_loc (loc, etype, exp);
        }
      return build_range_check (loc, type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      /* See if HIGH is all ones in the sign-extended low bits.  */
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
        {
          hi = 0;
          lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
        }
      else
        {
          hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
          lo = (unsigned HOST_WIDE_INT) -1;
        }

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
        {
          if (TYPE_UNSIGNED (etype))
            {
              tree signed_etype = signed_type_for (etype);
              if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
                etype
                  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
              else
                etype = signed_etype;
              exp = fold_convert_loc (loc, etype, exp);
            }
          return fold_build2_loc (loc, GT_EXPR, type, exp,
                                  build_int_cst (etype, 0));
        }
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetic for the type of the expression.
     First make sure that arithmetic in this type is valid, then make sure
     that it wraps around.  */
  if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
    etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
                                            TYPE_UNSIGNED (etype));

  if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
         for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
                          integer_one_node, 1);
      minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
                                      minv, 1, maxv, 1)))
        etype = utype;
      else
        return 0;
    }

  high = fold_convert_loc (loc, etype, high);
  low = fold_convert_loc (loc, etype, low);
  exp = fold_convert_loc (loc, etype, exp);

  value = const_binop (MINUS_EXPR, high, low);

  if (POINTER_TYPE_P (etype))
    {
      if (value != 0 && !TREE_OVERFLOW (value))
        {
          low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
          return build_range_check (loc, type,
                                    fold_build_pointer_plus_loc (loc, exp, low),
                                    1, build_int_cst (etype, 0), value);
        }
      return 0;
    }

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (loc, type,
                              fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
                              1, build_int_cst (etype, 0), value);

  return 0;
}
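
/* An illustrative sketch of the wrap-around trick used above (not part of
   the original sources; assumes low <= high and that unsigned arithmetic
   wraps, which the code verifies before rewriting):

     int in_range (unsigned int x, unsigned int low, unsigned int high)
     {
       return x - low <= high - low;   // one compare instead of two
     }

   This is why a type whose arithmetic wraps is required before the test
   is rewritten as EXP - LOW against the constant HIGH - LOW.  */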
/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}

/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
}
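
/* E.g. range_successor of 5 in int is 6, while range_successor of INT_MAX
   is 0 (there is no successor); likewise range_predecessor of INT_MIN is 0.
   Callers treat a 0 result as "unbounded" and punt on the transform.  */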
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

bool
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
              tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
                  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
                   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
                                 low0, 0, low1, 0))
      || (lowequal
          && integer_onep (range_binop (GT_EXPR, integer_type_node,
                                        high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
                                          high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
                                      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
         is a subset it is the result.  Otherwise, the range is from the start
         of the second to the end of the first.  */
      if (no_overlap)
        in_p = 0, low = high = 0;
      else if (subset)
        in_p = 1, low = low1, high = high1;
      else
        in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
         equal, the result is false.  If the second range is a subset of the
         first, and the ranges begin at the same place, we go from just after
         the end of the second range to the end of the first.  If the second
         range is not a subset of the first, or if it is a subset and both
         ranges end at the same place, the range starts at the start of the
         first range and ends just before the second range.
         Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
        in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
        in_p = 0, low = high = 0;
      else if (subset && lowequal)
        {
          low = range_successor (high1);
          high = high0;
          in_p = 1;
          if (low == 0)
            {
              /* We are in the weird situation where high0 > high1 but
                 high1 has no successor.  Punt.  */
              return 0;
            }
        }
      else if (! subset || highequal)
        {
          low = low0;
          high = range_predecessor (low1);
          in_p = 1;
          if (high == 0)
            {
              /* low0 < low1 but low1 has no predecessor.  Punt.  */
              return 0;
            }
        }
      else
        return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
         is a subset of the first, the result is false.  Otherwise,
         the range starts just after the first range and ends at the
         end of the second.  */
      if (no_overlap)
        in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
        in_p = 0, low = high = 0;
      else
        {
          low = range_successor (high0);
          high = high1;
          in_p = 1;
          if (low == 0)
            {
              /* high1 > high0 but high0 has no successor.  Punt.  */
              return 0;
            }
        }
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
         is if they don't overlap.  In that case, the only time we have a
         range is if they are adjacent.  If the second is a subset of the
         first, the result is the first.  Otherwise, the range to exclude
         starts at the beginning of the first range and ends at the end of
         the second.  */
      if (no_overlap)
        {
          if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                         range_successor (high0),
                                         1, low1, 0)))
            in_p = 0, low = low0, high = high1;
          else
            {
              /* Canonicalize - [min, x] into - [-, x].  */
              if (low0 && TREE_CODE (low0) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (low0)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (low0))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                    if (tree_int_cst_equal (low0,
                                            TYPE_MIN_VALUE (TREE_TYPE (low0))))
                      low0 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (low0))
                        && integer_zerop (low0))
                      low0 = 0;
                    break;
                  default:
                    break;
                  }

              /* Canonicalize - [x, max] into - [x, -].  */
              if (high1 && TREE_CODE (high1) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (high1)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (high1))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                    if (tree_int_cst_equal (high1,
                                            TYPE_MAX_VALUE (TREE_TYPE (high1))))
                      high1 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (high1))
                        && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
                                                       high1, 1,
                                                       integer_one_node, 1)))
                      high1 = 0;
                    break;
                  default:
                    break;
                  }

              /* The ranges might be also adjacent between the maximum and
                 minimum values of the given type.  For
                 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
                 return + [x + 1, y - 1].  */
              if (low0 == 0 && high1 == 0)
                {
                  low = range_successor (high0);
                  high = range_predecessor (low1);
                  if (low == 0 || high == 0)
                    return 0;

                  in_p = 1;
                }
              else
                return 0;
            }
        }
      else if (subset)
        in_p = 0, low = low0, high = high0;
      else
        in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
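
/* For example, merging the included ranges +[0, 9] and +[5, 20] under
   "and" (in0_p = in1_p = 1) yields +[5, 9], while two disjoint included
   ranges such as +[0, 4] and +[10, 20] yield "false" (in_p = 0,
   low = high = 0).  As a sketch of the first case:

     (x >= 0 && x <= 9) && (x >= 5 && x <= 20)  ==>  x >= 5 && x <= 9  */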
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is being used also to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (location_t loc, tree type,
                                tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && (FLOAT_TYPE_P (TREE_TYPE (arg01))
          ? real_zerop (arg01)
          : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
           && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
          /* In the case that A is of the form X-Y, '-A' (arg2) may
             have already been folded to Y-X, check for that.  */
          || (TREE_CODE (arg1) == MINUS_EXPR
              && TREE_CODE (arg2) == MINUS_EXPR
              && operand_equal_p (TREE_OPERAND (arg1, 0),
                                  TREE_OPERAND (arg2, 1), 0)
              && operand_equal_p (TREE_OPERAND (arg1, 1),
                                  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
        tem = fold_convert_loc (loc, arg1_type, arg1);
        return pedantic_non_lvalue_loc (loc,
                                        fold_convert_loc (loc, type,
                                                          negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
        return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
        if (flag_trapping_math)
          break;
        /* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
        if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
          arg1 = fold_convert_loc (loc, signed_type_for
                                   (TREE_TYPE (arg1)), arg1);
        tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
        return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
        if (flag_trapping_math)
          break;
        /* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
        if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
          arg1 = fold_convert_loc (loc, signed_type_for
                                   (TREE_TYPE (arg1)), arg1);
        tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
        return negate_expr (fold_convert_loc (loc, type, tem));
      default:
        gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
        break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
        return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      else if (comp_code == EQ_EXPR)
        return build_int_cst (type, 0);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
         as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
          || (strcmp (lang_hooks.name, "GNU C++") != 0
              && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
          || ! maybe_lvalue_p (arg1)
          || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
        {
          comp_type = type;
          comp_op0 = arg1;
          comp_op1 = arg2;
        }

      switch (comp_code)
        {
        case EQ_EXPR:
          return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
        case NE_EXPR:
          return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
        case LE_EXPR:
        case LT_EXPR:
        case UNLE_EXPR:
        case UNLT_EXPR:
          /* In C++ a ?: expression can be an lvalue, so put the
             operand which will be used if they are equal first
             so that we can convert this back to the
             corresponding COND_EXPR.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            {
              comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
              comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
              tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
                    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
                    : fold_build2_loc (loc, MIN_EXPR, comp_type,
                                       comp_op1, comp_op0);
              return pedantic_non_lvalue_loc (loc,
                                              fold_convert_loc (loc, type, tem));
            }
          break;
        case GE_EXPR:
        case GT_EXPR:
        case UNGE_EXPR:
        case UNGT_EXPR:
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            {
              comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
              comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
              tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
                    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
                    : fold_build2_loc (loc, MAX_EXPR, comp_type,
                                       comp_op1, comp_op0);
              return pedantic_non_lvalue_loc (loc,
                                              fold_convert_loc (loc, type, tem));
            }
          break;
        case UNEQ_EXPR:
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            return pedantic_non_lvalue_loc (loc,
                                            fold_convert_loc (loc, type, arg2));
          break;
        case LTGT_EXPR:
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            return pedantic_non_lvalue_loc (loc,
                                            fold_convert_loc (loc, type, arg1));
          break;
        default:
          gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
          break;
        }
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
        if (TREE_CODE (arg1) == INTEGER_CST)
          break;
        /* We can replace A with C1 in this case.  */
        arg1 = fold_convert_loc (loc, type, arg01);
        return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
        /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
           MIN_EXPR, to preserve the signedness of the comparison.  */
        if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (PLUS_EXPR, arg2,
                                             build_int_cst (type, 1)),
                                OEP_ONLY_CONST))
          {
            tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
                                   fold_convert_loc (loc, TREE_TYPE (arg00),
                                                     arg2));
            return pedantic_non_lvalue_loc (loc,
                                            fold_convert_loc (loc, type, tem));
          }
        break;

      case LE_EXPR:
        /* If C1 is C2 - 1, this is min(A, C2), with the same care
           as above.  */
        if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (MINUS_EXPR, arg2,
                                             build_int_cst (type, 1)),
                                OEP_ONLY_CONST))
          {
            tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
                                   fold_convert_loc (loc, TREE_TYPE (arg00),
                                                     arg2));
            return pedantic_non_lvalue_loc (loc,
                                            fold_convert_loc (loc, type, tem));
          }
        break;

      case GT_EXPR:
        /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
           MAX_EXPR, to preserve the signedness of the comparison.  */
        if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (MINUS_EXPR, arg2,
                                             build_int_cst (type, 1)),
                                OEP_ONLY_CONST))
          {
            tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
                                   fold_convert_loc (loc, TREE_TYPE (arg00),
                                                     arg2));
            return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
          }
        break;

      case GE_EXPR:
        /* If C1 is C2 + 1, this is max(A, C2), with the same care as above.  */
        if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (PLUS_EXPR, arg2,
                                             build_int_cst (type, 1)),
                                OEP_ONLY_CONST))
          {
            tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
                                   fold_convert_loc (loc, TREE_TYPE (arg00),
                                                     arg2));
            return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
          }
        break;

      default:
        break;
      }

  return NULL_TREE;
}
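
/* A concrete instance of the A op 0 ? A : -A family handled above, as an
   illustrative sketch (valid only when signed zeros need not be honored):

     double f (double a) { return a >= 0 ? a : -a; }   // same as fabs (a)
     double g (double a) { return a <= 0 ? a : -a; }   // same as -fabs (a)  */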
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
                false) >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (location_t loc, enum tree_code code, tree type,
                 tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
               || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
  tree tem;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
                                  "when simplifying range test");

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
                       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (loc, type,
                                         lhs != 0 ? lhs
                                         : rhs != 0 ? rhs : integer_zero_node,
                                         in_p, low, high))))
    {
      if (strict_overflow_p)
        fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue_loc (loc, tem) : tem;
    }

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
           && lhs != 0 && rhs != 0
           && (code == TRUTH_ANDIF_EXPR
               || code == TRUTH_ORIF_EXPR)
           && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
         unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
         which cases we can't do this.  */
      if (simple_operand_p (lhs))
        return build2_loc (loc, code == TRUTH_ANDIF_EXPR
                           ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
                           type, op0, op1);

      else if (!lang_hooks.decls.global_bindings_p ()
               && !CONTAINS_PLACEHOLDER_P (lhs))
        {
          tree common = save_expr (lhs);

          if (0 != (lhs = build_range_check (loc, type, common,
                                             or_op ? ! in0_p : in0_p,
                                             low0, high0))
              && (0 != (rhs = build_range_check (loc, type, common,
                                                 or_op ? ! in1_p : in1_p,
                                                 low1, high1))))
            {
              if (strict_overflow_p)
                fold_overflow_warning (warnmsg,
                                       WARN_STRICT_OVERFLOW_COMPARISON);
              return build2_loc (loc, code == TRUTH_ANDIF_EXPR
                                 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
                                 type, lhs, rhs);
            }
        }
    }

  return 0;
}
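
/* The classic example of the merge performed here:

     ch >= '0' && ch <= '9'

   make_range turns each comparison into a range, merge_ranges combines
   them into +['0', '9'], and build_range_check emits a single test of
   the shape (unsigned) (ch - '0') <= 9 (a sketch; the exact form depends
   on the types involved).  */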
/* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is signed-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1));

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
                        fold_convert (TREE_TYPE (c), mask));
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
}
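
/* A worked sketch with p = 4 in an 8-bit mode (illustrative only):
   for C = 0b00001010 the 4-bit field's sign bit is 1, so

     t = (c >> 3) & 1;                  // the field's sign bit, here 1
     t = (signed) (t << 7) >> 3;        // replicate into bits 4..7: 0b11110000
     result = c ^ t;                    // 0b11111010

   The extra bits of the result are zero exactly when C was already
   sign-extended to the full mode width, which is what the callers test.  */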
/* For an expression that has the form
     (A && B) || ~B
   or
     (A || B) && ~B,
   we can drop one of the inner expressions and simplify to
     A || ~B
   or
     A && ~B
   LOC is the location of the resulting expression.  OP is the inner
   logical operation; the left-hand side in the examples above, while CMPOP
   is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
   removing a condition that guards another, as in
     (A != NULL && A->...) || A == NULL
   which we must not transform.  If RHS_ONLY is true, only eliminate the
   right-most operand of the inner logical operation.  */

static tree
merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
                                 bool rhs_only)
{
  tree type = TREE_TYPE (cmpop);
  enum tree_code code = TREE_CODE (cmpop);
  enum tree_code truthop_code = TREE_CODE (op);
  tree lhs = TREE_OPERAND (op, 0);
  tree rhs = TREE_OPERAND (op, 1);
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code rhs_code = TREE_CODE (rhs);
  enum tree_code lhs_code = TREE_CODE (lhs);
  enum tree_code inv_code;

  if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
    return NULL_TREE;

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return NULL_TREE;

  if (rhs_code == truthop_code)
    {
      tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
      if (newrhs != NULL_TREE)
        {
          rhs = newrhs;
          rhs_code = TREE_CODE (rhs);
        }
    }
  if (lhs_code == truthop_code && !rhs_only)
    {
      tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
      if (newlhs != NULL_TREE)
        {
          lhs = newlhs;
          lhs_code = TREE_CODE (lhs);
        }
    }

  inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
  if (inv_code == rhs_code
      && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return lhs;
  if (!rhs_only && inv_code == lhs_code
      && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return rhs;
  if (rhs != orig_rhs || lhs != orig_lhs)
    return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
                            lhs, rhs);
  return NULL_TREE;
}
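
/* For example (with side-effect-free operands):

     (a != 0 && b > 2) || b <= 2   ==>   a != 0 || b <= 2

   because whenever the dropped comparison b > 2 is false, the outer
   b <= 2 is already true.  The RHS_ONLY restriction keeps guards such
   as (p != NULL && p->x) || p == NULL intact, as the comment above
   explains.  */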
/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
                    tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
        (a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  HOST_WIDE_INT first_bit, end_bit;
  int volatilep;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
                    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
                    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
          && operand_equal_p (lr_arg, rr_arg, 0))
        {
          result = combine_comparisons (loc, code, lcode, rcode,
                                        truth_type, ll_arg, lr_arg);
          if (result)
            return result;
        }
      else if (operand_equal_p (ll_arg, rr_arg, 0)
               && operand_equal_p (lr_arg, rl_arg, 0))
        {
          result = combine_comparisons (loc, code, lcode,
                                        swap_tree_comparison (rcode),
                                        truth_type, ll_arg, lr_arg);
          if (result)
            return result;
        }
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
          ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  */

  if (BRANCH_COST (optimize_function_for_speed_p (cfun),
                   false) >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
          && lcode == NE_EXPR && integer_zerop (lr_arg)
          && rcode == NE_EXPR && integer_zerop (rr_arg)
          && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
          && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
        return build2_loc (loc, NE_EXPR, truth_type,
                           build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
                                   ll_arg, rl_arg),
                           build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
          && lcode == EQ_EXPR && integer_zerop (lr_arg)
          && rcode == EQ_EXPR && integer_zerop (rr_arg)
          && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
          && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
        return build2_loc (loc, EQ_EXPR, truth_type,
                           build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
                                   ll_arg, rl_arg),
                           build_int_cst (TREE_TYPE (ll_arg), 0));
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (loc, ll_arg,
                                     &ll_bitsize, &ll_bitpos, &ll_mode,
                                     &ll_unsignedp, &volatilep, &ll_mask,
                                     &ll_and_mask);
  lr_inner = decode_field_reference (loc, lr_arg,
                                     &lr_bitsize, &lr_bitpos, &lr_mode,
                                     &lr_unsignedp, &volatilep, &lr_mask,
                                     &lr_and_mask);
  rl_inner = decode_field_reference (loc, rl_arg,
                                     &rl_bitsize, &rl_bitpos, &rl_mode,
                                     &rl_unsignedp, &volatilep, &rl_mask,
                                     &rl_and_mask);
  rr_inner = decode_field_reference (loc, rr_arg,
                                     &rr_bitsize, &rr_bitpos, &rr_mode,
                                     &rr_unsignedp, &volatilep, &rr_mask,
                                     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
           || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
        {
          /* Make the left operand unsigned, since we are only interested
             in the value of one bit.  Otherwise we are doing the wrong
             thing below.  */
          ll_unsignedp = 1;
          l_const = ll_mask;
        }
      else
        return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
        {
          rl_unsignedp = 1;
          r_const = rl_mask;
        }
      else
        return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
                          TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
                          volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
                         size_int (xll_bitpos));
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
                         size_int (xrl_bitpos));

  if (l_const)
    {
      l_const = fold_convert_loc (loc, lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
                                        fold_build1_loc (loc, BIT_NOT_EXPR,
                                                         lntype, ll_mask))))
        {
          warning (0, "comparison is always %d", wanted_code == NE_EXPR);

          return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
        }
    }
  if (r_const)
    {
      r_const = fold_convert_loc (loc, lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
                                        fold_build1_loc (loc, BIT_NOT_EXPR,
                                                         lntype, rl_mask))))
        {
          warning (0, "comparison is always %d", wanted_code == NE_EXPR);

          return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
        }
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
          || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
          /* Make sure the two fields on the right
             correspond to the left without being swapped.  */
          || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
        return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
                              TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
                              volatilep);
      if (rnmode == VOIDmode)
        return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
        {
          xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
          xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
        }

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
                                                            rntype, lr_mask),
                             size_int (xlr_bitpos));
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
                                                            rntype, rr_mask),
                             size_int (xrr_bitpos));

      /* Make a mask that corresponds to both fields being compared.
         Do this for both items being compared.  If the operands are the
         same size and the bits being compared are in the same position
         then we can do this by masking both and comparing the masked
         results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
        {
          lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
                                    ll_unsignedp || rl_unsignedp);
          if (! all_ones_mask_p (ll_mask, lnbitsize))
            lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

          rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
                                    lr_unsignedp || rr_unsignedp);
          if (! all_ones_mask_p (lr_mask, rnbitsize))
            rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

          return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
        }

      /* There is still another way we can do something:  If both pairs of
         fields being compared are adjacent, we may be able to make a wider
         field containing them both.

         Note that we still must mask the lhs/rhs expressions.  Furthermore,
         the mask must be shifted to account for the shift done by
         make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
           && lr_bitsize + lr_bitpos == rr_bitpos)
          || (ll_bitpos == rl_bitpos + rl_bitsize
              && lr_bitpos == rr_bitpos + rr_bitsize))
        {
          tree type;

          lhs = make_bit_field_ref (loc, ll_inner, lntype,
                                    ll_bitsize + rl_bitsize,
                                    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
          rhs = make_bit_field_ref (loc, lr_inner, rntype,
                                    lr_bitsize + rr_bitsize,
                                    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

          ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
                                 size_int (MIN (xll_bitpos, xrl_bitpos)));
          lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
                                 size_int (MIN (xlr_bitpos, xrr_bitpos)));

          /* Convert to the smaller type before masking out unwanted bits.  */
          type = lntype;
          if (lntype != rntype)
            {
              if (lnbitsize > rnbitsize)
                {
                  lhs = fold_convert_loc (loc, rntype, lhs);
                  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
                  type = rntype;
                }
              else if (lnbitsize < rnbitsize)
                {
                  rhs = fold_convert_loc (loc, lntype, rhs);
                  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
                  type = lntype;
                }
            }

          if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
            lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

          if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
            rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

          return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
        }

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
                           const_binop (BIT_AND_EXPR, result, r_const)) != 1)
    {
      if (wanted_code == NE_EXPR)
        {
          warning (0, "%<or%> of unmatched not-equal tests is always 1");
          return constant_boolean_node (true, truth_type);
        }
      else
        {
          warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
          return constant_boolean_node (false, truth_type);
        }
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
                               ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);

  return build2_loc (loc, wanted_code, truth_type, result,
                     const_binop (BIT_IOR_EXPR, l_const, r_const));
}
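
/* Putting the pieces together, an illustrative sketch of the constant case
   handled above (layout-dependent; assumes both fields land in one byte
   and a little-endian bit allocation):

     struct s { unsigned a : 4; unsigned b : 4; };

     p->a == 2 && p->b == 4

   becomes, in effect, one load and one masked compare, roughly

     (*(unsigned char *) p & 0xff) == (2 | (4 << 4))

   The masks built above play the role of 0xff here, and unextend keeps the
   merged constants consistent with the field widths.  */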
/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  */

static tree
optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
                            tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);
  comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);

  /* If something does not permit us to optimize, return the original tree.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      {
        tree tem
          = optimize_minmax_comparison (loc,
                                        invert_tree_comparison (code, false),
                                        type, op0, op1);
        if (tem)
          return invert_truthvalue_loc (loc, tem);
        return NULL_TREE;
      }

    case GE_EXPR:
      return
        fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
                         optimize_minmax_comparison
                         (loc, EQ_EXPR, type, arg0, comp_const),
                         optimize_minmax_comparison
                         (loc, GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
        /* MAX (X, 0) == 0  ->  X <= 0  */
        return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
        /* MAX (X, 0) == 5  ->  X == 5   */
        return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
        /* MAX (X, 0) == -1  ->  false  */
        return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else if (consts_equal)
        /* MIN (X, 0) == 0  ->  X >= 0  */
        return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
        /* MIN (X, 0) == 5  ->  false  */
        return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
        /* MIN (X, 0) == -1  ->  X == -1  */
        return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
        /* MAX (X, 0) > 0  ->  X > 0
           MAX (X, 0) > 5  ->  X > 5  */
        return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
        /* MAX (X, 0) > -1  ->  true  */
        return omit_one_operand_loc (loc, type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
        /* MIN (X, 0) > 0  ->  false
           MIN (X, 0) > 5  ->  false  */
        return omit_one_operand_loc (loc, type, integer_zero_node, inner);

      else
        /* MIN (X, 0) > -1  ->  X > -1  */
        return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);

    default:
      return NULL_TREE;
    }
}
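
/* E.g. MAX (x, 3) > 5 folds to x > 5 (consts_lt), MAX (x, 3) > 3 folds to
   x > 3 (consts_equal), and MAX (x, 3) > 2 is always true.  Only the
   EQ_EXPR and GT_EXPR shapes need direct handling; the other comparison
   codes are derived above by inverting or OR-ing these two results.  */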
/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.

   We set *STRICT_OVERFLOW_P to true if the return value depends on
   signed overflow being undefined.  Otherwise we do not change
   *STRICT_OVERFLOW_P.  */

static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
                bool *strict_overflow_p)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
  depth--;

  return ret;
}

static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
                  bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
                                   > GET_MODE_SIZE (TYPE_MODE (type)))
                ? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;
  bool sub_strict_overflow_p;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
         or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
          || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
        return const_binop (code, fold_convert (ctype, t),
                            fold_convert (ctype, c));
      break;

    CASE_CONVERT: case NON_LVALUE_EXPR:
      /* If op0 is an expression ... */
      if ((COMPARISON_CLASS_P (op0)
           || UNARY_CLASS_P (op0)
           || BINARY_CLASS_P (op0)
           || VL_EXP_CLASS_P (op0)
           || EXPRESSION_CLASS_P (op0))
          /* ... and has wrapping overflow, and its type is smaller
             than ctype, then we cannot pass through as widening.  */
          && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
               && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
                     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
               && (TYPE_PRECISION (ctype)
                   > TYPE_PRECISION (TREE_TYPE (op0))))
              /* ... or this is a truncation (t is narrower than op0),
                 then we cannot pass through this narrowing.  */
              || (TYPE_PRECISION (type)
                  < TYPE_PRECISION (TREE_TYPE (op0)))
              /* ... or signedness changes for division or modulus,
                 then we cannot pass through this conversion.  */
              || (code != MULT_EXPR
                  && (TYPE_UNSIGNED (ctype)
                      != TYPE_UNSIGNED (TREE_TYPE (op0))))
              /* ... or has undefined overflow while the converted to
                 type has not, we cannot do the operation in the inner type
                 as that would introduce undefined overflow.  */
              || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
                  && !TYPE_OVERFLOW_UNDEFINED (type))))
        break;

      /* Pass the constant down and see if we can make a simplification.  If
         we can, replace this expression with the inner simplification for
         possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
          && TREE_CODE (t2) == INTEGER_CST
          && !TREE_OVERFLOW (t2)
          && (0 != (t1 = extract_muldiv (op0, t2, code,
                                         code == MULT_EXPR
                                         ? ctype : NULL_TREE,
                                         strict_overflow_p))))
        return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
         must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
        {
          tree cstype = (*signed_type_for) (ctype);
          if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
              != 0)
            {
              t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
              return fold_convert (ctype, t1);
            }
          break;
        }
      /* If the constant is negative, we cannot simplify this.  */
      if (tree_int_cst_sgn (c) == -1)
        break;
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
          != 0)
        return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
         this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
        break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      sub_strict_overflow_p = false;
      if ((t1 = extract_muldiv (op0, c, code, wide_type,
                                &sub_strict_overflow_p)) != 0
          && (t2 = extract_muldiv (op1, c, code, wide_type,
                                   &sub_strict_overflow_p)) != 0)
        {
          if (tree_int_cst_sgn (c) < 0)
            tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
                              fold_convert (ctype, t2));
        }
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
         or floor division, by a power of two, so we can treat it that
         way unless the multiplier or divisor overflows.  Signed
         left-shift overflow is implementation-defined rather than
         undefined in C90, so do not convert signed left shift into
         multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
          && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
          && TREE_INT_CST_HIGH (op1) == 0
          && 0 != (t1 = fold_convert (ctype,
                                      const_binop (LSHIFT_EXPR,
                                                   size_one_node,
                                                   op1)))
          && !TREE_OVERFLOW (t1))
        return extract_muldiv (build2 (tcode == LSHIFT_EXPR
                                       ? MULT_EXPR : FLOOR_DIV_EXPR,
                                       ctype,
                                       fold_convert (ctype, op0),
                                       t1),
                               c, code, wide_type, strict_overflow_p);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
         can return a new PLUS or MINUS.  If we can't, the only remaining
         cases where we can do anything are if the second operand is a
         constant.  */
      sub_strict_overflow_p = false;
      t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
      t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
      if (t1 != 0 && t2 != 0
          && (code == MULT_EXPR
              /* If not multiplication, we can only do this if both operands
                 are divisible by c.  */
              || (multiple_of_p (ctype, op0, c)
                  && multiple_of_p (ctype, op1, c))))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
                              fold_convert (ctype, t2));
        }

      /* If this was a subtraction, negate OP1 and set it to be an addition.
         This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
        {
          tcode = PLUS_EXPR, op1 = negate_expr (op1);
          /* If OP1 was not easily negatable, the constant may be OP0.  */
          if (TREE_CODE (op0) == INTEGER_CST)
            {
              tree tem = op0;
              op0 = op1;
              op1 = tem;
              tem = t1;
              t1 = t2;
              t2 = tem;
            }
        }

      if (TREE_CODE (op1) != INTEGER_CST)
        break;

      /* If either OP1 or C are negative, this optimization is not safe for
         some of the division and remainder types while for others we need
         to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
        {
          if (code == CEIL_DIV_EXPR)
            code = FLOOR_DIV_EXPR;
          else if (code == FLOOR_DIV_EXPR)
            code = CEIL_DIV_EXPR;
          else if (code != MULT_EXPR
                   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
            break;
        }

      /* If it's a multiply or a division/modulus operation of a multiple
         of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
          || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
        {
          op1 = const_binop (code, fold_convert (ctype, op1),
                             fold_convert (ctype, c));
          /* We allow the constant to overflow with wrapping semantics.  */
          if (op1 == 0
              || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
            break;
        }
      else
        break;

      /* If we have an unsigned type that is not a sizetype, we cannot widen
         the operation since it will change the result if the original
         computation overflowed.  */
      if (TYPE_UNSIGNED (ctype)
          && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
          && ctype != type)
        break;

      /* If we were able to eliminate our operation from the first side,
         apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
        return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
         apply the distributive law to commute the multiply and addition
         if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
        return fold_build2 (tcode, ctype,
                            fold_build2 (code, ctype,
                                         fold_convert (ctype, op0),
                                         fold_convert (ctype, c)),
                            op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
         (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
           || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
          /* If the multiplication can overflow we cannot optimize this.
             ??? Until we can properly mark individual operations as
             not overflowing we need to treat sizetype special here as
             stor-layout relies on this optimization to make
             DECL_FIELD_BIT_OFFSET always a constant.  */
          && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
              || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                  && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
          && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
          && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
        {
          *strict_overflow_p = true;
          return omit_one_operand (type, integer_zero_node, op0);
        }

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
         new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
         do something only if the second operand is a constant.  */
      if (same_p
          && (t1 = extract_muldiv (op0, c, code, wide_type,
                                   strict_overflow_p)) != 0)
        return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
                            fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
               && (t1 = extract_muldiv (op1, c, code, wide_type,
                                        strict_overflow_p)) != 0)
        return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
                            fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
        return 0;

      /* If these are the same operation types, we can associate them
         assuming no overflow.  */
      if (tcode == code)
        {
          double_int mul;
          int overflow_p;
          mul = double_int_mul_with_sign
                  (double_int_ext
                     (tree_to_double_int (op1),
                      TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
                   double_int_ext
                     (tree_to_double_int (c),
                      TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
                   false, &overflow_p);
          overflow_p = (((!TYPE_UNSIGNED (ctype)
                          || (TREE_CODE (ctype) == INTEGER_TYPE
                              && TYPE_IS_SIZETYPE (ctype)))
                         && overflow_p)
                        | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
          if (!double_int_fits_to_tree_p (ctype, mul)
              && ((TYPE_UNSIGNED (ctype) && tcode != MULT_EXPR)
                  || !TYPE_UNSIGNED (ctype)
                  || (TREE_CODE (ctype) == INTEGER_TYPE
                      && TYPE_IS_SIZETYPE (ctype))))
            overflow_p = 1;
          if (!overflow_p)
            return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
                                double_int_to_tree (ctype, mul));
        }

      /* If these operations "cancel" each other, we have the main
         optimizations of this pass, which occur when either constant is a
         multiple of the other, in which case we replace this with either an
         operation or CODE or TCODE.

         If we have an unsigned type that is not a sizetype, we cannot do
         this since it will change the result if the original computation
         overflowed.  */
      if ((TYPE_OVERFLOW_UNDEFINED (ctype)
           || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
          && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
              || (tcode == MULT_EXPR
                  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
                  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
                  && code != MULT_EXPR)))
        {
          if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
            {
              if (TYPE_OVERFLOW_UNDEFINED (ctype))
                *strict_overflow_p = true;
              return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
                                  fold_convert (ctype,
                                                const_binop (TRUNC_DIV_EXPR,
                                                             op1, c)));
            }
          else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
            {
              if (TYPE_OVERFLOW_UNDEFINED (ctype))
                *strict_overflow_p = true;
              return fold_build2 (code, ctype, fold_convert (ctype, op0),
                                  fold_convert (ctype,
                                                const_binop (TRUNC_DIV_EXPR,
                                                             c, op1)));
            }
        }
      break;

    default:
      break;
    }

  return 0;
}
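
/* The motivating example from the comment above, as a sketch (valid when
   signed overflow is undefined for the type, or cannot occur):

     (x * 8 + y * 16) / 4   ==>   x * 2 + y * 4

   Each multiplication absorbs the division because its constant is a
   multiple of the divisor (the "cancel" case), and the PLUS_EXPR case
   rebuilds the sum from the two simplified halves.  */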
/* Return a node which has the indicated constant VALUE (either 0 or
   1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
   and is of the indicated TYPE.  */

tree
constant_boolean_node (bool value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else if (TREE_CODE (type) == VECTOR_TYPE)
    return build_vector_from_val (type,
                                  build_int_cst (TREE_TYPE (type),
                                                 value ? -1 : 0));
  else
    return fold_convert (type, value ? integer_one_node : integer_zero_node);
}
/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (location_t loc,
				     enum tree_code code,
				     tree type, tree op0, tree op1,
				     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;

  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an exception, then it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  /* This transformation is only worthwhile if we don't have to wrap ARG
     in a SAVE_EXPR and the operation can be simplified on at least one
     of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg)
      && (TREE_SIDE_EFFECTS (arg)
	  || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
    return NULL_TREE;

  arg = fold_convert_loc (loc, arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert_loc (loc, cond_type, true_value);
      if (cond_first_p)
	lhs = fold_build2_loc (loc, code, type, true_value, arg);
      else
	lhs = fold_build2_loc (loc, code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert_loc (loc, cond_type, false_value);
      if (cond_first_p)
	rhs = fold_build2_loc (loc, code, type, false_value, arg);
      else
	rhs = fold_build2_loc (loc, code, type, arg, false_value);
    }

  /* Check that we have simplified at least one of the branches.  */
  if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
    return NULL_TREE;

  return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
}
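
/* As a concrete instance: for `2 + (b ? 3 : x)' the pushed-in LHS
   folds to the constant 5 while the RHS stays `2 + x', so the final
   constancy check passes and the result is `b ? 5 : 2 + x'.  For
   `a + (b ? x : y)' with nothing constant anywhere, the fold is
   abandoned because neither branch simplified.  */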
/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

bool
fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for:
     X - 0 is the same as X, provided we need not honor
     sign-dependent rounding.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
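
/* Concretely: `x - 0.0' folds to `x' unless sign-dependent rounding
   must be honored (0.0 - 0.0 is -0.0 when rounding towards -infinity),
   whereas `x + 0.0' must be kept whenever x might be -0.0, because
   -0.0 + 0.0 is +0.0.  `x + -0.0' is handled by flipping NEGATE.  */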
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (location_t loc,
		     enum built_in_function fcode, enum tree_code code,
		     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      tree arg = CALL_EXPR_ARG (arg0, 0);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
	{
	  /* sqrt(x) is non-negative, so sqrt(x) == y, sqrt(x) < y and
	     sqrt(x) <= y are always false, if y is negative.  */
	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg);

	  /* sqrt(x) > y is always true, if y is negative and we
	     don't care about NaNs, i.e. negative values of x.  */
	  if (code == NE_EXPR || !HONOR_NANS (mode))
	    return omit_one_operand_loc (loc, type, integer_one_node, arg);

	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
	  return fold_build2_loc (loc, GE_EXPR, type, arg,
				  build_real (TREE_TYPE (arg), dconst0));
	}
      else if (code == GT_EXPR || code == GE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
	      if (HONOR_INFINITIES (mode))
		return fold_build2_loc (loc, EQ_EXPR, type, arg,
					build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) > y is always false, when y is very large
		 and we don't care about infinities.  */
	      return omit_one_operand_loc (loc, type, integer_zero_node, arg);
	    }

	  /* sqrt(x) > c is the same as x > c*c.  */
	  return fold_build2_loc (loc, code, type, arg,
				  build_real (TREE_TYPE (arg), c2));
	}
      else if (code == LT_EXPR || code == LE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) < y is always true, when y is a very large
		 value and we don't care about NaNs or Infinities.  */
	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
		return omit_one_operand_loc (loc, type, integer_one_node, arg);

	      /* sqrt(x) < y is x != +Inf when y is very large and we
		 don't care about NaNs.  */
	      if (! HONOR_NANS (mode))
		return fold_build2_loc (loc, NE_EXPR, type, arg,
					build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) < y is x >= 0 when y is very large and we
		 don't care about Infinities.  */
	      if (! HONOR_INFINITIES (mode))
		return fold_build2_loc (loc, GE_EXPR, type, arg,
					build_real (TREE_TYPE (arg), dconst0));

	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
	      arg = save_expr (arg);
	      return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				      fold_build2_loc (loc, GE_EXPR, type, arg,
						       build_real (TREE_TYPE (arg),
								   dconst0)),
				      fold_build2_loc (loc, NE_EXPR, type, arg,
						       build_real (TREE_TYPE (arg),
								   c2)));
	    }

	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
	  if (! HONOR_NANS (mode))
	    return fold_build2_loc (loc, code, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
	  arg = save_expr (arg);
	  return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				  fold_build2_loc (loc, GE_EXPR, type, arg,
						   build_real (TREE_TYPE (arg),
							       dconst0)),
				  fold_build2_loc (loc, code, type, arg,
						   build_real (TREE_TYPE (arg),
							       c2)));
	}
    }

  return NULL_TREE;
}
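
/* For example, with the bound 2.0 and a finite squared bound:
   `sqrt(x) > 2.0' becomes `x > 4.0', and `sqrt(x) < 2.0' becomes
   `x >= 0.0 && x < 4.0' unless NaNs can be ignored, in which case
   the conjunction collapses to just `x < 4.0'.  */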
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
	return NULL_TREE;
      return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      arg0 = save_expr (arg0);
      return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
	return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
				arg0, build_real (TREE_TYPE (arg0), max));

      temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
			      arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}
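
/* For example, for a double ARG0: `x < +Inf' becomes `x <= DBL_MAX',
   `x >= +Inf' becomes `x > DBL_MAX', and `x != +Inf' becomes
   `!(x > DBL_MAX)' when NaNs are honored, since a NaN must still
   compare unequal to +Inf, which rules out plain `x <= DBL_MAX'.  */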
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (location_t loc,
		  enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  double_int val;
  bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
  bool neg_overflow;
  int overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1);  */
  overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
				   TREE_INT_CST_HIGH (arg01),
				   TREE_INT_CST_LOW (arg1),
				   TREE_INT_CST_HIGH (arg1),
				   &val.low, &val.high, unsigned_p);
  prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
  neg_overflow = false;

  if (unsigned_p)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp).  */
      overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
				       TREE_INT_CST_HIGH (prod),
				       TREE_INT_CST_LOW (tmp),
				       TREE_INT_CST_HIGH (tmp),
				       &val.low, &val.high, unsigned_p);
      hi = force_fit_type_double (TREE_TYPE (arg00), val,
				  -1, overflow | TREE_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  neg_overflow = true;
	  lo = int_const_binop (MINUS_EXPR, prod, tmp);
	  hi = prod;
	  break;

	case 0:
	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
	  hi = tmp;
	  break;

	case 1:
	  hi = int_const_binop (PLUS_EXPR, prod, tmp);
	  lo = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  hi = int_const_binop (MINUS_EXPR, prod, tmp);
	  lo = prod;
	  break;

	case 0:
	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
	  lo = tmp;
	  break;

	case 1:
	  neg_overflow = true;
	  lo = int_const_binop (PLUS_EXPR, prod, tmp);
	  hi = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
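
/* A worked example with signed truncating division: `X / 4 == 2'
   holds exactly for X in [8, 11], so PROD = 8, TMP = 3, LO = 8 and
   HI = 11, and the result is the range check 8 <= X && X <= 11.
   For a negative divisor such as `X / -4 < 2', the comparison is
   first swapped to > before the bounds are computed.  */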
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */

static tree
fold_single_bit_test_into_sign_test (location_t loc,
				     enum tree_code code, tree arg0, tree arg1,
				     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
	  /* This is only a win if casting to a signed type is cheap,
	     i.e. when arg00's type is not a partial mode.  */
	  && TYPE_PRECISION (TREE_TYPE (arg00))
	     == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
	{
	  tree stype = signed_type_for (TREE_TYPE (arg00));
	  return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				  result_type,
				  fold_convert_loc (loc, stype, arg00),
				  build_int_cst (stype, 0));
	}
    }

  return NULL_TREE;
}
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (location_t loc, enum tree_code code,
		      tree arg0, tree arg1, tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
	 test.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
						 result_type);
      if (tem)
	return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
	 Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
		      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert_loc (loc, intermediate_type, inner);

      if (bitnum != 0)
	inner = build2 (RSHIFT_EXPR, intermediate_type,
			inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      if (code == EQ_EXPR)
	inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type,
				 inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert_loc (loc, result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
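
/* For example, `(A & 8) != 0' becomes `(A >> 3) & 1' computed in the
   intermediate type chosen by the LOAD_EXTEND_OP logic above, and
   `(A & 8) == 0' additionally XORs the shifted bit with 1 to invert
   it before the final AND.  */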
/* Check whether we are allowed to reorder operands arg0 and arg1,
   such that the evaluation of arg1 occurs before arg0.  */

static bool
reorder_operands_p (const_tree arg0, const_tree arg1)
{
  if (! flag_evaluation_order)
    return true;
  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
    return true;
  return ! TREE_SIDE_EFFECTS (arg0)
	 && ! TREE_SIDE_EFFECTS (arg1);
}
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == FIXED_CST)
    return 0;
  if (TREE_CODE (arg0) == FIXED_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (optimize_function_for_size_p (cfun))
    return 0;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  /* Put SSA_NAMEs last.  */
  if (TREE_CODE (arg1) == SSA_NAME)
    return 0;
  if (TREE_CODE (arg0) == SSA_NAME)
    return 1;

  /* Put variables last.  */
  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  return 0;
}
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */

static tree
fold_widened_comparison (location_t loc, enum tree_code code,
			 tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (shorter_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, NULL_TREE);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
	  || ((TYPE_PRECISION (shorter_type)
	       >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
	      && (TYPE_UNSIGNED (shorter_type)
		  == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
	  || (TREE_CODE (arg1_unw) == INTEGER_CST
	      && (TREE_CODE (shorter_type) == INTEGER_TYPE
		  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
	      && int_fits_type_p (arg1_unw, shorter_type))))
    return fold_build2_loc (loc, code, type, arg0_unw,
			    fold_convert_loc (loc, shorter_type, arg1_unw));

  if (TREE_CODE (arg1_unw) != INTEGER_CST
      || TREE_CODE (shorter_type) != INTEGER_TYPE
      || !int_fits_type_p (arg1_unw, shorter_type))
    return NULL_TREE;

  /* If we are comparing with the integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      else if (below)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case GT_EXPR:
    case GE_EXPR:
      if (above)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      else if (below)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
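
/* For example, with `short s', the comparison `(int) s == 100000' is
   folded to constant false because 100000 lies above the upper bound
   of short, while `(int) s == 42' is narrowed to the short-mode
   comparison `s == 42'.  */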
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
			      tree arg0, tree arg1)
{
  tree arg0_inner;
  tree inner_type, outer_type;

  if (!CONVERT_EXPR_P (arg0))
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) != INTEGER_CST
      && !(CONVERT_EXPR_P (arg1)
	   && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
       || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
				  0, TREE_OVERFLOW (arg1));
  else
    arg1 = fold_convert_loc (loc, inner_type, arg1);

  return fold_build2_loc (loc, code, type, arg0_inner, arg1);
}
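
/* For example, for `int i', the test `(unsigned int) i == 5u' drops
   the sign-changing cast and becomes `i == 5': a same-precision
   conversion cannot change equality.  An ordered comparison such as
   `(unsigned int) i < 5u' is left alone, since relational order does
   depend on signedness.  */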
/* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
   step of the array.  Reconstructs s and delta in the case of s *
   delta being an integer constant (and thus already folded).  ADDR is
   the address.  MULT is the multiplicative expression.  If the
   function succeeds, the new address expression is returned.
   Otherwise NULL_TREE is returned.  LOC is the location of the
   resulting expression.  */

static tree
try_move_mult_to_index (location_t loc, tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;
  bool mdim = false;

  /* Strip the nops that might be added when converting op1 to sizetype.  */
  STRIP_NOPS (op1);

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  s = arg0;
	  delta = arg1;
	}
      else if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  s = arg1;
	  delta = arg0;
	}
      else
	return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Act as if op1 were delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
	{
	  tree domain;

	  /* Remember if this was a multi-dimensional array.  */
	  if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
	    mdim = true;

	  domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
	  if (! domain)
	    continue;
	  itype = TREE_TYPE (domain);

	  step = array_ref_element_size (ref);
	  if (TREE_CODE (step) != INTEGER_CST)
	    continue;

	  if (s)
	    {
	      if (! tree_int_cst_equal (step, s))
		continue;
	    }
	  else
	    {
	      /* Try if delta is a multiple of step.  */
	      tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
	      if (! tmp)
		continue;
	      delta = tmp;
	    }

	  /* Only fold here if we can verify we do not overflow one
	     dimension of a multi-dimensional array.  */
	  if (mdim)
	    {
	      tree tmp;

	      if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
		  || !TYPE_MAX_VALUE (domain)
		  || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
		continue;

	      tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
				     fold_convert_loc (loc, itype,
						       TREE_OPERAND (ref, 1)),
				     fold_convert_loc (loc, itype, delta));
	      if (!tmp
		  || TREE_CODE (tmp) != INTEGER_CST
		  || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
		continue;
	    }

	  break;
	}
      else if (TREE_CODE (ref) == COMPONENT_REF
	       && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
	{
	  tree domain;

	  /* Remember if this was a multi-dimensional array.  */
	  if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
	    mdim = true;

	  domain = TYPE_DOMAIN (TREE_TYPE (ref));
	  if (! domain)
	    continue;
	  itype = TREE_TYPE (domain);

	  step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
	  if (TREE_CODE (step) != INTEGER_CST)
	    continue;

	  if (s)
	    {
	      if (! tree_int_cst_equal (step, s))
		continue;
	    }
	  else
	    {
	      /* Try if delta is a multiple of step.  */
	      tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
	      if (! tmp)
		continue;
	      delta = tmp;
	    }

	  /* Only fold here if we can verify we do not overflow one
	     dimension of a multi-dimensional array.  */
	  if (mdim)
	    {
	      tree tmp;

	      if (!TYPE_MIN_VALUE (domain)
		  || !TYPE_MAX_VALUE (domain)
		  || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
		continue;

	      tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
				     fold_convert_loc (loc, itype,
						       TYPE_MIN_VALUE (domain)),
				     fold_convert_loc (loc, itype, delta));
	      if (TREE_CODE (tmp) != INTEGER_CST
		  || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
		continue;
	    }

	  break;
	}
      else
	mdim = false;

      if (!handled_component_p (ref))
	return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  SET_EXPR_LOCATION (ret, loc);
  pos = ret;

  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  if (TREE_CODE (ref) == ARRAY_REF)
    {
      TREE_OPERAND (pos, 1)
	= fold_build2_loc (loc, PLUS_EXPR, itype,
			   fold_convert_loc (loc, itype,
					     TREE_OPERAND (pos, 1)),
			   fold_convert_loc (loc, itype, delta));
      return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
    }
  else if (TREE_CODE (ref) == COMPONENT_REF)
    {
      gcc_assert (ret == pos);
      ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
			fold_build2_loc
			  (loc, PLUS_EXPR, itype,
			   fold_convert_loc (loc, itype,
					     TYPE_MIN_VALUE
					       (TYPE_DOMAIN (TREE_TYPE (ref)))),
			   fold_convert_loc (loc, itype, delta)),
			NULL_TREE, NULL_TREE);
      return build_fold_addr_expr_loc (loc, ret);
    }
  else
    gcc_unreachable ();
}
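
/* For example, with `int a[10]' (4-byte elements), the address
   `&a[1] p+ 8' is rewritten to `&a[3]': the constant 8 is recognized
   as delta 2 times the element size, and 2 is added to the index.  */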
/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  if (POINTER_TYPE_P (typea))
    {
      /* Convert the pointer types into integer before taking the difference.  */
      tree ta = fold_convert_loc (loc, ssizetype, a);
      tree ta1 = fold_convert_loc (loc, ssizetype, a1);
      diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
    }
  else
    diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);

  if (!diff || !integer_onep (diff))
    return NULL_TREE;

  return fold_build2_loc (loc, GE_EXPR, type, a, y);
}
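
/* For example, given BOUND `a < n' and INEQ `y < a + 1', the
   difference (a + 1) - a folds to 1, so the inequality is rewritten
   as `a >= y'; the caller keeps the `a < n' conjunct, which rules out
   the wrap-around case a == MAX.  */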
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else if (TREE_CODE (arg0) == INTEGER_CST)
    {
      arg00 = build_one_cst (type);
      arg01 = arg0;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      arg10 = build_one_cst (type);
      /* As we canonicalize A - 2 to A + -2 get rid of that sign for
	 the purpose of this canonicalization.  */
      if (TREE_INT_CST_HIGH (arg1) == -1
	  && negate_expr_p (arg1)
	  && code == PLUS_EXPR)
	{
	  arg11 = negate_expr (arg1);
	  code = MINUS_EXPR;
	}
      else
	arg11 = arg1;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (host_integerp (arg01, 0)
	   && host_integerp (arg11, 0))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = TREE_INT_CST_LOW (arg01);
      int11 = TREE_INT_CST_LOW (arg11);

      /* Move min of absolute values to int11.  */
      if (absu_hwi (int01) < absu_hwi (int11))
	{
	  tmp = int01, int01 = int11, int11 = tmp;
	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
	  maybe_same = alt0;
	  swap = true;
	}
      else
	maybe_same = arg11;

      if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
	  /* The remainder should not be a constant, otherwise we
	     end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
	     increased the number of multiplications necessary.  */
	  && TREE_CODE (arg10) != INTEGER_CST)
	{
	  alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
				  build_int_cst (TREE_TYPE (arg00),
						 int01 / int11));
	  alt1 = arg10;
	  same = maybe_same;
	  if (swap)
	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
	}
    }

  if (same)
    return fold_build2_loc (loc, MULT_EXPR, type,
			    fold_build2_loc (loc, code, type,
					     fold_convert_loc (loc, type, alt0),
					     fold_convert_loc (loc, type, alt1)),
			    fold_convert_loc (loc, type, same));

  return NULL_TREE;
}
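
/* Two examples: `x * 3 + y * 3' has identical multiplicands and
   becomes `(x + y) * 3'; `x * 12 + y * 4' has none, but 4 is a power
   of two dividing 12, so it becomes `(x * 3 + y) * 4'.  The guard on
   ARG10 above keeps `i * 4 + 2' from becoming `(i * 2 + 1) * 2'.  */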
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (bitpos < HOST_BITS_PER_WIDE_INT)
	value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
      else
	value = (unsigned char) (TREE_INT_CST_HIGH (expr)
				 >> (bitpos - HOST_BITS_PER_WIDE_INT));

      if (total_bytes > UNITS_PER_WORD)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
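
/* For example, encoding the 32-bit constant 0x01020304 for a
   little-endian target with words of at least 4 bytes yields the
   bytes 04 03 02 01; for a big-endian target it yields 01 02 03 04.  */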
/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words, bitpos;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if (total_bytes > len)
    return 0;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
    }
  return total_bytes;
}
/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_complex (const_tree expr, unsigned char *ptr, int len)
{
  int rsize, isize;
  tree part;

  part = TREE_REALPART (expr);
  rsize = native_encode_expr (part, ptr, len);
  if (rsize == 0)
    return 0;
  part = TREE_IMAGPART (expr);
  isize = native_encode_expr (part, ptr+rsize, len-rsize);
  if (isize != rsize)
    return 0;
  return rsize + isize;
}
/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_vector (const_tree expr, unsigned char *ptr, int len)
{
  int i, size, offset, count;
  tree itype, elem, elements;

  offset = 0;
  elements = TREE_VECTOR_CST_ELTS (expr);
  count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
  itype = TREE_TYPE (TREE_TYPE (expr));
  size = GET_MODE_SIZE (TYPE_MODE (itype));
  for (i = 0; i < count; i++)
    {
      if (elements)
	{
	  elem = TREE_VALUE (elements);
	  elements = TREE_CHAIN (elements);
	}
      else
	elem = NULL_TREE;

      if (elem)
	{
	  if (native_encode_expr (elem, ptr+offset, len-offset) != size)
	    return 0;
	}
      else
	{
	  if (offset + size > len)
	    return 0;
	  memset (ptr+offset, 0, size);
	}
      offset += size;
    }
  return offset;
}
/* Subroutine of native_encode_expr.  Encode the STRING_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_string (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  HOST_WIDE_INT total_bytes;

  if (TREE_CODE (type) != ARRAY_TYPE
      || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
      || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
      || !host_integerp (TYPE_SIZE_UNIT (type), 0))
    return 0;
  total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
  if (total_bytes > len)
    return 0;
  if (TREE_STRING_LENGTH (expr) < total_bytes)
    {
      memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
      memset (ptr + TREE_STRING_LENGTH (expr), 0,
	      total_bytes - TREE_STRING_LENGTH (expr));
    }
  else
    memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
  return total_bytes;
}
/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
   REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
   buffer PTR of length LEN bytes.  Return the number of bytes
   placed in the buffer, or zero upon failure.  */

int
native_encode_expr (const_tree expr, unsigned char *ptr, int len)
{
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return native_encode_int (expr, ptr, len);

    case REAL_CST:
      return native_encode_real (expr, ptr, len);

    case COMPLEX_CST:
      return native_encode_complex (expr, ptr, len);

    case VECTOR_CST:
      return native_encode_vector (expr, ptr, len);

    case STRING_CST:
      return native_encode_string (expr, ptr, len);

    default:
      return 0;
    }
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;
  double_int result;

  if (total_bytes > len)
    return NULL_TREE;
  if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
    return NULL_TREE;

  result = double_int_zero;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (total_bytes > UNITS_PER_WORD)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      value = ptr[offset];

      if (bitpos < HOST_BITS_PER_WIDE_INT)
	result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
      else
	result.high |= (unsigned HOST_WIDE_INT) value
		       << (bitpos - HOST_BITS_PER_WIDE_INT);
    }

  return double_int_to_tree (type, result);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  int byte, offset, word, words, bitpos;
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  memset (tmp, 0, sizeof (tmp));
  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_complex (tree type, const unsigned char *ptr, int len)
{
  tree etype, rpart, ipart;
  int size;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  if (size * 2 > len)
    return NULL_TREE;
  rpart = native_interpret_expr (etype, ptr, size);
  if (!rpart)
    return NULL_TREE;
  ipart = native_interpret_expr (etype, ptr+size, size);
  if (!ipart)
    return NULL_TREE;
  return build_complex (type, rpart, ipart);
}
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_vector (tree type, const unsigned char *ptr, int len)
{
  tree etype, elem, elements;
  int i, size, count;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  count = TYPE_VECTOR_SUBPARTS (type);
  if (size * count > len)
    return NULL_TREE;

  elements = NULL_TREE;
  for (i = count - 1; i >= 0; i--)
    {
      elem = native_interpret_expr (etype, ptr+(i*size), size);
      if (!elem)
	return NULL_TREE;
      elements = tree_cons (NULL_TREE, elem, elements);
    }
  return build_vector (type, elements);
}
/* Subroutine of fold_view_convert_expr.  Interpret the contents of
   the buffer PTR of length LEN as a constant of type TYPE.  For
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
   return NULL_TREE.  */

static tree
native_interpret_expr (tree type, const unsigned char *ptr, int len)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      return native_interpret_int (type, ptr, len);

    case REAL_TYPE:
      return native_interpret_real (type, ptr, len);

    case COMPLEX_TYPE:
      return native_interpret_complex (type, ptr, len);

    case VECTOR_TYPE:
      return native_interpret_vector (type, ptr, len);

    default:
      return NULL_TREE;
    }
}
/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */

static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}
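
/* For example, on a target with IEEE single-precision floats, a
   VIEW_CONVERT_EXPR from the INTEGER_CST 0x3f800000 to float folds to
   1.0f: the integer is encoded into the buffer byte by byte and the
   bytes are then reinterpreted as a REAL_CST.  */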
/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
	t = build1_loc (loc, NOP_EXPR, ptrtype, t);
    }
  else if (TREE_CODE (t) == MEM_REF
	   && integer_zerop (TREE_OPERAND (t, 1)))
    return TREE_OPERAND (t, 0);
  else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
    {
      t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));

      if (TREE_TYPE (t) != ptrtype)
	t = fold_convert_loc (loc, ptrtype, t);
    }
  else
    t = build1_loc (loc, ADDR_EXPR, ptrtype, t);

  return t;
}
/* Build an expression for the address of T.  */

tree
build_fold_addr_expr_loc (location_t loc, tree t)
{
  tree ptrtype = build_pointer_type (TREE_TYPE (t));

  return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
}
7654 /* Fold a unary expression of code CODE and type TYPE with operand
7655 OP0. Return the folded expression if folding is successful.
7656 Otherwise, return NULL_TREE. */
7659 fold_unary_loc (location_t loc
, enum tree_code code
, tree type
, tree op0
)
7663 enum tree_code_class kind
= TREE_CODE_CLASS (code
);
7665 gcc_assert (IS_EXPR_CODE_CLASS (kind
)
7666 && TREE_CODE_LENGTH (code
) == 1);
7671 if (CONVERT_EXPR_CODE_P (code
)
7672 || code
== FLOAT_EXPR
|| code
== ABS_EXPR
|| code
== NEGATE_EXPR
)
7674 /* Don't use STRIP_NOPS, because signedness of argument type
7676 STRIP_SIGN_NOPS (arg0
);
7680 /* Strip any conversions that don't change the mode. This
7681 is safe for every expression, except for a comparison
7682 expression because its signedness is derived from its
7685 Note that this is done as an internal manipulation within
7686 the constant folder, in order to find the simplest
7687 representation of the arguments so that their form can be
7688 studied. In any cases, the appropriate type conversions
7689 should be put back in the tree that will get out of the
7695 if (TREE_CODE_CLASS (code
) == tcc_unary
)
7697 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
7698 return build2 (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7699 fold_build1_loc (loc
, code
, type
,
7700 fold_convert_loc (loc
, TREE_TYPE (op0
),
7701 TREE_OPERAND (arg0
, 1))));
7702 else if (TREE_CODE (arg0
) == COND_EXPR
)
7704 tree arg01
= TREE_OPERAND (arg0
, 1);
7705 tree arg02
= TREE_OPERAND (arg0
, 2);
7706 if (! VOID_TYPE_P (TREE_TYPE (arg01
)))
7707 arg01
= fold_build1_loc (loc
, code
, type
,
7708 fold_convert_loc (loc
,
7709 TREE_TYPE (op0
), arg01
));
7710 if (! VOID_TYPE_P (TREE_TYPE (arg02
)))
7711 arg02
= fold_build1_loc (loc
, code
, type
,
7712 fold_convert_loc (loc
,
7713 TREE_TYPE (op0
), arg02
));
7714 tem
= fold_build3_loc (loc
, COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
7717 /* If this was a conversion, and all we did was to move into
7718 inside the COND_EXPR, bring it back out. But leave it if
7719 it is a conversion from integer to integer and the
7720 result precision is no wider than a word since such a
7721 conversion is cheap and may be optimized away by combine,
7722 while it couldn't if it were outside the COND_EXPR. Then return
7723 so we don't get into an infinite recursion loop taking the
7724 conversion out and then back in. */
7726 if ((CONVERT_EXPR_CODE_P (code
)
7727 || code
== NON_LVALUE_EXPR
)
7728 && TREE_CODE (tem
) == COND_EXPR
7729 && TREE_CODE (TREE_OPERAND (tem
, 1)) == code
7730 && TREE_CODE (TREE_OPERAND (tem
, 2)) == code
7731 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 1))
7732 && ! VOID_TYPE_P (TREE_OPERAND (tem
, 2))
7733 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))
7734 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 2), 0)))
7735 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7737 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem
, 1), 0))))
7738 && TYPE_PRECISION (TREE_TYPE (tem
)) <= BITS_PER_WORD
)
7739 || flag_syntax_only
))
7740 tem
= build1_loc (loc
, code
, type
,
7742 TREE_TYPE (TREE_OPERAND
7743 (TREE_OPERAND (tem
, 1), 0)),
7744 TREE_OPERAND (tem
, 0),
7745 TREE_OPERAND (TREE_OPERAND (tem
, 1), 0),
7746 TREE_OPERAND (TREE_OPERAND (tem
, 2),
7755 /* Re-association barriers around constants and other re-association
7756 barriers can be removed. */
7757 if (CONSTANT_CLASS_P (op0
)
7758 || TREE_CODE (op0
) == PAREN_EXPR
)
7759 return fold_convert_loc (loc
, type
, op0
);
7764 case FIX_TRUNC_EXPR
:
7765 if (TREE_TYPE (op0
) == type
)
7768 if (COMPARISON_CLASS_P (op0
))
7770 /* If we have (type) (a CMP b) and type is an integral type, return
7771 new expression involving the new type. Canonicalize
7772 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7774 Do not fold the result as that would not simplify further, also
7775 folding again results in recursions. */
7776 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
7777 return build2_loc (loc
, TREE_CODE (op0
), type
,
7778 TREE_OPERAND (op0
, 0),
7779 TREE_OPERAND (op0
, 1));
7780 else if (!INTEGRAL_TYPE_P (type
))
7781 return build3_loc (loc
, COND_EXPR
, type
, op0
,
7782 constant_boolean_node (true, type
),
7783 constant_boolean_node (false, type
));
7786 /* Handle cases of two conversions in a row. */
7787 if (CONVERT_EXPR_P (op0
))
7789 tree inside_type
= TREE_TYPE (TREE_OPERAND (op0
, 0));
7790 tree inter_type
= TREE_TYPE (op0
);
7791 int inside_int
= INTEGRAL_TYPE_P (inside_type
);
7792 int inside_ptr
= POINTER_TYPE_P (inside_type
);
7793 int inside_float
= FLOAT_TYPE_P (inside_type
);
7794 int inside_vec
= TREE_CODE (inside_type
) == VECTOR_TYPE
;
7795 unsigned int inside_prec
= TYPE_PRECISION (inside_type
);
7796 int inside_unsignedp
= TYPE_UNSIGNED (inside_type
);
7797 int inter_int
= INTEGRAL_TYPE_P (inter_type
);
7798 int inter_ptr
= POINTER_TYPE_P (inter_type
);
7799 int inter_float
= FLOAT_TYPE_P (inter_type
);
7800 int inter_vec
= TREE_CODE (inter_type
) == VECTOR_TYPE
;
7801 unsigned int inter_prec
= TYPE_PRECISION (inter_type
);
7802 int inter_unsignedp
= TYPE_UNSIGNED (inter_type
);
7803 int final_int
= INTEGRAL_TYPE_P (type
);
7804 int final_ptr
= POINTER_TYPE_P (type
);
7805 int final_float
= FLOAT_TYPE_P (type
);
7806 int final_vec
= TREE_CODE (type
) == VECTOR_TYPE
;
7807 unsigned int final_prec
= TYPE_PRECISION (type
);
7808 int final_unsignedp
= TYPE_UNSIGNED (type
);
7810 /* In addition to the cases of two conversions in a row
7811 handled below, if we are converting something to its own
7812 type via an object of identical or wider precision, neither
7813 conversion is needed. */
7814 if (TYPE_MAIN_VARIANT (inside_type
) == TYPE_MAIN_VARIANT (type
)
7815 && (((inter_int
|| inter_ptr
) && final_int
)
7816 || (inter_float
&& final_float
))
7817 && inter_prec
>= final_prec
)
7818 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7820 /* Likewise, if the intermediate and initial types are either both
7821 float or both integer, we don't need the middle conversion if the
7822 former is wider than the latter and doesn't change the signedness
7823 (for integers). Avoid this if the final type is a pointer since
7824 then we sometimes need the middle conversion. Likewise if the
7825 final type has a precision not equal to the size of its mode. */
7826 if (((inter_int
&& inside_int
)
7827 || (inter_float
&& inside_float
)
7828 || (inter_vec
&& inside_vec
))
7829 && inter_prec
>= inside_prec
7830 && (inter_float
|| inter_vec
7831 || inter_unsignedp
== inside_unsignedp
)
7832 && ! (final_prec
!= GET_MODE_BITSIZE (TYPE_MODE (type
))
7833 && TYPE_MODE (type
) == TYPE_MODE (inter_type
))
7835 && (! final_vec
|| inter_prec
== inside_prec
))
7836 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7838 /* If we have a sign-extension of a zero-extended value, we can
7839 replace that by a single zero-extension. */
7840 if (inside_int
&& inter_int
&& final_int
7841 && inside_prec
< inter_prec
&& inter_prec
< final_prec
7842 && inside_unsignedp
&& !inter_unsignedp
)
7843 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7845 /* Two conversions in a row are not needed unless:
7846 - some conversion is floating-point (overstrict for now), or
7847 - some conversion is a vector (overstrict for now), or
7848 - the intermediate type is narrower than both initial and
7850 - the intermediate type and innermost type differ in signedness,
7851 and the outermost type is wider than the intermediate, or
7852 - the initial type is a pointer type and the precisions of the
7853 intermediate and final types differ, or
7854 - the final type is a pointer type and the precisions of the
7855 initial and intermediate types differ. */
7856 if (! inside_float
&& ! inter_float
&& ! final_float
7857 && ! inside_vec
&& ! inter_vec
&& ! final_vec
7858 && (inter_prec
>= inside_prec
|| inter_prec
>= final_prec
)
7859 && ! (inside_int
&& inter_int
7860 && inter_unsignedp
!= inside_unsignedp
7861 && inter_prec
< final_prec
)
7862 && ((inter_unsignedp
&& inter_prec
> inside_prec
)
7863 == (final_unsignedp
&& final_prec
> inter_prec
))
7864 && ! (inside_ptr
&& inter_prec
!= final_prec
)
7865 && ! (final_ptr
&& inside_prec
!= inter_prec
)
7866 && ! (final_prec
!= GET_MODE_BITSIZE (TYPE_MODE (type
))
7867 && TYPE_MODE (type
) == TYPE_MODE (inter_type
)))
7868 return fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 0));
7871 /* Handle (T *)&A.B.C for A being of type T and B and C
7872 living at offset zero. This occurs frequently in
7873 C++ upcasting and then accessing the base. */
7874 if (TREE_CODE (op0
) == ADDR_EXPR
7875 && POINTER_TYPE_P (type
)
7876 && handled_component_p (TREE_OPERAND (op0
, 0)))
7878 HOST_WIDE_INT bitsize
, bitpos
;
7880 enum machine_mode mode
;
7881 int unsignedp
, volatilep
;
7882 tree base
= TREE_OPERAND (op0
, 0);
7883 base
= get_inner_reference (base
, &bitsize
, &bitpos
, &offset
,
7884 &mode
, &unsignedp
, &volatilep
, false);
7885 /* If the reference was to a (constant) zero offset, we can use
7886 the address of the base if it has the same base type
7887 as the result type and the pointer type is unqualified. */
7888 if (! offset
&& bitpos
== 0
7889 && (TYPE_MAIN_VARIANT (TREE_TYPE (type
))
7890 == TYPE_MAIN_VARIANT (TREE_TYPE (base
)))
7891 && TYPE_QUALS (type
) == TYPE_UNQUALIFIED
)
7892 return fold_convert_loc (loc
, type
,
7893 build_fold_addr_expr_loc (loc
, base
));
7896 if (TREE_CODE (op0
) == MODIFY_EXPR
7897 && TREE_CONSTANT (TREE_OPERAND (op0
, 1))
7898 /* Detect assigning a bitfield. */
7899 && !(TREE_CODE (TREE_OPERAND (op0
, 0)) == COMPONENT_REF
7901 (TREE_OPERAND (TREE_OPERAND (op0
, 0), 1))))
7903 /* Don't leave an assignment inside a conversion
7904 unless assigning a bitfield. */
7905 tem
= fold_build1_loc (loc
, code
, type
, TREE_OPERAND (op0
, 1));
7906 /* First do the assignment, then return converted constant. */
7907 tem
= build2_loc (loc
, COMPOUND_EXPR
, TREE_TYPE (tem
), op0
, tem
);
7908 TREE_NO_WARNING (tem
) = 1;
7909 TREE_USED (tem
) = 1;
7913 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7914 constants (if x has signed type, the sign bit cannot be set
7915 in c). This folds extension into the BIT_AND_EXPR.
7916 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7917 very likely don't have maximal range for their precision and this
7918 transformation effectively doesn't preserve non-maximal ranges. */
7919 if (TREE_CODE (type
) == INTEGER_TYPE
7920 && TREE_CODE (op0
) == BIT_AND_EXPR
7921 && TREE_CODE (TREE_OPERAND (op0
, 1)) == INTEGER_CST
)
7923 tree and_expr
= op0
;
7924 tree and0
= TREE_OPERAND (and_expr
, 0);
7925 tree and1
= TREE_OPERAND (and_expr
, 1);
7928 if (TYPE_UNSIGNED (TREE_TYPE (and_expr
))
7929 || (TYPE_PRECISION (type
)
7930 <= TYPE_PRECISION (TREE_TYPE (and_expr
))))
7932 else if (TYPE_PRECISION (TREE_TYPE (and1
))
7933 <= HOST_BITS_PER_WIDE_INT
7934 && host_integerp (and1
, 1))
7936 unsigned HOST_WIDE_INT cst
;
7938 cst
= tree_low_cst (and1
, 1);
7939 cst
&= (HOST_WIDE_INT
) -1
7940 << (TYPE_PRECISION (TREE_TYPE (and1
)) - 1);
7941 change
= (cst
== 0);
7942 #ifdef LOAD_EXTEND_OP
7944 && !flag_syntax_only
7945 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0
)))
7948 tree uns
= unsigned_type_for (TREE_TYPE (and0
));
7949 and0
= fold_convert_loc (loc
, uns
, and0
);
7950 and1
= fold_convert_loc (loc
, uns
, and1
);
7956 tem
= force_fit_type_double (type
, tree_to_double_int (and1
),
7957 0, TREE_OVERFLOW (and1
));
7958 return fold_build2_loc (loc
, BIT_AND_EXPR
, type
,
7959 fold_convert_loc (loc
, type
, and0
), tem
);
7963 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7964 when one of the new casts will fold away. Conservatively we assume
7965 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7966 if (POINTER_TYPE_P (type
)
7967 && TREE_CODE (arg0
) == POINTER_PLUS_EXPR
7968 && (!TYPE_RESTRICT (type
) || TYPE_RESTRICT (TREE_TYPE (arg0
)))
7969 && (TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
7970 || TREE_CODE (TREE_OPERAND (arg0
, 0)) == NOP_EXPR
7971 || TREE_CODE (TREE_OPERAND (arg0
, 1)) == NOP_EXPR
))
7973 tree arg00
= TREE_OPERAND (arg0
, 0);
7974 tree arg01
= TREE_OPERAND (arg0
, 1);
7976 return fold_build_pointer_plus_loc
7977 (loc
, fold_convert_loc (loc
, type
, arg00
), arg01
);
7980 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7981 of the same precision, and X is an integer type not narrower than
7982 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7983 if (INTEGRAL_TYPE_P (type
)
7984 && TREE_CODE (op0
) == BIT_NOT_EXPR
7985 && INTEGRAL_TYPE_P (TREE_TYPE (op0
))
7986 && CONVERT_EXPR_P (TREE_OPERAND (op0
, 0))
7987 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (op0
)))
7989 tem
= TREE_OPERAND (TREE_OPERAND (op0
, 0), 0);
7990 if (INTEGRAL_TYPE_P (TREE_TYPE (tem
))
7991 && TYPE_PRECISION (type
) <= TYPE_PRECISION (TREE_TYPE (tem
)))
7992 return fold_build1_loc (loc
, BIT_NOT_EXPR
, type
,
7993 fold_convert_loc (loc
, type
, tem
));
      /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
	 type of X and Y (integer types only).  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == MULT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  /* Be careful not to introduce new overflows.  */
	  tree mult_type;
	  if (TYPE_OVERFLOW_WRAPS (type))
	    mult_type = type;
	  else
	    mult_type = unsigned_type_for (type);

	  if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
	    {
	      tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
				     fold_convert_loc (loc, mult_type,
						       TREE_OPERAND (op0, 0)),
				     fold_convert_loc (loc, mult_type,
						       TREE_OPERAND (op0, 1)));
	      return fold_convert_loc (loc, type, tem);
	    }
	}
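      /* Illustrative example (editor's sketch, not part of the original
	 source): with `long x, y;' on an LP64 target, (int) (x * y) is
	 folded to a 32-bit multiply: the operands are truncated to
	 `unsigned int' (so no new undefined overflow is introduced), the
	 product is formed there, and the result is converted to `int'.  */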
      tem = fold_convert_const (code, type, op0);
      return tem ? tem : NULL_TREE;
    case ADDR_SPACE_CONVERT_EXPR:
      if (integer_zerop (arg0))
	return fold_convert_const (code, type, arg0);
      return NULL_TREE;

    case FIXED_CONVERT_EXPR:
      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;
    case VIEW_CONVERT_EXPR:
      if (TREE_TYPE (op0) == type)
	return op0;
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				type, TREE_OPERAND (op0, 0));
      if (TREE_CODE (op0) == MEM_REF)
	return fold_build2_loc (loc, MEM_REF, type,
				TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));

      /* For integral conversions with the same precision or pointer
	 conversions use a NOP_EXPR instead.  */
      if ((INTEGRAL_TYPE_P (type)
	   || POINTER_TYPE_P (type))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
	      || POINTER_TYPE_P (TREE_TYPE (op0)))
	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
	return fold_convert_loc (loc, type, op0);

      /* Strip inner integral conversions that do not change the precision.  */
      if (CONVERT_EXPR_P (op0)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
	      || POINTER_TYPE_P (TREE_TYPE (op0)))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
	      || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
	  && (TYPE_PRECISION (TREE_TYPE (op0))
	      == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				type, TREE_OPERAND (op0, 0));

      return fold_view_convert_expr (type, op0);
    case NEGATE_EXPR:
      tem = fold_negate_expr (loc, arg0);
      if (tem)
	return fold_convert_loc (loc, type, tem);
      return NULL_TREE;
    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
	       && TREE_CODE (type) == REAL_TYPE)
	{
	  tree targ0 = strip_float_extensions (arg0);
	  if (targ0 != arg0)
	    return fold_convert_loc (loc, type,
				     fold_build1_loc (loc, ABS_EXPR,
						      TREE_TYPE (targ0),
						      targ0));
	}
      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
      else if (TREE_CODE (arg0) == ABS_EXPR)
	return arg0;
      else if (tree_expr_nonnegative_p (arg0))
	return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = fold_strip_sign_ops (arg0);
	  if (tem)
	    return fold_build1_loc (loc, ABS_EXPR, type,
				    fold_convert_loc (loc, type, tem));
	}
      return NULL_TREE;
    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
	  tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
				  negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
	  tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
	  return build_complex (type, rpart, negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      return NULL_TREE;
    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build2_loc (loc, MINUS_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)),
				build_int_cst (type, 1));
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
	       && ((TREE_CODE (arg0) == MINUS_EXPR
		    && integer_onep (TREE_OPERAND (arg0, 1)))
		   || (TREE_CODE (arg0) == PLUS_EXPR
		       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
	return fold_build1_loc (loc, NEGATE_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)));
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
					 fold_convert_loc (loc, type,
							   TREE_OPERAND (arg0, 0)))))
	return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
					 fold_convert_loc (loc, type,
							   TREE_OPERAND (arg0, 1)))))
	return fold_build2_loc (loc, BIT_XOR_EXPR, type,
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg0, 0)), tem);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
	  int count = TYPE_VECTOR_SUBPARTS (type), i;

	  for (i = 0; i < count; i++)
	    {
	      if (elements)
		{
		  elem = TREE_VALUE (elements);
		  elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
		  if (elem == NULL_TREE)
		    break;
		  elements = TREE_CHAIN (elements);
		}
	      else
		elem = build_int_cst (TREE_TYPE (type), -1);
	      list = tree_cons (NULL_TREE, elem, list);
	    }
	  if (i == count)
	    return build_vector (type, nreverse (list));
	}

      return NULL_TREE;
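      /* Illustrative examples (editor's sketch, not part of the original
	 source), valid in two's complement where ~a == -a - 1:
	   ~(-a)    -> a - 1   (a = 5:  ~(-5) == 4)
	   ~(a - 1) -> -a      (a = 5:  ~4 == -5)
	   ~(x ^ y) -> ~x ^ y  (applied when ~x simplifies, e.g. x constant)  */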
    case TRUTH_NOT_EXPR:
      /* The argument to invert_truthvalue must have Boolean type.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
	arg0 = fold_convert_loc (loc, boolean_type_node, arg0);

      /* Note that the operand of this must be an int
	 and its values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (loc, arg0);
      if (!tem)
	return NULL_TREE;
      return fold_convert_loc (loc, type, tem);
    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg0, 1));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_REALPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
				 fold_build1_loc (loc, REALPART_EXPR, itype,
						  TREE_OPERAND (arg0, 0)),
				 fold_build1_loc (loc, REALPART_EXPR, itype,
						  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, REALPART_EXPR, itype,
				 TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_COS);
		if (fn)
		  return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;
    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return build_zero_cst (type);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
				     TREE_OPERAND (arg0, 0));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
				 fold_build1_loc (loc, IMAGPART_EXPR, itype,
						  TREE_OPERAND (arg0, 0)),
				 fold_build1_loc (loc, IMAGPART_EXPR, itype,
						  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, negate_expr (tem));
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_SIN);
		if (fn)
		  return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;
    case INDIRECT_REF:
      /* Fold *&X to X if X is an lvalue.  */
      if (TREE_CODE (op0) == ADDR_EXPR)
	{
	  tree op00 = TREE_OPERAND (op0, 0);
	  if ((TREE_CODE (op00) == VAR_DECL
	       || TREE_CODE (op00) == PARM_DECL
	       || TREE_CODE (op00) == RESULT_DECL)
	      && !TREE_READONLY (op00))
	    return op00;
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* If the operation was a conversion do _not_ mark a resulting constant
   with TREE_OVERFLOW if the original constant was not.  These conversions
   have implementation defined behavior and retaining the TREE_OVERFLOW
   flag here would confuse later passes such as VRP.  */

tree
fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
				tree type, tree op0)
{
  tree res = fold_unary_loc (loc, code, type, op0);
  if (res
      && TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (op0) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (code))
    TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);

  return res;
}
/* Fold a binary bitwise/truth expression of code CODE and type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting expression.
   ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_truth_andor (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1, tree op0, tree op1)
{
  tree tem;

  /* We only do these simplifications if we are optimizing.  */
  if (!optimize)
    return NULL_TREE;

  /* Check for things like (A || B) && (A || C).  We can convert this
     to A || (B && C).  Note that either operator can be any of the four
     truth and/or operations and the transformation will still be
     valid.  Also note that we only care about order for the
     ANDIF and ORIF operators.  If B contains side effects, this
     might change the truth-value of A.  */
  if (TREE_CODE (arg0) == TREE_CODE (arg1)
      && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_AND_EXPR
	  || TREE_CODE (arg0) == TRUTH_OR_EXPR)
      && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
    {
      tree a00 = TREE_OPERAND (arg0, 0);
      tree a01 = TREE_OPERAND (arg0, 1);
      tree a10 = TREE_OPERAND (arg1, 0);
      tree a11 = TREE_OPERAND (arg1, 1);
      int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			  || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			 && (code == TRUTH_AND_EXPR
			     || code == TRUTH_OR_EXPR));

      if (operand_equal_p (a00, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				fold_build2_loc (loc, code, type, a01, a11));
      else if (commutative && operand_equal_p (a00, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
				fold_build2_loc (loc, code, type, a01, a10));
      else if (commutative && operand_equal_p (a01, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
				fold_build2_loc (loc, code, type, a00, a11));

      /* This case is tricky because we must either have commutative
	 operators or else A10 must not have side-effects.  */

      else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
	       && operand_equal_p (a01, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type,
				fold_build2_loc (loc, code, type, a00, a10),
				a01);
    }

  /* See if we can build a range comparison.  */
  if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
    return tem;

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
      if (tem)
	return fold_build2_loc (loc, code, type, tem, arg1);
    }

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
      if (tem)
	return fold_build2_loc (loc, code, type, arg0, tem);
    }

  /* Check for the possibility of merging component references.  If our
     lhs is another similar operation, try to merge its rhs with our
     rhs.  Then try to merge our lhs and rhs.  */
  if (TREE_CODE (arg0) == code
      && 0 != (tem = fold_truth_andor_1 (loc, code, type,
					 TREE_OPERAND (arg0, 1), arg1)))
    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

  if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
    return tem;

  if ((BRANCH_COST (optimize_function_for_speed_p (cfun),
		    false) >= 2)
      && LOGICAL_OP_NON_SHORT_CIRCUIT
      && (code == TRUTH_AND_EXPR
	  || code == TRUTH_ANDIF_EXPR
	  || code == TRUTH_OR_EXPR
	  || code == TRUTH_ORIF_EXPR))
    {
      enum tree_code ncode, icode;

      ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
	      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
      icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;

      /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
	 We don't want to pack more than two leaves to a non-IF AND/OR
	 expression.
	 If tree-code of left-hand operand isn't an AND/OR-IF code and not
	 equal to IF-CODE, then we don't want to add right-hand operand.
	 If the inner right-hand side of left-hand operand has
	 side-effects, or isn't simple, then we can't add to it,
	 as otherwise we might destroy the if-sequence.  */
      if (TREE_CODE (arg0) == icode
	  && simple_operand_p_2 (arg1)
	  /* Needed for sequence points to handle trappings, and
	     side-effects.  */
	  && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
				 arg1);
	  return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
				  tem);
	}
      /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
	 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
      else if (TREE_CODE (arg1) == icode
	       && simple_operand_p_2 (arg0)
	       /* Needed for sequence points to handle trappings, and
		  side-effects.  */
	       && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_build2_loc (loc, ncode, type,
				 arg0, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, icode, type, tem,
				  TREE_OPERAND (arg1, 1));
	}
      /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
	 into (A OR B).
	 For sequence point consistency, we need to check for trapping,
	 and side-effects.  */
      else if (code == icode && simple_operand_p_2 (arg0)
	       && simple_operand_p_2 (arg1))
	return fold_build2_loc (loc, ncode, type, arg0, arg1);
    }

  return NULL_TREE;
}
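/* Illustrative example (editor's sketch, not part of the original
   source): (a || b) && (a || c) distributes to a || (b && c) above;
   this is safe only because `b' (the inner right-hand operand) has no
   side effects, so hoisting `a' cannot change what gets evaluated.  */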
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code compl_code;

  if (code == MIN_EXPR)
    compl_code = MAX_EXPR;
  else if (code == MAX_EXPR)
    compl_code = MIN_EXPR;
  else
    gcc_unreachable ();

  /* MIN (MAX (a, b), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));

  /* MIN (MAX (b, a), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));

  /* MIN (a, MAX (a, b)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));

  /* MIN (a, MAX (b, a)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));

  return NULL_TREE;
}
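/* Illustrative example (editor's sketch, not part of the original
   source): MIN (MAX (a, b), b) folds to b, since MAX (a, b) >= b makes
   the outer MIN pick b; the remaining three patterns are the commuted
   variants of the same identity.  */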
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
				 tree arg0, tree arg1,
				 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;
  bool swap = false;

  /* Match A +- CST code arg1 and CST code arg1.  We can change the
     first form only if overflow is undefined.  */
  if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
	 /* In principle pointers also have undefined overflow behavior,
	    but that causes problems elsewhere.  */
	 && !POINTER_TYPE_P (TREE_TYPE (arg0))
	 && (code0 == MINUS_EXPR
	     || code0 == PLUS_EXPR)
	 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	|| code0 == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  if (code0 == INTEGER_CST)
    cst0 = arg0;
  else
    cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  if (code0 == INTEGER_CST)
    {
      /* CST <= arg1  ->  CST-1 < arg1.  */
      if (code == LE_EXPR && sgn0 == 1)
	code = LT_EXPR;
      /* -CST < arg1  ->  -CST-1 <= arg1.  */
      else if (code == LT_EXPR && sgn0 == -1)
	code = LE_EXPR;
      /* CST > arg1  ->  CST-1 >= arg1.  */
      else if (code == GT_EXPR && sgn0 == 1)
	code = GE_EXPR;
      /* -CST >= arg1  ->  -CST-1 > arg1.  */
      else if (code == GE_EXPR && sgn0 == -1)
	code = GT_EXPR;
      else
	return NULL_TREE;
      /* arg1 code' CST' might be more canonical.  */
      swap = true;
    }
  else
    {
      /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
      if (code == LT_EXPR
	  && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = LE_EXPR;
      /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
      else if (code == GT_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = GE_EXPR;
      /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
      else if (code == LE_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = LT_EXPR;
      /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
      else if (code == GE_EXPR
	       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = GT_EXPR;
      else
	return NULL_TREE;
      *strict_overflow_p = true;
    }

  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its type's range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
      && ((sgn0 == 1
	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
	  || (sgn0 == -1
	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
    /* We cannot swap the comparison here as that would cause us to
       endlessly recurse.  */
    return NULL_TREE;

  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
		       cst0, build_int_cst (TREE_TYPE (cst0), 1));
  if (code0 != INTEGER_CST)
    t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
  t = fold_convert (TREE_TYPE (arg1), t);

  /* If swapping might yield a more canonical form, do so.  */
  if (swap)
    return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
  else
    return fold_build2_loc (loc, code, type, t, arg1);
}
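/* Illustrative example (editor's sketch, not part of the original
   source): for signed `int x' with undefined overflow, x + 2 > y is
   canonicalized to x + 1 >= y, shrinking the constant from 2 to 1;
   *STRICT_OVERFLOW_P records that this relied on x + 2 not wrapping.  */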
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
			       tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when reducing constant in comparison");

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
				       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
				       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}
/* Return whether BASE + OFFSET + BITPOS may wrap around the address
   space.  This is used to avoid issuing overflow warnings for
   expressions like &p->x which cannot wrap.  */

static bool
pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
{
  unsigned HOST_WIDE_INT offset_low, total_low;
  HOST_WIDE_INT size, offset_high, total_high;

  if (!POINTER_TYPE_P (TREE_TYPE (base)))
    return true;

  if (bitpos < 0)
    return true;

  if (offset == NULL_TREE)
    {
      offset_low = 0;
      offset_high = 0;
    }
  else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
    return true;
  else
    {
      offset_low = TREE_INT_CST_LOW (offset);
      offset_high = TREE_INT_CST_HIGH (offset);
    }

  if (add_double_with_sign (offset_low, offset_high,
			    bitpos / BITS_PER_UNIT, 0,
			    &total_low, &total_high,
			    true))
    return true;

  if (total_high != 0)
    return true;

  size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
  if (size <= 0)
    return true;

  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
     array.  */
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      HOST_WIDE_INT base_size;

      base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
      if (base_size > 0 && size < base_size)
	size = base_size;
    }

  return total_low > (unsigned HOST_WIDE_INT) size;
}
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should call fold_binary.  Fold a comparison with
   tree code CODE and type TYPE with operands OP0 and OP1.  Return
   the folded comparison or NULL_TREE.  */

static tree
fold_comparison (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  tem = fold_relational_const (code, type, arg0, arg1);
  if (tem != NULL_TREE)
    return tem;

  /* If one arg is a real or integer constant, put it last.  */
  if (tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);

  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
      && (TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;
      tree variable = TREE_OPERAND (arg0, 0);
      tree lhs;
      int lhs_add;
      lhs_add = TREE_CODE (arg0) != PLUS_EXPR;

      lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
			     TREE_TYPE (arg1), const2, const1);

      /* If the constant operation overflowed this can be
	 simplified as a comparison against INT_MAX/INT_MIN.  */
      if (TREE_CODE (lhs) == INTEGER_CST
	  && TREE_OVERFLOW (lhs))
	{
	  int const1_sgn = tree_int_cst_sgn (const1);
	  enum tree_code code2 = code;

	  /* Get the sign of the constant on the lhs if the
	     operation were VARIABLE + CONST1.  */
	  if (TREE_CODE (arg0) == MINUS_EXPR)
	    const1_sgn = -const1_sgn;

	  /* The sign of the constant determines if we overflowed
	     INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
	     Canonicalize to the INT_MIN overflow by swapping the comparison
	     if necessary.  */
	  if (const1_sgn == -1)
	    code2 = swap_tree_comparison (code);

	  /* We now can look at the canonicalized case
	       VARIABLE + 1  CODE2  INT_MIN
	     and decide on the result.  */
	  if (code2 == LT_EXPR
	      || code2 == LE_EXPR
	      || code2 == EQ_EXPR)
	    return omit_one_operand_loc (loc, type, boolean_false_node, variable);
	  else if (code2 == NE_EXPR
		   || code2 == GE_EXPR
		   || code2 == GT_EXPR)
	    return omit_one_operand_loc (loc, type, boolean_true_node, variable);
	}

      if (TREE_CODE (lhs) == TREE_CODE (arg1)
	  && (TREE_CODE (lhs) != INTEGER_CST
	      || !TREE_OVERFLOW (lhs)))
	{
	  if (code != EQ_EXPR && code != NE_EXPR)
	    fold_overflow_warning ("assuming signed overflow does not occur "
				   "when changing X +- C1 cmp C2 to "
				   "X cmp C2 -+ C1",
				   WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type, variable, lhs);
	}
    }
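  /* Illustrative example (editor's sketch, not part of the original
     source): for signed `int x', x + 10 < 20 becomes x < 10.  If
     instead the new constant overflows, e.g. x - 1 < INT_MAX, the
     comparison collapses to a constant via the INT_MIN
     canonicalization above.  */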
  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
	  || TREE_CODE (arg1) == ADDR_EXPR
	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      enum machine_mode mode;
      int volatilep, unsignedp;
      bool indirect_base0 = false, indirect_base1 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
	 get_inner_reference, but put it back by stripping INDIRECT_REF
	 off the base object if possible.  indirect_baseN will be true
	 if baseN is not an address but refers to the object itself.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
				       &bitsize, &bitpos0, &offset0, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base0) == INDIRECT_REF)
	    base0 = TREE_OPERAND (base0, 0);
	  else
	    indirect_base0 = true;
	}
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  base0 = TREE_OPERAND (arg0, 0);
	  STRIP_SIGN_NOPS (base0);
	  if (TREE_CODE (base0) == ADDR_EXPR)
	    {
	      base0 = TREE_OPERAND (base0, 0);
	      indirect_base0 = true;
	    }
	  offset0 = TREE_OPERAND (arg0, 1);
	}

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
	{
	  base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
				       &bitsize, &bitpos1, &offset1, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base1) == INDIRECT_REF)
	    base1 = TREE_OPERAND (base1, 0);
	  else
	    indirect_base1 = true;
	}
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	{
	  base1 = TREE_OPERAND (arg1, 0);
	  STRIP_SIGN_NOPS (base1);
	  if (TREE_CODE (base1) == ADDR_EXPR)
	    {
	      base1 = TREE_OPERAND (base1, 0);
	      indirect_base1 = true;
	    }
	  offset1 = TREE_OPERAND (arg1, 1);
	}

      /* A local variable can never be pointed to by
	 the default SSA name of an incoming parameter.  */
      if ((TREE_CODE (arg0) == ADDR_EXPR
	   && indirect_base0
	   && TREE_CODE (base0) == VAR_DECL
	   && auto_var_in_fn_p (base0, current_function_decl)
	   && !indirect_base1
	   && TREE_CODE (base1) == SSA_NAME
	   && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
	   && SSA_NAME_IS_DEFAULT_DEF (base1))
	  || (TREE_CODE (arg1) == ADDR_EXPR
	      && indirect_base1
	      && TREE_CODE (base1) == VAR_DECL
	      && auto_var_in_fn_p (base1, current_function_decl)
	      && !indirect_base0
	      && TREE_CODE (base0) == SSA_NAME
	      && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
	      && SSA_NAME_IS_DEFAULT_DEF (base0)))
	{
	  if (code == NE_EXPR)
	    return constant_boolean_node (1, type);
	  else if (code == EQ_EXPR)
	    return constant_boolean_node (0, type);
	}
      /* If we have equivalent bases we might be able to simplify.  */
      else if (indirect_base0 == indirect_base1
	       && operand_equal_p (base0, base1, 0))
	{
	  /* We can fold this expression to a constant if the non-constant
	     offset parts are equal.  */
	  if ((offset0 == offset1
	       || (offset0 && offset1
		   && operand_equal_p (offset0, offset1, 0)))
	      && (code == EQ_EXPR
		  || code == NE_EXPR
		  || (indirect_base0 && DECL_P (base0))
		  || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      if (code != EQ_EXPR
		  && code != NE_EXPR
		  && bitpos0 != bitpos1
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_CONDITIONAL);

	      switch (code)
		{
		case EQ_EXPR:
		  return constant_boolean_node (bitpos0 == bitpos1, type);
		case NE_EXPR:
		  return constant_boolean_node (bitpos0 != bitpos1, type);
		case LT_EXPR:
		  return constant_boolean_node (bitpos0 < bitpos1, type);
		case LE_EXPR:
		  return constant_boolean_node (bitpos0 <= bitpos1, type);
		case GE_EXPR:
		  return constant_boolean_node (bitpos0 >= bitpos1, type);
		case GT_EXPR:
		  return constant_boolean_node (bitpos0 > bitpos1, type);
		default:;
		}
	    }
	  /* We can simplify the comparison to a comparison of the variable
	     offset parts if the constant offset parts are equal.
	     Be careful to use signed size type here because otherwise we
	     mess with array offsets in the wrong way.  This is possible
	     because pointer arithmetic is restricted to remain within an
	     object and overflow on pointer differences is undefined as of
	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
	  else if (bitpos0 == bitpos1
		   && ((code == EQ_EXPR || code == NE_EXPR)
		       || (indirect_base0 && DECL_P (base0))
		       || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      /* By converting to signed size type we cover middle-end pointer
		 arithmetic which operates on unsigned pointer types of size
		 type size and ARRAY_REF offsets which are properly sign or
		 zero extended from their type in case it is narrower than
		 size type.  */
	      if (offset0 == NULL_TREE)
		offset0 = build_int_cst (ssizetype, 0);
	      else
		offset0 = fold_convert_loc (loc, ssizetype, offset0);
	      if (offset1 == NULL_TREE)
		offset1 = build_int_cst (ssizetype, 0);
	      else
		offset1 = fold_convert_loc (loc, ssizetype, offset1);

	      if (code != EQ_EXPR
		  && code != NE_EXPR
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_COMPARISON);

	      return fold_build2_loc (loc, code, type, offset0, offset1);
	    }
	}
      /* For non-equal bases we can simplify if they are addresses
	 of local binding decls or constants.  */
      else if (indirect_base0 && indirect_base1
	       /* We know that !operand_equal_p (base0, base1, 0)
		  because the if condition was false.  But make
		  sure two decls are not the same.  */
	       && base0 != base1
	       && TREE_CODE (arg0) == ADDR_EXPR
	       && TREE_CODE (arg1) == ADDR_EXPR
	       && (((TREE_CODE (base0) == VAR_DECL
		     || TREE_CODE (base0) == PARM_DECL)
		    && (targetm.binds_local_p (base0)
			|| CONSTANT_CLASS_P (base1)))
		   || CONSTANT_CLASS_P (base0))
	       && (((TREE_CODE (base1) == VAR_DECL
		     || TREE_CODE (base1) == PARM_DECL)
		    && (targetm.binds_local_p (base1)
			|| CONSTANT_CLASS_P (base0)))
		   || CONSTANT_CLASS_P (base1)))
	{
	  if (code == EQ_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_false_node,
					  arg0, arg1);
	  else if (code == NE_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_true_node,
					  arg0, arg1);
	}
      /* For equal offsets we can simplify to a comparison of the
	 base addresses.  */
      else if (bitpos0 == bitpos1
	       && (indirect_base0
		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
	       && (indirect_base1
		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
	       && ((offset0 == offset1)
		   || (offset0 && offset1
		       && operand_equal_p (offset0, offset1, 0))))
	{
	  if (indirect_base0)
	    base0 = build_fold_addr_expr_loc (loc, base0);
	  if (indirect_base1)
	    base1 = build_fold_addr_expr_loc (loc, base1);
	  return fold_build2_loc (loc, code, type, base0, base1);
	}
    }
  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
				      "occur when combining constants around "
				      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
	 of lower absolute value than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const2, const1);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  variable1,
				  fold_build2_loc (loc,
						   TREE_CODE (arg1), TREE_TYPE (arg1),
						   variable2, cst));
	}

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const1, const2);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
				  fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
						   variable1, cst),
				  variable2);
	}
    }
  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
     signed arithmetic case.  That form is created by the compiler
     often enough for folding it to be of value.  One example is in
     computing loop trip counts after Operator Strength Reduction.  */
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && TREE_CODE (arg0) == MULT_EXPR
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && integer_zerop (arg1))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;			/* zero */
      tree variable1 = TREE_OPERAND (arg0, 0);
      enum tree_code cmp_code = code;

      /* Handle unfolded multiplication by zero.  */
      if (integer_zerop (const1))
	return fold_build2_loc (loc, cmp_code, type, const1, const2);

      fold_overflow_warning (("assuming signed overflow does not occur when "
			      "eliminating multiplication in comparison "
			      "with zero"),
			     WARN_STRICT_OVERFLOW_COMPARISON);

      /* If const1 is negative we swap the sense of the comparison.  */
      if (tree_int_cst_sgn (const1) < 0)
	cmp_code = swap_tree_comparison (cmp_code);

      return fold_build2_loc (loc, cmp_code, type, variable1, const2);
    }
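  /* Illustrative example (editor's sketch, not part of the original
     source): for signed `int x', x * 4 > 0 folds to x > 0, and
     x * -4 > 0 folds to x < 0 (a negative multiplier swaps the
     comparison); both rely on signed overflow being undefined.  */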
  tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
  if (tem)
    return tem;
  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
    {
      tree targ0 = strip_float_extensions (arg0);
      tree targ1 = strip_float_extensions (arg1);
      tree newtype = TREE_TYPE (targ0);

      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	newtype = TREE_TYPE (targ1);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, newtype, targ0),
				fold_convert_loc (loc, newtype, targ1));

      /* (-a) CMP (-b) -> b CMP a  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
				TREE_OPERAND (arg0, 0));

      if (TREE_CODE (arg1) == REAL_CST)
	{
	  REAL_VALUE_TYPE cst;
	  cst = TREE_REAL_CST (arg1);

	  /* (-a) CMP CST -> a swap(CMP) (-CST)  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR)
	    return fold_build2_loc (loc, swap_tree_comparison (code), type,
				    TREE_OPERAND (arg0, 0),
				    build_real (TREE_TYPE (arg1),
						real_value_negate (&cst)));

	  /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
	  /* a CMP (-0) -> a CMP 0  */
	  if (REAL_VALUE_MINUS_ZERO (cst))
	    return fold_build2_loc (loc, code, type, arg0,
				    build_real (TREE_TYPE (arg1), dconst0));

	  /* x != NaN is always true, other ops are always false.  */
	  if (REAL_VALUE_ISNAN (cst)
	      && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
	      return omit_one_operand_loc (loc, type, tem, arg0);
	    }

	  /* Fold comparisons against infinity.  */
	  if (REAL_VALUE_ISINF (cst)
	      && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = fold_inf_compare (loc, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}

      /* If this is a comparison of a real constant with a PLUS_EXPR
	 or a MINUS_EXPR of a real constant, we can convert it into a
	 comparison with a revised real constant as long as no overflow
	 occurs when unsafe_math_optimizations are enabled.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == REAL_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      arg1, TREE_OPERAND (arg0, 1)))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Likewise, we can simplify a comparison of a real constant with
	 a MINUS_EXPR whose first operand is also a real constant, i.e.
	 (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
	 floating-point types only if -fassociative-math is set.  */
      if (flag_associative_math
	  && TREE_CODE (arg1) == REAL_CST
	  && TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
	  && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
				      arg1))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, swap_tree_comparison (code), type,
				TREE_OPERAND (arg0, 1), tem);

      /* Fold comparisons against built-in math functions.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && flag_unsafe_math_optimizations
	  && ! flag_errno_math)
	{
	  enum built_in_function fcode = builtin_mathfn_code (arg0);

	  if (fcode != END_BUILTINS)
	    {
	      tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}
    }
  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
      && CONVERT_EXPR_P (arg0))
    {
      /* If we are widening one operand of an integer comparison,
	 see if the other operand is similarly being widened.  Perhaps we
	 can do the comparison in the narrower type.  */
      tem = fold_widened_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;

      /* Or if we are changing signedness.  */
      tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;
    }
  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
	  || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (loc, code, type, op0, op1);
      if (tem)
	return tem;
    }
  /* Simplify comparison of something with itself.  (For IEEE
     floating-point, we can only do some of these simplifications.)  */
  if (operand_equal_p (arg0, arg1, 0))
    {
      switch (code)
	{
	case EQ_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  break;

	case GE_EXPR:
	case LE_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);

	case NE_EXPR:
	  /* For NE, we can only do this simplification if integer
	     or we don't honor IEEE floating point NaNs.  */
	  if (FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    break;
	  /* ... fall through ...  */
	case GT_EXPR:
	case LT_EXPR:
	  return constant_boolean_node (0, type);
	default:
	  gcc_unreachable ();
	}
    }
  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like  ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	  /* Don't handle degenerate cases here; they should already
	     have been handled anyway.  */
	  && cval1 != 0 && cval2 != 0
	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	{
	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	  /* We can't just pass T to eval_subst in case cval1 or cval2
	     was the same as ARG1.  */

	  tree high_result
		= fold_build2_loc (loc, code, type,
				   eval_subst (loc, arg0, cval1, maxval,
					       cval2, minval),
				   arg1);
	  tree equal_result
		= fold_build2_loc (loc, code, type,
				   eval_subst (loc, arg0, cval1, maxval,
					       cval2, maxval),
				   arg1);
	  tree low_result
		= fold_build2_loc (loc, code, type,
				   eval_subst (loc, arg0, cval1, minval,
					       cval2, maxval),
				   arg1);

	  /* All three of these results should be 0 or 1.  Confirm they are.
	     Then use those values to select the proper code to use.  */

	  if (TREE_CODE (high_result) == INTEGER_CST
	      && TREE_CODE (equal_result) == INTEGER_CST
	      && TREE_CODE (low_result) == INTEGER_CST)
	    {
	      /* Make a 3-bit mask with the high-order bit being the
		 value for `>', the next for '=', and the low for '<'.  */
	      switch ((integer_onep (high_result) * 4)
		      + (integer_onep (equal_result) * 2)
		      + integer_onep (low_result))
		{
		case 0:
		  /* Always false.  */
		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
		case 1:
		  code = LT_EXPR;
		  break;
		case 2:
		  code = EQ_EXPR;
		  break;
		case 3:
		  code = LE_EXPR;
		  break;
		case 4:
		  code = GT_EXPR;
		  break;
		case 5:
		  code = NE_EXPR;
		  break;
		case 6:
		  code = GE_EXPR;
		  break;
		case 7:
		  /* Always true.  */
		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
		}

	      if (save_p)
		{
		  tem = save_expr (build2 (code, type, cval1, cval2));
		  SET_EXPR_LOCATION (tem, loc);
		  return tem;
		}
	      return fold_build2_loc (loc, code, type, cval1, cval2);
	    }
	}
    }
  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (loc, code, type, arg0, arg1);
      if (tem != NULL_TREE)
	return tem;
    }
  /* Fold ~X op ~Y as Y op X.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == BIT_NOT_EXPR)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, code, type,
			      fold_convert_loc (loc, cmp_type,
						TREE_OPERAND (arg1, 0)),
			      TREE_OPERAND (arg0, 0));
    }

  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == INTEGER_CST)
    {
      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
      return fold_build2_loc (loc, swap_tree_comparison (code), type,
			      TREE_OPERAND (arg0, 0),
			      fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
					       fold_convert_loc (loc, cmp_type,
								 arg1)));
    }

  return NULL_TREE;
}
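/* Illustrative example (editor's sketch, not part of the original
   source): since ~x == -x - 1 is strictly decreasing, ~x < ~y folds
   to y < x, and ~x == 5 folds to x == ~5, i.e. x == -6.  */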
/* Subroutine of fold_binary.  Optimize complex multiplications of the
   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
   argument EXPR represents the expression "z" of type TYPE.  */

static tree
fold_mult_zconjz (location_t loc, tree type, tree expr)
{
  tree itype = TREE_TYPE (type);
  tree rpart, ipart, tem;

  if (TREE_CODE (expr) == COMPLEX_EXPR)
    {
      rpart = TREE_OPERAND (expr, 0);
      ipart = TREE_OPERAND (expr, 1);
    }
  else if (TREE_CODE (expr) == COMPLEX_CST)
    {
      rpart = TREE_REALPART (expr);
      ipart = TREE_IMAGPART (expr);
    }
  else
    {
      expr = save_expr (expr);
      rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
      ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
    }

  rpart = save_expr (rpart);
  ipart = save_expr (ipart);
  tem = fold_build2_loc (loc, PLUS_EXPR, itype,
			 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
			 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
  return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
			  build_zero_cst (itype));
}
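/* Illustrative example (editor's sketch, not part of the original
   source): for `_Complex double z', z * conj(z) folds to
   COMPLEX_EXPR <re*re + im*im, 0.0>, replacing a full complex multiply
   by two real multiplies and an add; the save_exprs keep `z' from
   being evaluated twice.  */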
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
   power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
   guarantees that P and N have the same least significant log2(M) bits.
   N is not otherwise constrained.  In particular, N is not normalized to
   0 <= N < M as is common.  In general, the precise value of P is unknown.
   M is chosen as large as possible such that constant N can be determined.

   Returns M and sets *RESIDUE to N.

   If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
   account.  This is not always possible due to PR 35705.  */

static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
				 bool allow_func_align)
{
  enum tree_code code;

  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      unsigned int bitalign;
      bitalign = get_object_alignment_1 (TREE_OPERAND (expr, 0), residue);
      *residue /= BITS_PER_UNIT;
      return bitalign / BITS_PER_UNIT;
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue,
						 allow_func_align);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
	{
	  *residue += TREE_INT_CST_LOW (op1);
	  return modulus;
	}
      else if (inner_code == MULT_EXPR)
	{
	  op1 = TREE_OPERAND (op1, 1);
	  if (TREE_CODE (op1) == INTEGER_CST)
	    {
	      unsigned HOST_WIDE_INT align;

	      /* Compute the greatest power-of-2 divisor of op1.  */
	      align = TREE_INT_CST_LOW (op1);
	      align &= -align;

	      /* If align is non-zero and less than *modulus, replace
		 *modulus with align.  If align is 0, then either op1 is 0
		 or the greatest power-of-2 divisor of op1 doesn't fit in an
		 unsigned HOST_WIDE_INT.  In either case, no additional
		 constraint is imposed.  */
	      if (align)
		modulus = MIN (modulus, align);

	      return modulus;
	    }
	}
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
}
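/* Illustrative example (editor's sketch, not part of the original
   source): for `int a[N] __attribute__((aligned(16)));' and the
   pointer &a[0] p+ i * 4, the ADDR_EXPR contributes modulus 16 with
   residue 0 and the MULT_EXPR step contributes 4 & -4 == 4, so the
   function returns M == 4 with *RESIDUE == 0: the pointer value is
   known to be 0 mod 4.  */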
/* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
   CONSTRUCTOR ARG into array ELTS and return true if successful.  */

static bool
vec_cst_ctor_to_array (tree arg, tree *elts)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;

  if (TREE_CODE (arg) == VECTOR_CST)
    {
      tree t;

      for (i = 0, t = TREE_VECTOR_CST_ELTS (arg);
	   i < nelts && t; i++, t = TREE_CHAIN (t))
	elts[i] = TREE_VALUE (t);
      if (t)
	return false;
    }
  else if (TREE_CODE (arg) == CONSTRUCTOR)
    {
      constructor_elt *elt;

      FOR_EACH_VEC_ELT (constructor_elt, CONSTRUCTOR_ELTS (arg), i, elt)
	if (i >= nelts)
	  return false;
	else
	  elts[i] = elt->value;
    }
  else
    return false;
  for (; i < nelts; i++)
    elts[i]
      = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
  return true;
}
/* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
   selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
   NULL_TREE otherwise.  */

static tree
fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
  tree *elts;
  bool need_ctor = false;

  gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
	      && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
  if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
      || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
    return NULL_TREE;

  elts = XALLOCAVEC (tree, nelts * 3);
  if (!vec_cst_ctor_to_array (arg0, elts)
      || !vec_cst_ctor_to_array (arg1, elts + nelts))
    return NULL_TREE;

  for (i = 0; i < nelts; i++)
    {
      if (!CONSTANT_CLASS_P (elts[sel[i]]))
	need_ctor = true;
      elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
    }

  if (need_ctor)
    {
      VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, nelts);
      for (i = 0; i < nelts; i++)
	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
      return build_constructor (type, v);
    }
  else
    {
      tree vals = NULL_TREE;
      for (i = 0; i < nelts; i++)
	vals = tree_cons (NULL_TREE, elts[3 * nelts - i - 1], vals);
      return build_vector (type, vals);
    }
}
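/* Illustrative example (editor's sketch, not part of the original
   source): with nelts == 4, arg0 == {0,1,2,3}, arg1 == {4,5,6,7} and
   sel == {0,4,1,5}, selector values 0..3 index arg0 and 4..7 index
   arg1, so the fold yields the VECTOR_CST {0,4,1,5}; a CONSTRUCTOR is
   built instead only if a selected element is non-constant.  */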
/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1.  LOC is the location of the resulting expression.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

tree
fold_binary_loc (location_t loc,
		 enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree arg0, arg1, tem;
  tree t1 = NULL_TREE;
  bool strict_overflow_p;

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 2
	      && op0 != NULL_TREE
	      && op1 != NULL_TREE);

  arg0 = op0;
  arg1 = op1;

  /* Strip any conversions that don't change the mode.  This is
     safe for every expression, except for a comparison expression
     because its signedness is derived from its operands.  So, in
     the latter case, only strip conversions that don't change the
     signedness.  MIN_EXPR/MAX_EXPR also need signedness of arguments
     preserved.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     case, the appropriate type conversions should be put back in
     the tree that will get out of the constant folder.  */

  if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
    {
      STRIP_SIGN_NOPS (arg0);
      STRIP_SIGN_NOPS (arg1);
    }
  else
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
    }

  /* Note that TREE_CONSTANT isn't enough: static var addresses are
     constant but we can't do arithmetic on them.  */
  if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
      || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
    {
      if (kind == tcc_binary)
	{
	  /* Make sure type and arg0 have the same saturating flag.  */
	  gcc_assert (TYPE_SATURATING (type)
		      == TYPE_SATURATING (TREE_TYPE (arg0)));
	  tem = const_binop (code, arg0, arg1);
	}
      else if (kind == tcc_comparison)
	tem = fold_relational_const (code, type, arg0, arg1);
      else
	tem = NULL_TREE;

      if (tem != NULL_TREE)
	{
	  if (TREE_TYPE (tem) != type)
	    tem = fold_convert_loc (loc, type, tem);
	  return tem;
	}
    }
  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, code, type, op1, op0);

  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.

     First check for cases where an arithmetic operation is applied to a
     compound, conditional, or comparison operation.  Push the arithmetic
     operation inside the compound or conditional to see if any folding
     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.

     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
     one of the operands is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
     code below would make the expression more complex.  Change it to a
     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */

  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
       || code == EQ_EXPR || code == NE_EXPR)
      && ((truth_value_p (TREE_CODE (arg0))
	   && (truth_value_p (TREE_CODE (arg1))
	       || (TREE_CODE (arg1) == BIT_AND_EXPR
		   && integer_onep (TREE_OPERAND (arg1, 1)))))
	  || (truth_value_p (TREE_CODE (arg1))
	      && (truth_value_p (TREE_CODE (arg0))
		  || (TREE_CODE (arg0) == BIT_AND_EXPR
		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
    {
      tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
			     : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
			     : TRUTH_XOR_EXPR,
			     boolean_type_node,
			     fold_convert_loc (loc, boolean_type_node, arg0),
			     fold_convert_loc (loc, boolean_type_node, arg1));

      if (code == EQ_EXPR)
	tem = invert_truthvalue_loc (loc, tem);

      return fold_convert_loc (loc, type, tem);
    }
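  /* Illustrative example (editor's sketch, not part of the original
     source): with `int a, b, c, d;', (a < b) & (c < d) is rewritten as
     a TRUTH_AND_EXPR on boolean operands, and (a < b) == (c < d)
     becomes the inversion of a TRUTH_XOR_EXPR, so later folding works
     on truth values instead of bit operations.  */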
  if (TREE_CODE_CLASS (code) == tcc_binary
      || TREE_CODE_CLASS (code) == tcc_comparison)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	{
	  tem = fold_build2_loc (loc, code, type,
				 fold_convert_loc (loc, TREE_TYPE (op0),
						   TREE_OPERAND (arg0, 1)), op1);
	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
			     tem);
	}
      if (TREE_CODE (arg1) == COMPOUND_EXPR
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_build2_loc (loc, code, type, op0,
				 fold_convert_loc (loc, TREE_TYPE (op1),
						   TREE_OPERAND (arg1, 1)));
	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
			     tem);
	}

      if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
	{
	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
						     arg0, arg1,
						     /*cond_first_p=*/1);
	  if (tem != NULL_TREE)
	    return tem;
	}

      if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
	{
	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
						     arg1, arg0,
						     /*cond_first_p=*/0);
	  if (tem != NULL_TREE)
	    return tem;
	}
    }
  switch (code)
    {
    case MEM_REF:
      /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
        {
          tree iref = TREE_OPERAND (arg0, 0);
          return fold_build2 (MEM_REF, type,
                              TREE_OPERAND (iref, 0),
                              int_const_binop (PLUS_EXPR, arg1,
                                               TREE_OPERAND (iref, 1)));
        }

      /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2].  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && handled_component_p (TREE_OPERAND (arg0, 0)))
        {
          tree base;
          HOST_WIDE_INT coffset;
          base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
                                                &coffset);
          if (!base)
            return NULL_TREE;
          return fold_build2 (MEM_REF, type,
                              build_fold_addr_expr (base),
                              int_const_binop (PLUS_EXPR, arg1,
                                               size_int (coffset)));
        }

      return NULL_TREE;
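      /* Illustration (not part of the original sources): the MEM_REF folds
         above merge constant offsets so nested address arithmetic collapses
         into a single memory reference.  A source-level sketch; the struct
         and function names are hypothetical.

           struct pair { int first; int second; };

           int load_second (struct pair *p)
           {
             return *&p->second;   // one MEM_REF of p at offsetof (struct pair, second)
           }
      */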
    case POINTER_PLUS_EXPR:
      /* 0 +p index -> (type)index */
      if (integer_zerop (arg0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

      /* PTR +p 0 -> PTR */
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
          && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
        return fold_convert_loc (loc, type,
                                 fold_build2_loc (loc, PLUS_EXPR, sizetype,
                                                  fold_convert_loc (loc, sizetype,
                                                                    arg1),
                                                  fold_convert_loc (loc, sizetype,
                                                                    arg0)));

      /* (PTR +p B) +p A -> PTR +p (B + A) */
      if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
        {
          tree inner;
          tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
          tree arg00 = TREE_OPERAND (arg0, 0);
          inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
                                   arg01, fold_convert_loc (loc, sizetype, arg1));
          return fold_convert_loc (loc, type,
                                   fold_build_pointer_plus_loc (loc,
                                                                arg00, inner));
        }

      /* PTR_CST +p CST -> CST1 */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
        return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
                                fold_convert_loc (loc, type, arg1));

      /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
         of the array.  The loop optimizer sometimes produces this type of
         expression.  */
      if (TREE_CODE (arg0) == ADDR_EXPR)
        {
          tem = try_move_mult_to_index (loc, arg0,
                                        fold_convert_loc (loc,
                                                          sizetype, arg1));
          if (tem)
            return fold_convert_loc (loc, type, tem);
        }

      return NULL_TREE;
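      /* Illustration (not part of the original sources): nested
         POINTER_PLUS_EXPRs are re-associated so the two offsets are added
         once in sizetype.  Hypothetical function name.

           int *skip (int *p, unsigned long i, unsigned long j)
           {
             return (p + i) + j;   // (PTR +p B) +p A becomes PTR +p (B + A)
           }
      */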
    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));
      /* (-A) + B -> B - A */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                fold_convert_loc (loc, type, arg1),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));

      if (INTEGRAL_TYPE_P (type))
        {
          /* Convert ~A + 1 to -A.  */
          if (TREE_CODE (arg0) == BIT_NOT_EXPR
              && integer_onep (arg1))
            return fold_build1_loc (loc, NEGATE_EXPR, type,
                                    fold_convert_loc (loc, type,
                                                      TREE_OPERAND (arg0, 0)));

          /* ~X + X is -1.  */
          if (TREE_CODE (arg0) == BIT_NOT_EXPR
              && !TYPE_OVERFLOW_TRAPS (type))
            {
              tree tem = TREE_OPERAND (arg0, 0);

              STRIP_NOPS (tem);
              if (operand_equal_p (tem, arg1, 0))
                {
                  t1 = build_int_cst_type (type, -1);
                  return omit_one_operand_loc (loc, type, t1, arg1);
                }
            }

          /* X + ~X is -1.  */
          if (TREE_CODE (arg1) == BIT_NOT_EXPR
              && !TYPE_OVERFLOW_TRAPS (type))
            {
              tree tem = TREE_OPERAND (arg1, 0);

              STRIP_NOPS (tem);
              if (operand_equal_p (arg0, tem, 0))
                {
                  t1 = build_int_cst_type (type, -1);
                  return omit_one_operand_loc (loc, type, t1, arg0);
                }
            }

          /* X + (X / CST) * -CST is X % CST.  */
          if (TREE_CODE (arg1) == MULT_EXPR
              && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
              && operand_equal_p (arg0,
                                  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
            {
              tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
              tree cst1 = TREE_OPERAND (arg1, 1);
              tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
                                          cst1, cst0);
              if (sum && integer_zerop (sum))
                return fold_convert_loc (loc, type,
                                         fold_build2_loc (loc, TRUNC_MOD_EXPR,
                                                          TREE_TYPE (arg0), arg0,
                                                          cst0));
            }
        }
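      /* Illustration (not part of the original sources): two of the integral
         PLUS_EXPR folds above at source level.  Hypothetical function names;
         the second example relies on C's truncating division.

           int neg_via_not (int a)
           {
             return ~a + 1;                 // folded to -a
           }

           int mod_via_div (int x)
           {
             return x + (x / 16) * -16;     // folded to x % 16
           }
      */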
      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
         same or one.  Make sure type is not saturating.
         fold_plusminus_mult_expr will re-associate.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && !TYPE_SATURATING (type)
          && (!FLOAT_TYPE_P (type) || flag_associative_math))
        {
          tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
          if (tem)
            return tem;
        }

      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
             with a constant, and the two constants have no bits in common,
             we should treat this as a BIT_IOR_EXPR since this may produce more
             simplifications.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
              && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
              && integer_zerop (const_binop (BIT_AND_EXPR,
                                             TREE_OPERAND (arg0, 1),
                                             TREE_OPERAND (arg1, 1))))
            {
              code = BIT_IOR_EXPR;
              goto bit_ior;
            }

          /* Reassociate (plus (plus (mult) (foo)) (mult)) as
             (plus (plus (mult) (mult)) (foo)) so that we can
             take advantage of the factoring cases below.  */
          if (TYPE_OVERFLOW_WRAPS (type)
              && (((TREE_CODE (arg0) == PLUS_EXPR
                    || TREE_CODE (arg0) == MINUS_EXPR)
                   && TREE_CODE (arg1) == MULT_EXPR)
                  || ((TREE_CODE (arg1) == PLUS_EXPR
                       || TREE_CODE (arg1) == MINUS_EXPR)
                      && TREE_CODE (arg0) == MULT_EXPR)))
            {
              tree parg0, parg1, parg, marg;
              enum tree_code pcode;

              if (TREE_CODE (arg1) == MULT_EXPR)
                parg = arg0, marg = arg1;
              else
                parg = arg1, marg = arg0;
              pcode = TREE_CODE (parg);
              parg0 = TREE_OPERAND (parg, 0);
              parg1 = TREE_OPERAND (parg, 1);
              STRIP_NOPS (parg0);
              STRIP_NOPS (parg1);

              if (TREE_CODE (parg0) == MULT_EXPR
                  && TREE_CODE (parg1) != MULT_EXPR)
                return fold_build2_loc (loc, pcode, type,
                                        fold_build2_loc (loc, PLUS_EXPR, type,
                                                         fold_convert_loc (loc, type,
                                                                           parg0),
                                                         fold_convert_loc (loc, type,
                                                                           marg)),
                                        fold_convert_loc (loc, type, parg1));
              if (TREE_CODE (parg0) != MULT_EXPR
                  && TREE_CODE (parg1) == MULT_EXPR)
                return
                  fold_build2_loc (loc, PLUS_EXPR, type,
                                   fold_convert_loc (loc, type, parg0),
                                   fold_build2_loc (loc, pcode, type,
                                                    fold_convert_loc (loc, type, marg),
                                                    fold_convert_loc (loc, type,
                                                                      parg1)));
            }
        }
      else
        {
          /* See if ARG1 is zero and X + ARG1 reduces to X.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* Likewise if the operands are reversed.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));

          /* Convert X + -C into X - C.  */
          if (TREE_CODE (arg1) == REAL_CST
              && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
            {
              tem = fold_negate_const (arg1, type);
              if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
                return fold_build2_loc (loc, MINUS_EXPR, type,
                                        fold_convert_loc (loc, type, arg0),
                                        fold_convert_loc (loc, type, tem));
            }

          /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
             to __complex__ ( x, y ).  This is not the same for SNaNs or
             if signed zeros are involved.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
            {
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
              tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
              tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
              bool arg0rz = false, arg0iz = false;
              if ((arg0r && (arg0rz = real_zerop (arg0r)))
                  || (arg0i && (arg0iz = real_zerop (arg0i))))
                {
                  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
                  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
                  if (arg0rz && arg1i && real_zerop (arg1i))
                    {
                      tree rp = arg1r ? arg1r
                                  : build1 (REALPART_EXPR, rtype, arg1);
                      tree ip = arg0i ? arg0i
                                  : build1 (IMAGPART_EXPR, rtype, arg0);
                      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                    }
                  else if (arg0iz && arg1r && real_zerop (arg1r))
                    {
                      tree rp = arg0r ? arg0r
                                  : build1 (REALPART_EXPR, rtype, arg0);
                      tree ip = arg1i ? arg1i
                                  : build1 (IMAGPART_EXPR, rtype, arg1);
                      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                    }
                }
            }
          if (flag_unsafe_math_optimizations
              && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
              && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
              && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
            return tem;

          /* Convert x+x into x*2.0.  */
          if (operand_equal_p (arg0, arg1, 0)
              && SCALAR_FLOAT_TYPE_P (type))
            return fold_build2_loc (loc, MULT_EXPR, type, arg0,
                                    build_real (type, dconst2));

          /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
             We associate floats only if the user has specified
             -fassociative-math.  */
          if (flag_associative_math
              && TREE_CODE (arg1) == PLUS_EXPR
              && TREE_CODE (arg0) != MULT_EXPR)
            {
              tree tree10 = TREE_OPERAND (arg1, 0);
              tree tree11 = TREE_OPERAND (arg1, 1);
              if (TREE_CODE (tree11) == MULT_EXPR
                  && TREE_CODE (tree10) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
                  return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
                }
            }
          /* Convert (b*c + d*e) + a into b*c + (d*e +a).
             We associate floats only if the user has specified
             -fassociative-math.  */
          if (flag_associative_math
              && TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) != MULT_EXPR)
            {
              tree tree00 = TREE_OPERAND (arg0, 0);
              tree tree01 = TREE_OPERAND (arg0, 1);
              if (TREE_CODE (tree01) == MULT_EXPR
                  && TREE_CODE (tree00) == MULT_EXPR)
                {
                  tree tree0;
                  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
                  return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
                }
            }
        }
     bit_rotate:
      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
         is a rotate of A by C1 bits.  */
      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
         is a rotate of A by B bits.  */
      {
        enum tree_code code0, code1;
        tree rtype;
        code0 = TREE_CODE (arg0);
        code1 = TREE_CODE (arg1);
        if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
             || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
            && operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 0), 0)
            && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
                TYPE_UNSIGNED (rtype))
            /* Only create rotates in complete modes.  Other cases are not
               expanded properly.  */
            && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
          {
            tree tree01, tree11;
            enum tree_code code01, code11;

            tree01 = TREE_OPERAND (arg0, 1);
            tree11 = TREE_OPERAND (arg1, 1);
            STRIP_NOPS (tree01);
            STRIP_NOPS (tree11);
            code01 = TREE_CODE (tree01);
            code11 = TREE_CODE (tree11);
            if (code01 == INTEGER_CST
                && code11 == INTEGER_CST
                && TREE_INT_CST_HIGH (tree01) == 0
                && TREE_INT_CST_HIGH (tree11) == 0
                && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
                    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
              {
                tem = build2_loc (loc, LROTATE_EXPR,
                                  TREE_TYPE (TREE_OPERAND (arg0, 0)),
                                  TREE_OPERAND (arg0, 0),
                                  code0 == LSHIFT_EXPR ? tree01 : tree11);
                return fold_convert_loc (loc, type, tem);
              }
            else if (code11 == MINUS_EXPR)
              {
                tree tree110, tree111;
                tree110 = TREE_OPERAND (tree11, 0);
                tree111 = TREE_OPERAND (tree11, 1);
                STRIP_NOPS (tree110);
                STRIP_NOPS (tree111);
                if (TREE_CODE (tree110) == INTEGER_CST
                    && 0 == compare_tree_int (tree110,
                                              TYPE_PRECISION
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree01, tree111, 0))
                  return
                    fold_convert_loc (loc, type,
                                      build2 ((code0 == LSHIFT_EXPR
                                               ? LROTATE_EXPR
                                               : RROTATE_EXPR),
                                              TREE_TYPE (TREE_OPERAND (arg0, 0)),
                                              TREE_OPERAND (arg0, 0), tree01));
              }
            else if (code01 == MINUS_EXPR)
              {
                tree tree010, tree011;
                tree010 = TREE_OPERAND (tree01, 0);
                tree011 = TREE_OPERAND (tree01, 1);
                STRIP_NOPS (tree010);
                STRIP_NOPS (tree011);
                if (TREE_CODE (tree010) == INTEGER_CST
                    && 0 == compare_tree_int (tree010,
                                              TYPE_PRECISION
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree11, tree011, 0))
                  return fold_convert_loc
                    (loc, type,
                     build2 ((code0 != LSHIFT_EXPR
                              ? LROTATE_EXPR
                              : RROTATE_EXPR),
                             TREE_TYPE (TREE_OPERAND (arg0, 0)),
                             TREE_OPERAND (arg0, 0), tree11));
              }
          }
      }
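      /* Illustration (not part of the original sources): the shift pairs
         above match the classic rotate idiom.  A sketch assuming a 32-bit
         unsigned int; the C source itself has undefined behavior for n == 0
         here, the fold only recognizes the pattern.

           unsigned rotl32 (unsigned x, unsigned n)
           {
             return (x << n) | (x >> (32 - n));   // recognized as a rotate by n
           }
      */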
    associate:
      /* In most languages, can't associate operations on floats through
         parentheses.  Rather than remember where the parentheses were, we
         don't associate floats at all, unless the user has specified
         -fassociative-math.
         And, we need to make sure type is not saturating.  */

      if ((! FLOAT_TYPE_P (type) || flag_associative_math)
          && !TYPE_SATURATING (type))
        {
          tree var0, con0, lit0, minus_lit0;
          tree var1, con1, lit1, minus_lit1;
          bool ok = true;

          /* Split both trees into variables, constants, and literals.  Then
             associate each group together, the constants with literals,
             then the result with variables.  This increases the chances of
             literals being recombined later and of generating relocatable
             expressions for the sum of a constant and literal.  */
          var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
          var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
                             code == MINUS_EXPR);

          /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
          if (code == MINUS_EXPR)
            code = PLUS_EXPR;

          /* With undefined overflow we can only associate constants with one
             variable, and constants whose association doesn't overflow.  */
          if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
              || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
            {
              if (var0 && var1)
                {
                  tree tmp0 = var0;
                  tree tmp1 = var1;

                  if (TREE_CODE (tmp0) == NEGATE_EXPR)
                    tmp0 = TREE_OPERAND (tmp0, 0);
                  if (TREE_CODE (tmp1) == NEGATE_EXPR)
                    tmp1 = TREE_OPERAND (tmp1, 0);
                  /* The only case we can still associate with two variables
                     is if they are the same, modulo negation.  */
                  if (!operand_equal_p (tmp0, tmp1, 0))
                    ok = false;
                }

              if (ok && lit0 && lit1)
                {
                  tree tmp0 = fold_convert (type, lit0);
                  tree tmp1 = fold_convert (type, lit1);
                  /* Don't introduce overflows through reassociation.  */
                  if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
                      && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
                    ok = false;
                }
            }

          /* Only do something if we found more than two objects.  Otherwise,
             nothing has changed and we risk infinite recursion.  */
          if (ok
              && (2 < ((var0 != 0) + (var1 != 0)
                       + (con0 != 0) + (con1 != 0)
                       + (lit0 != 0) + (lit1 != 0)
                       + (minus_lit0 != 0) + (minus_lit1 != 0))))
            {
              var0 = associate_trees (loc, var0, var1, code, type);
              con0 = associate_trees (loc, con0, con1, code, type);
              lit0 = associate_trees (loc, lit0, lit1, code, type);
              minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);

              /* Preserve the MINUS_EXPR if the negative part of the literal is
                 greater than the positive part.  Otherwise, the multiplicative
                 folding code (i.e extract_muldiv) may be fooled in case
                 unsigned constants are subtracted, like in the following
                 example: ((X*2 + 4) - 8U)/2.  */
              if (minus_lit0 && lit0)
                {
                  if (TREE_CODE (lit0) == INTEGER_CST
                      && TREE_CODE (minus_lit0) == INTEGER_CST
                      && tree_int_cst_lt (lit0, minus_lit0))
                    {
                      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
                                                    MINUS_EXPR, type);
                      lit0 = 0;
                    }
                  else
                    {
                      lit0 = associate_trees (loc, lit0, minus_lit0,
                                              MINUS_EXPR, type);
                      minus_lit0 = 0;
                    }
                }
              if (minus_lit0)
                {
                  if (con0 == 0)
                    return
                      fold_convert_loc (loc, type,
                                        associate_trees (loc, var0, minus_lit0,
                                                         MINUS_EXPR, type));
                  else
                    {
                      con0 = associate_trees (loc, con0, minus_lit0,
                                              MINUS_EXPR, type);
                      return
                        fold_convert_loc (loc, type,
                                          associate_trees (loc, var0, con0,
                                                           PLUS_EXPR, type));
                    }
                }

              con0 = associate_trees (loc, con0, lit0, code, type);
              return
                fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
                                                              code, type));
            }
        }

      return NULL_TREE;
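      /* Illustration (not part of the original sources): the split/associate
         machinery above combines the literal parts of a sum.  A sketch using
         unsigned arithmetic, where reassociation is always safe.
         Hypothetical function name.

           unsigned combine_literals (unsigned x)
           {
             return (x + 3) + 5;   // literals reassociate: x + 8
           }
      */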
    case MINUS_EXPR:
      /* Pointer simplifications for subtraction, simple reassociations.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
        {
          /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
          if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
              && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
            {
              tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
              tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
              tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
              tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
              return fold_build2_loc (loc, PLUS_EXPR, type,
                                      fold_build2_loc (loc, MINUS_EXPR, type,
                                                       arg00, arg10),
                                      fold_build2_loc (loc, MINUS_EXPR, type,
                                                       arg01, arg11));
            }
          /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1
             simplifies.  */
          else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
            {
              tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
              tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
              tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
                                          fold_convert_loc (loc, type, arg1));
              if (tmp)
                return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
            }
        }
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold_build2_loc (loc, PLUS_EXPR, type, op0,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && (FLOAT_TYPE_P (type)
              || INTEGRAL_TYPE_P (type))
          && negate_expr_p (arg1)
          && reorder_operands_p (arg0, arg1))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));
      /* Convert -A - 1 to ~A.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (arg0) == NEGATE_EXPR
          && integer_onep (arg1)
          && !TYPE_OVERFLOW_TRAPS (type))
        return fold_build1_loc (loc, BIT_NOT_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)));

      /* Convert -1 - A to ~A.  */
      if (INTEGRAL_TYPE_P (type)
          && integer_all_onesp (arg0))
        return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);

      /* X - (X / CST) * CST is X % CST.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (arg1) == MULT_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
          && operand_equal_p (arg0,
                              TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
                              TREE_OPERAND (arg1, 1), 0))
        return
          fold_convert_loc (loc, type,
                            fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
                                             arg0, TREE_OPERAND (arg1, 1)));
      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg0))
            return negate_expr (fold_convert_loc (loc, type, arg1));
          if (integer_zerop (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* Fold A - (A & B) into ~B & A.  */
          if (!TREE_SIDE_EFFECTS (arg0)
              && TREE_CODE (arg1) == BIT_AND_EXPR)
            {
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
                {
                  tree arg10 = fold_convert_loc (loc, type,
                                                 TREE_OPERAND (arg1, 0));
                  return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                          fold_build1_loc (loc, BIT_NOT_EXPR,
                                                           type, arg10),
                                          fold_convert_loc (loc, type, arg0));
                }
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
                {
                  tree arg11 = fold_convert_loc (loc,
                                                 type, TREE_OPERAND (arg1, 1));
                  return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                          fold_build1_loc (loc, BIT_NOT_EXPR,
                                                           type, arg11),
                                          fold_convert_loc (loc, type, arg0));
                }
            }

          /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
             any power of 2 minus 1.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0))
            {
              tree mask0 = TREE_OPERAND (arg0, 1);
              tree mask1 = TREE_OPERAND (arg1, 1);
              tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);

              if (operand_equal_p (tem, mask1, 0))
                {
                  tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
                                         TREE_OPERAND (arg0, 0), mask1);
                  return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
                }
            }
        }
      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
         ARG0 is zero and X + ARG0 reduces to X, since that would mean
         (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
        return negate_expr (fold_convert_loc (loc, type, arg1));

      /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
         __complex__ ( x, -y ).  This is not the same for SNaNs or if
         signed zeros are involved.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
          && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
        {
          tree rtype = TREE_TYPE (TREE_TYPE (arg0));
          tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
          tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
          bool arg0rz = false, arg0iz = false;
          if ((arg0r && (arg0rz = real_zerop (arg0r)))
              || (arg0i && (arg0iz = real_zerop (arg0i))))
            {
              tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
              tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
              if (arg0rz && arg1i && real_zerop (arg1i))
                {
                  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
                                             arg1r ? arg1r
                                             : build1 (REALPART_EXPR, rtype, arg1));
                  tree ip = arg0i ? arg0i
                              : build1 (IMAGPART_EXPR, rtype, arg0);
                  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                }
              else if (arg0iz && arg1r && real_zerop (arg1r))
                {
                  tree rp = arg0r ? arg0r
                              : build1 (REALPART_EXPR, rtype, arg0);
                  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
                                             arg1i ? arg1i
                                             : build1 (IMAGPART_EXPR, rtype, arg1));
                  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
                }
            }
        }
      /* Fold &x - &x.  This can happen from &x.foo - &x.
         This is unsafe for certain floats even in non-IEEE formats.
         In IEEE, it is unsafe because it does wrong for NaNs.
         Also note that operand_equal_p is always false if an operand
         is volatile.  */

      if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
          && operand_equal_p (arg0, arg1, 0))
        return build_zero_cst (type);

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (negate_expr_p (arg1)
          && ((FLOAT_TYPE_P (type)
               /* Avoid this transformation if B is a positive REAL_CST.  */
               && (TREE_CODE (arg1) != REAL_CST
                   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
              || INTEGRAL_TYPE_P (type)))
        return fold_build2_loc (loc, PLUS_EXPR, type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)));

      /* Try folding difference of addresses.  */
      {
        HOST_WIDE_INT diff;

        if ((TREE_CODE (arg0) == ADDR_EXPR
             || TREE_CODE (arg1) == ADDR_EXPR)
            && ptr_difference_const (arg0, arg1, &diff))
          return build_int_cst_type (type, diff);
      }

      /* Fold &a[i] - &a[j] to i-j.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
          && TREE_CODE (arg1) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
        {
          tree aref0 = TREE_OPERAND (arg0, 0);
          tree aref1 = TREE_OPERAND (arg1, 0);
          if (operand_equal_p (TREE_OPERAND (aref0, 0),
                               TREE_OPERAND (aref1, 0), 0))
            {
              tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
              tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
              tree esz = array_ref_element_size (aref0);
              tree diff = build2 (MINUS_EXPR, type, op0, op1);
              return fold_build2_loc (loc, MULT_EXPR, type, diff,
                                      fold_convert_loc (loc, type, esz));
            }
        }
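      /* Illustration (not part of the original sources): the address
         difference folds above at source level.  Hypothetical function name.

           #include <stddef.h>

           ptrdiff_t index_diff (int *a, ptrdiff_t i, ptrdiff_t j)
           {
             return &a[i] - &a[j];   // folded to i - j
           }
      */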
      if (FLOAT_TYPE_P (type)
          && flag_unsafe_math_optimizations
          && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
          && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
          && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
        return tem;

      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
         same or one.  Make sure type is not saturating.
         fold_plusminus_mult_expr will re-associate.  */
      if ((TREE_CODE (arg0) == MULT_EXPR
           || TREE_CODE (arg1) == MULT_EXPR)
          && !TYPE_SATURATING (type)
          && (!FLOAT_TYPE_P (type) || flag_associative_math))
        {
          tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
          if (tem)
            return tem;
        }

      goto associate;

    case MULT_EXPR:
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold_build2_loc (loc, MULT_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)),
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold_build2_loc (loc, MULT_EXPR, type,
                                fold_convert_loc (loc, type,
                                                  negate_expr (arg0)),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return omit_one_operand_loc (loc, type, arg1, arg0);
          if (integer_onep (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
          /* Transform x * -1 into -x.  Make sure to do the negation
             on the original operand with conversions not stripped
             because we can only strip non-sign-changing conversions.  */
          if (integer_all_onesp (arg1))
            return fold_convert_loc (loc, type, negate_expr (op0));
          /* Transform x * -C into -x * C if x is easily negatable.  */
          if (TREE_CODE (arg1) == INTEGER_CST
              && tree_int_cst_sgn (arg1) == -1
              && negate_expr_p (arg0)
              && (tem = negate_expr (arg1)) != arg1
              && !TREE_OVERFLOW (tem))
            return fold_build2_loc (loc, MULT_EXPR, type,
                                    fold_convert_loc (loc, type,
                                                      negate_expr (arg0)),
                                    tem);

          /* (a * (1 << b)) is (a << b)  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg1, 0)))
            return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
                                    TREE_OPERAND (arg1, 1));
          if (TREE_CODE (arg0) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg0, 0)))
            return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
                                    TREE_OPERAND (arg0, 1));

          /* (A + A) * C -> A * 2 * C  */
          if (TREE_CODE (arg0) == PLUS_EXPR
              && TREE_CODE (arg1) == INTEGER_CST
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg0, 1), 0))
            return fold_build2_loc (loc, MULT_EXPR, type,
                                    omit_one_operand_loc (loc, type,
                                                          TREE_OPERAND (arg0, 0),
                                                          TREE_OPERAND (arg0, 1)),
                                    fold_build2_loc (loc, MULT_EXPR, type,
                                                     build_int_cst (type, 2), arg1));
          strict_overflow_p = false;
          if (TREE_CODE (arg1) == INTEGER_CST
              && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
                                             &strict_overflow_p)))
            {
              if (strict_overflow_p)
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when simplifying "
                                        "multiplication"),
                                       WARN_STRICT_OVERFLOW_MISC);
              return fold_convert_loc (loc, type, tem);
            }

          /* Optimize z * conj(z) for integer complex numbers.  */
          if (TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (loc, type, arg1);
          if (TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (loc, type, arg0);
        }
      else
        {
          /* Maybe fold x * 0 to 0.  The expressions aren't the same
             when x is NaN, since x * 0 is also NaN.  Nor are they the
             same in modes with signed zeros, since multiplying a
             negative value by 0 gives -0, not +0.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_zerop (arg1))
            return omit_one_operand_loc (loc, type, arg1, arg0);
          /* In IEEE floating point, x*1 is not equivalent to x for snans.
             Likewise for complex arithmetic with signed zeros.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
                  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
              && real_onep (arg1))
            return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

          /* Transform x * -1.0 into -x.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
                  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
              && real_minus_onep (arg1))
            return fold_convert_loc (loc, type, negate_expr (arg0));

          /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
             the result for floating point types due to rounding so it is applied
             only if -fassociative-math was specified.  */
          if (flag_associative_math
              && TREE_CODE (arg0) == RDIV_EXPR
              && TREE_CODE (arg1) == REAL_CST
              && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
            {
              tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
                                      arg1);
              if (tem)
                return fold_build2_loc (loc, RDIV_EXPR, type, tem,
                                        TREE_OPERAND (arg0, 1));
            }

          /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
          if (operand_equal_p (arg0, arg1, 0))
            {
              tree tem = fold_strip_sign_ops (arg0);
              if (tem != NULL_TREE)
                {
                  tem = fold_convert_loc (loc, type, tem);
                  return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
                }
            }
          /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
             This is not the same for NaNs or if signed zeros are
             involved.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
              && TREE_CODE (arg1) == COMPLEX_CST
              && real_zerop (TREE_REALPART (arg1)))
            {
              tree rtype = TREE_TYPE (TREE_TYPE (arg0));
              if (real_onep (TREE_IMAGPART (arg1)))
                return
                  fold_build2_loc (loc, COMPLEX_EXPR, type,
                                   negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
                                                                 rtype, arg0)),
                                   fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
              else if (real_minus_onep (TREE_IMAGPART (arg1)))
                return
                  fold_build2_loc (loc, COMPLEX_EXPR, type,
                                   fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
                                   negate_expr (fold_build1_loc (loc, REALPART_EXPR,
                                                                 rtype, arg0)));
            }

          /* Optimize z * conj(z) for floating point complex numbers.
             Guarded by flag_unsafe_math_optimizations as non-finite
             imaginary components don't produce scalar results.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == CONJ_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            return fold_mult_zconjz (loc, type, arg1);
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == CONJ_EXPR
              && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
            return fold_mult_zconjz (loc, type, arg0);
          if (flag_unsafe_math_optimizations)
            {
              enum built_in_function fcode0 = builtin_mathfn_code (arg0);
              enum built_in_function fcode1 = builtin_mathfn_code (arg1);

              /* Optimizations of root(...)*root(...).  */
              if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
                {
                  tree rootfn, arg;
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);

                  /* Optimize sqrt(x)*sqrt(x) as x.  */
                  if (BUILTIN_SQRT_P (fcode0)
                      && operand_equal_p (arg00, arg10, 0)
                      && ! HONOR_SNANS (TYPE_MODE (type)))
                    return arg00;

                  /* Optimize root(x)*root(y) as root(x*y).  */
                  rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
                  return build_call_expr_loc (loc, rootfn, 1, arg);
                }

              /* Optimize expN(x)*expN(y) as expN(x+y).  */
              if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
                {
                  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                  tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
                                              CALL_EXPR_ARG (arg0, 0),
                                              CALL_EXPR_ARG (arg1, 0));
                  return build_call_expr_loc (loc, expfn, 1, arg);
                }

              /* Optimizations of pow(...)*pow(...).  */
              if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
                  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
                  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
                {
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg01 = CALL_EXPR_ARG (arg0, 1);
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);
                  tree arg11 = CALL_EXPR_ARG (arg1, 1);

                  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
                  if (operand_equal_p (arg01, arg11, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      tree arg = fold_build2_loc (loc, MULT_EXPR, type,
                                                  arg00, arg10);
                      return build_call_expr_loc (loc, powfn, 2, arg, arg01);
                    }

                  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
                  if (operand_equal_p (arg00, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
                                                  arg01, arg11);
                      return build_call_expr_loc (loc, powfn, 2, arg00, arg);
                    }
                }

              /* Optimize tan(x)*cos(x) as sin(x).  */
              if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
                   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
                   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
                   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
                   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
                   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
                  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
                                      CALL_EXPR_ARG (arg1, 0), 0))
                {
                  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

                  if (sinfn != NULL_TREE)
                    return build_call_expr_loc (loc, sinfn, 1,
                                                CALL_EXPR_ARG (arg0, 0));
                }

              /* Optimize x*pow(x,c) as pow(x,c+1).  */
              if (fcode1 == BUILT_IN_POW
                  || fcode1 == BUILT_IN_POWF
                  || fcode1 == BUILT_IN_POWL)
                {
                  tree arg10 = CALL_EXPR_ARG (arg1, 0);
                  tree arg11 = CALL_EXPR_ARG (arg1, 1);
                  if (TREE_CODE (arg11) == REAL_CST
                      && !TREE_OVERFLOW (arg11)
                      && operand_equal_p (arg0, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
                      REAL_VALUE_TYPE c;
                      tree arg;

                      c = TREE_REAL_CST (arg11);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
                    }
                }

              /* Optimize pow(x,c)*x as pow(x,c+1).  */
              if (fcode0 == BUILT_IN_POW
                  || fcode0 == BUILT_IN_POWF
                  || fcode0 == BUILT_IN_POWL)
                {
                  tree arg00 = CALL_EXPR_ARG (arg0, 0);
                  tree arg01 = CALL_EXPR_ARG (arg0, 1);
                  if (TREE_CODE (arg01) == REAL_CST
                      && !TREE_OVERFLOW (arg01)
                      && operand_equal_p (arg1, arg00, 0))
                    {
                      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
                      REAL_VALUE_TYPE c;
                      tree arg;

                      c = TREE_REAL_CST (arg01);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      return build_call_expr_loc (loc, powfn, 2, arg1, arg);
                    }
                }

              /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x.  */
              if (!in_gimple_form
                  && optimize
                  && operand_equal_p (arg0, arg1, 0))
                {
                  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

                  if (powfn)
                    {
                      tree arg = build_real (type, dconst2);
                      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
                    }
                }
            }
        }
      goto associate;
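      /* Illustration (not part of the original sources): with
         -funsafe-math-optimizations the builtin-call products above
         simplify.  Hypothetical function names.

           #include <math.h>

           double root_prod (double x, double y)
           {
             return sqrt (x) * sqrt (y);   // folded to sqrt (x * y)
           }

           double exp_prod (double x, double y)
           {
             return exp (x) * exp (y);     // folded to exp (x + y)
           }
      */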
    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (operand_equal_p (arg0, arg1, 0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg1);
        }

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg0);
        }
      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
          int width = TYPE_PRECISION (type), w;
          hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
          lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
          hi2 = TREE_INT_CST_HIGH (arg1);
          lo2 = TREE_INT_CST_LOW (arg1);

          /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
          if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
            return omit_one_operand_loc (loc, type, arg1,
                                         TREE_OPERAND (arg0, 0));

          if (width > HOST_BITS_PER_WIDE_INT)
            {
              mhi = (unsigned HOST_WIDE_INT) -1
                    >> (2 * HOST_BITS_PER_WIDE_INT - width);
              mlo = -1;
            }
          else
            {
              mhi = 0;
              mlo = (unsigned HOST_WIDE_INT) -1
                    >> (HOST_BITS_PER_WIDE_INT - width);
            }

          /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
          if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
            return fold_build2_loc (loc, BIT_IOR_EXPR, type,
                                    TREE_OPERAND (arg0, 0), arg1);

          /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
             unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
             mode which allows further optimizations.  */
          hi1 &= mhi;
          lo1 &= mlo;
          hi2 &= mhi;
          lo2 &= mlo;
          hi3 = hi1 & ~hi2;
          lo3 = lo1 & ~lo2;
          for (w = BITS_PER_UNIT;
               w <= width && w <= HOST_BITS_PER_WIDE_INT;
               w <<= 1)
            {
              unsigned HOST_WIDE_INT mask
                = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
              if (((lo1 | lo2) & mask) == mask
                  && (lo1 & ~mask) == 0 && hi1 == 0)
                {
                  hi3 = 0;
                  lo3 = mask;
                  break;
                }
            }
          if (hi3 != hi1 || lo3 != lo1)
            return fold_build2_loc (loc, BIT_IOR_EXPR, type,
                                    fold_build2_loc (loc, BIT_AND_EXPR, type,
                                                     TREE_OPERAND (arg0, 0),
                                                     build_int_cst_wide (type,
                                                                         lo3, hi3)),
                                    arg1);
        }
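      /* Illustration (not part of the original sources): the (X & C1) | C2
         canonicalization above trims the bits of C1 already covered by C2.
         Hypothetical function name.

           unsigned canon_ior (unsigned x)
           {
             return (x & 0xff0f) | 0x00ff;   // C1 becomes C1 & ~C2: (x & 0xff00) | 0xff
           }
      */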
      /* (X & Y) | Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X & Y) | X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X | (X & Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X | (Y & X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
      /* (X & ~Y) | (~X & Y) is X ^ Y */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR)
        {
          tree a0, a1, l0, l1, n0, n1;

          a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

          l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

          n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
          n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);

          if ((operand_equal_p (n0, a0, 0)
               && operand_equal_p (n1, a1, 0))
              || (operand_equal_p (n0, a1, 0)
                  && operand_equal_p (n1, a0, 0)))
            return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
        }
      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;

      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

         This results in more efficient code for machines without a NAND
         instruction.  Combine will canonicalize to the first form
         which will allow use of NAND instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        return
          fold_build1_loc (loc, BIT_NOT_EXPR, type,
                           build2 (BIT_AND_EXPR, type,
                                   fold_convert_loc (loc, type,
                                                     TREE_OPERAND (arg0, 0)),
                                   fold_convert_loc (loc, type,
                                                     TREE_OPERAND (arg1, 0))));

      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_all_onesp (arg1))
        return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg1);
        }

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          t1 = build_zero_cst (type);
          t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
          return omit_one_operand_loc (loc, type, t1, arg0);
        }
      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
         with a constant, and the two constants have no bits in common,
         we should treat this as a BIT_IOR_EXPR since this may produce more
         simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
          && integer_zerop (const_binop (BIT_AND_EXPR,
                                         TREE_OPERAND (arg0, 1),
                                         TREE_OPERAND (arg1, 1))))
        {
          code = BIT_IOR_EXPR;
          goto bit_ior;
        }
      /* (X | Y) ^ X -> Y & ~X.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
          tree t2 = TREE_OPERAND (arg0, 1);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
                                arg1);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* (Y | X) ^ X -> Y & ~X.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tree t2 = TREE_OPERAND (arg0, 0);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
                                arg1);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* X ^ (X | Y) -> Y & ~X.  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
        {
          tree t2 = TREE_OPERAND (arg1, 1);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
                                arg0);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }

      /* X ^ (Y | X) -> Y & ~X.  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
        {
          tree t2 = TREE_OPERAND (arg1, 0);
          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
                                arg0);
          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
                                fold_convert_loc (loc, type, t2),
                                fold_convert_loc (loc, type, t1));
          return t1;
        }
      /* Convert ~X ^ ~Y to X ^ Y.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)),
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg1, 0)));

      /* Convert ~X ^ C to X ^ ~C.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == INTEGER_CST)
        return fold_build2_loc (loc, code, type,
                                fold_convert_loc (loc, type,
                                                  TREE_OPERAND (arg0, 0)),
                                fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));

      /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_onep (arg1))
        return fold_build2_loc (loc, EQ_EXPR, type, arg0,
                                build_int_cst (TREE_TYPE (arg0), 0));
      /* Fold (X & Y) ^ Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }
      /* Fold (X & Y) ^ X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }
      /* Fold X ^ (X & Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_convert_loc (loc, type, arg0),
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
        }
      /* Fold X ^ (Y & X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg0));
        }

      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
      if (integer_all_onesp (arg1))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      if (operand_equal_p (arg0, arg1, 0))
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X & X, (X == 0) & X, and !X & X are always zero.  */
      if ((TREE_CODE (arg0) == BIT_NOT_EXPR
           || TREE_CODE (arg0) == TRUTH_NOT_EXPR
           || (TREE_CODE (arg0) == EQ_EXPR
               && integer_zerop (TREE_OPERAND (arg0, 1))))
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X & ~X , X & (X == 0), and X & !X are always zero.  */
      if ((TREE_CODE (arg1) == BIT_NOT_EXPR
           || TREE_CODE (arg1) == TRUTH_NOT_EXPR
           || (TREE_CODE (arg1) == EQ_EXPR
               && integer_zerop (TREE_OPERAND (arg1, 1))))
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        {
          tree tmp1 = fold_convert_loc (loc, type, arg1);
          tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
          tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
          tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
          return
            fold_convert_loc (loc, type,
                              fold_build2_loc (loc, BIT_IOR_EXPR,
                                               type, tmp2, tmp3));
        }

      /* (X | Y) & Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X | Y) & X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X & (X | Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X & (Y | X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
      /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_onep (arg1))
        {
          tem = TREE_OPERAND (arg0, 0);
          return fold_build2_loc (loc, EQ_EXPR, type,
                                  fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
                                                   build_int_cst (TREE_TYPE (tem), 1)),
                                  build_int_cst (TREE_TYPE (tem), 0));
        }
      /* Fold ~X & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && integer_onep (arg1))
        {
          tem = TREE_OPERAND (arg0, 0);
          return fold_build2_loc (loc, EQ_EXPR, type,
                                  fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
                                                   build_int_cst (TREE_TYPE (tem), 1)),
                                  build_int_cst (TREE_TYPE (tem), 0));
        }
      /* Fold !X & 1 as X == 0.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
          && integer_onep (arg1))
        {
          tem = TREE_OPERAND (arg0, 0);
          return fold_build2_loc (loc, EQ_EXPR, type, tem,
                                  build_int_cst (TREE_TYPE (tem), 0));
        }
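      /* Illustration (not part of the original sources): the low-bit folds
         above rewrite bit tests as comparisons.  Hypothetical function name.

           int is_even (unsigned x)
           {
             return ~x & 1;   // folded to (x & 1) == 0
           }
      */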
      /* Fold (X ^ Y) & Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }
      /* Fold (X ^ Y) & X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg1));
        }
      /* Fold X & (X ^ Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_convert_loc (loc, type, arg0),
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
        }
      /* Fold X & (Y ^ X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
        {
          tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
          return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                  fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
                                  fold_convert_loc (loc, type, arg0));
        }
      /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
         ((A & N) + B) & M -> (A + B) & M
         Similarly if (N & M) == 0,
         ((A | N) + B) & M -> (A + B) & M
         and for - instead of + (or unary - instead of +)
         and/or ^ instead of |.
         If B is constant and (B & M) == 0, fold into A & M.  */
      if (host_integerp (arg1, 1))
        {
          unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
          if (~cst1 && (cst1 & (cst1 + 1)) == 0
              && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
              && (TREE_CODE (arg0) == PLUS_EXPR
                  || TREE_CODE (arg0) == MINUS_EXPR
                  || TREE_CODE (arg0) == NEGATE_EXPR)
              && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
                  || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
            {
              tree pmop[2];
              int which = 0;
              unsigned HOST_WIDE_INT cst0;

              /* Now we know that arg0 is (C + D) or (C - D) or
                 -C and arg1 (M) is == (1LL << cst) - 1.
                 Store C into PMOP[0] and D into PMOP[1].  */
              pmop[0] = TREE_OPERAND (arg0, 0);
              pmop[1] = NULL;
              if (TREE_CODE (arg0) != NEGATE_EXPR)
                {
                  pmop[1] = TREE_OPERAND (arg0, 1);
                  which = 1;
                }

              if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
                  || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
                      & cst1) != cst1)
                which = -1;

              for (; which >= 0; which--)
                switch (TREE_CODE (pmop[which]))
                  {
                  case BIT_AND_EXPR:
                  case BIT_IOR_EXPR:
                  case BIT_XOR_EXPR:
                    if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
                        != INTEGER_CST)
                      break;
                    /* tree_low_cst not used, because we don't care about
                       the upper bits.  */
                    cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
                    cst0 &= cst1;
                    if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
                      {
                        if (cst0 != cst1)
                          break;
                      }
                    else if (cst0 != 0)
                      break;
                    /* If C or D is of the form (A & N) where
                       (N & M) == M, or of the form (A | N) or
                       (A ^ N) where (N & M) == 0, replace it with A.  */
                    pmop[which] = TREE_OPERAND (pmop[which], 0);
                    break;
                  case INTEGER_CST:
                    /* If C or D is a N where (N & M) == 0, it can be
                       omitted (assumed 0).  */
                    if ((TREE_CODE (arg0) == PLUS_EXPR
                         || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
                        && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
                      pmop[which] = NULL;
                    break;
                  default:
                    break;
                  }

              /* Only build anything new if we optimized one or both arguments
                 above.  */
              if (pmop[0] != TREE_OPERAND (arg0, 0)
                  || (TREE_CODE (arg0) != NEGATE_EXPR
                      && pmop[1] != TREE_OPERAND (arg0, 1)))
                {
                  tree utype = TREE_TYPE (arg0);
                  if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
                    {
                      /* Perform the operations in a type that has defined
                         overflow behavior.  */
                      utype = unsigned_type_for (TREE_TYPE (arg0));
                      if (pmop[0] != NULL)
                        pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
                      if (pmop[1] != NULL)
                        pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
                    }

                  if (TREE_CODE (arg0) == NEGATE_EXPR)
                    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
                  else if (TREE_CODE (arg0) == PLUS_EXPR)
                    {
                      if (pmop[0] != NULL && pmop[1] != NULL)
                        tem = fold_build2_loc (loc, PLUS_EXPR, utype,
                                               pmop[0], pmop[1]);
                      else if (pmop[0] != NULL)
                        tem = pmop[0];
                      else if (pmop[1] != NULL)
                        tem = pmop[1];
                      else
                        return build_int_cst (type, 0);
                    }
                  else if (pmop[0] == NULL)
                    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
                  else
                    tem = fold_build2_loc (loc, MINUS_EXPR, utype,
                                           pmop[0], pmop[1]);
                  /* TEM is now the new binary +, - or unary - replacement.  */
                  tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
                                         fold_convert_loc (loc, utype, arg1));
                  return fold_convert_loc (loc, type, tem);
                }
            }
        }
      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
          && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
        {
          unsigned int prec
            = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

          if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
              && (~TREE_INT_CST_LOW (arg1)
                  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
            return
              fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
        }
      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

         This results in more efficient code for machines without a NOR
         instruction.  Combine will canonicalize to the first form
         which will allow use of NOR instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return fold_build1_loc (loc, BIT_NOT_EXPR, type,
                                  build2 (BIT_IOR_EXPR, type,
                                          fold_convert_loc (loc, type,
                                                            TREE_OPERAND (arg0, 0)),
                                          fold_convert_loc (loc, type,
                                                            TREE_OPERAND (arg1, 0))));
        }
11474 /* If arg0 is derived from the address of an object or function, we may
11475 be able to fold this expression using the object or function's
11477 if (POINTER_TYPE_P (TREE_TYPE (arg0
)) && host_integerp (arg1
, 1))
11479 unsigned HOST_WIDE_INT modulus
, residue
;
11480 unsigned HOST_WIDE_INT low
= TREE_INT_CST_LOW (arg1
);
11482 modulus
= get_pointer_modulus_and_residue (arg0
, &residue
,
11483 integer_onep (arg1
));
11485 /* This works because modulus is a power of 2. If this weren't the
11486 case, we'd have to replace it by its greatest power-of-2
11487 divisor: modulus & -modulus. */
11489 return build_int_cst (type
, residue
& low
);
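      /* Example: if ARG0 is the address of an object known to have 8-byte
	 alignment, MODULUS is 8 and RESIDUE is 0, so masking that address
	 with a constant below 8 (say 7) folds to the constant 0.  */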
      /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
	      (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
	 if the new mask might be further optimized.  */
      if ((TREE_CODE (arg0) == LSHIFT_EXPR
	   || TREE_CODE (arg0) == RSHIFT_EXPR)
	  && host_integerp (TREE_OPERAND (arg0, 1), 1)
	  && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
	     < TYPE_PRECISION (TREE_TYPE (arg0))
	  && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
	  && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
	{
	  unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
	  unsigned HOST_WIDE_INT mask
	    = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
	  unsigned HOST_WIDE_INT newmask, zerobits = 0;
	  tree shift_type = TREE_TYPE (arg0);

	  if (TREE_CODE (arg0) == LSHIFT_EXPR)
	    zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
	  else if (TREE_CODE (arg0) == RSHIFT_EXPR
		   && TYPE_PRECISION (TREE_TYPE (arg0))
		      == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
	    {
	      unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
	      tree arg00 = TREE_OPERAND (arg0, 0);
	      /* See if more bits can be proven as zero because of
		 zero extension.  */
	      if (TREE_CODE (arg00) == NOP_EXPR
		  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
		{
		  tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
		  if (TYPE_PRECISION (inner_type)
		      == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
		      && TYPE_PRECISION (inner_type) < prec)
		    {
		      prec = TYPE_PRECISION (inner_type);
		      /* See if we can shorten the right shift.  */
		      if (shiftc < prec)
			shift_type = inner_type;
		    }
		}
	      zerobits = ~(unsigned HOST_WIDE_INT) 0;
	      zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
	      zerobits <<= prec - shiftc;
	      /* For arithmetic shift if sign bit could be set, zerobits
		 can contain actually sign bits, so no transformation is
		 possible, unless MASK masks them all away.  In that
		 case the shift needs to be converted into logical shift.  */
	      if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
		  && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
		{
		  if ((mask & zerobits) == 0)
		    shift_type = unsigned_type_for (TREE_TYPE (arg0));
		  else
		    zerobits = 0;
		}
	    }

	  /* ((X << 16) & 0xff00) is (X, 0).  */
	  if ((mask & zerobits) == mask)
	    return omit_one_operand_loc (loc, type,
					 build_int_cst (type, 0), arg0);

	  newmask = mask | zerobits;
	  if (newmask != mask && (newmask & (newmask + 1)) == 0)
	    {
	      unsigned int prec;

	      /* Only do the transformation if NEWMASK is some integer
		 mode's mask.  */
	      for (prec = BITS_PER_UNIT;
		   prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
		if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
		  break;
	      if (prec < HOST_BITS_PER_WIDE_INT
		  || newmask == ~(unsigned HOST_WIDE_INT) 0)
		{
		  tree newmaskt;

		  if (shift_type != TREE_TYPE (arg0))
		    {
		      tem = fold_build2_loc (loc, TREE_CODE (arg0),
					     shift_type,
					     fold_convert_loc (loc, shift_type,
							       TREE_OPERAND (arg0, 0)),
					     TREE_OPERAND (arg0, 1));
		      tem = fold_convert_loc (loc, type, tem);
		    }
		  else
		    tem = op0;
		  newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
		  if (!tree_int_cst_equal (newmaskt, arg1))
		    return fold_build2_loc (loc, BIT_AND_EXPR, type,
					    tem, newmaskt);
		}
	    }
	}

      goto associate;

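      /* Example: for a 32-bit unsigned X, (X >> 8) & 0xffffff has its top
	 eight bits known to be zero, so NEWMASK widens to 0xffffffff and a
	 later X & ~0 fold removes the masking altogether, leaving X >> 8.  */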
    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
	 of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
	  && real_zerop (arg1))
	return NULL_TREE;

      /* Optimize A / A to 1.0 if we don't care about
	 NaNs or Infinities.  Skip the transformation
	 for non-real operands.  */
      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
	  && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
	  && operand_equal_p (arg0, arg1, 0))
	{
	  tree r = build_real (TREE_TYPE (arg0), dconst1);

	  return omit_two_operands_loc (loc, type, r, arg0, arg1);
	}

      /* The complex version of the above A / A optimization.  */
      if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
	  && operand_equal_p (arg0, arg1, 0))
	{
	  tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
	  if (! HONOR_NANS (TYPE_MODE (elem_type))
	      && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
	    {
	      tree r = build_real (elem_type, dconst1);
	      /* omit_two_operands will call fold_convert for us.  */
	      return omit_two_operands_loc (loc, type, r, arg0, arg1);
	    }
	}

      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				TREE_OPERAND (arg0, 0),
				negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				negate_expr (arg0),
				TREE_OPERAND (arg1, 0));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_minus_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type,
						      negate_expr (arg0)));
      /* If ARG1 is a constant, we can convert this to a multiply by the
	 reciprocal.  This does not have the same rounding properties,
	 so only do this if -freciprocal-math.  We can actually
	 always safely do it if ARG1 is a power of two, but it's hard to
	 tell if it is or not in a portable manner.  */
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  if (flag_reciprocal_math
	      && 0 != (tem = const_binop (code, build_real (type, dconst1),
					  arg1)))
	    return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
	  /* Find the reciprocal if optimizing and the result is exact.  */
	  if (optimize)
	    {
	      REAL_VALUE_TYPE r;
	      r = TREE_REAL_CST (arg1);
	      if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
		{
		  tem = build_real (type, r);
		  return fold_build2_loc (loc, MULT_EXPR, type,
					  fold_convert_loc (loc, type, arg0),
					  tem);
		}
	    }
	}
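      /* Example: X / 4.0 becomes X * 0.25 whenever optimizing, because the
	 reciprocal of 4.0 is exact; X / 3.0 becomes X * (1.0/3.0) only
	 under -freciprocal-math, since that reciprocal rounds.  */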
      /* Convert A/B/C to A/(B*C).  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg0) == RDIV_EXPR)
	return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, MULT_EXPR, type,
						 TREE_OPERAND (arg0, 1),
						 arg1));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg1) == RDIV_EXPR)
	return fold_build2_loc (loc, MULT_EXPR, type,
				fold_build2_loc (loc, RDIV_EXPR, type, arg0,
						 TREE_OPERAND (arg1, 0)),
				TREE_OPERAND (arg1, 1));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (arg0) == REAL_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
	{
	  tree tem = const_binop (RDIV_EXPR, arg0,
				  TREE_OPERAND (arg1, 1));
	  if (tem)
	    return fold_build2_loc (loc, RDIV_EXPR, type, tem,
				    TREE_OPERAND (arg1, 0));
	}

      if (flag_unsafe_math_optimizations)
	{
	  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	  enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	  /* Optimize sin(x)/cos(x) as tan(x).  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				  CALL_EXPR_ARG (arg1, 0), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		return build_call_expr_loc (loc, tanfn, 1,
					    CALL_EXPR_ARG (arg0, 0));
	    }

	  /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
	  if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				  CALL_EXPR_ARG (arg1, 0), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		{
		  tree tmp = build_call_expr_loc (loc, tanfn, 1,
						  CALL_EXPR_ARG (arg0, 0));
		  return fold_build2_loc (loc, RDIV_EXPR, type,
					  build_real (type, dconst1), tmp);
		}
	    }

	  /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
	     NaNs or Infinities.  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg1, 0);

	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		  && operand_equal_p (arg00, arg01, 0))
		{
		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		  if (cosfn != NULL_TREE)
		    return build_call_expr_loc (loc, cosfn, 1, arg00);
		}
	    }

	  /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
	     NaNs or Infinities.  */
	  if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg1, 0);

	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		  && operand_equal_p (arg00, arg01, 0))
		{
		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		  if (cosfn != NULL_TREE)
		    {
		      tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
		      return fold_build2_loc (loc, RDIV_EXPR, type,
					      build_real (type, dconst1),
					      tmp);
		    }
		}
	    }

	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
	  if (fcode0 == BUILT_IN_POW
	      || fcode0 == BUILT_IN_POWF
	      || fcode0 == BUILT_IN_POWL)
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      if (TREE_CODE (arg01) == REAL_CST
		  && !TREE_OVERFLOW (arg01)
		  && operand_equal_p (arg1, arg00, 0))
		{
		  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  REAL_VALUE_TYPE c;
		  tree arg;

		  c = TREE_REAL_CST (arg01);
		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
		  arg = build_real (type, c);
		  return build_call_expr_loc (loc, powfn, 2, arg1, arg);
		}
	    }

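	  /* Example: pow (x, 3.0) / x becomes pow (x, 2.0); the constant
	     exponent is simply decremented by one.  */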
	  /* Optimize a/root(b/c) into a*root(c/b).  */
	  if (BUILTIN_ROOT_P (fcode1))
	    {
	      tree rootarg = CALL_EXPR_ARG (arg1, 0);

	      if (TREE_CODE (rootarg) == RDIV_EXPR)
		{
		  tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
		  tree b = TREE_OPERAND (rootarg, 0);
		  tree c = TREE_OPERAND (rootarg, 1);

		  tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);

		  tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
		  return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
		}
	    }

	  /* Optimize x/expN(y) into x*expN(-y).  */
	  if (BUILTIN_EXPONENT_P (fcode1))
	    {
	      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
	      tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
	      arg1 = build_call_expr_loc (loc,
					  expfn, 1,
					  fold_convert_loc (loc, type, arg));
	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
	    }

	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
	  if (fcode1 == BUILT_IN_POW
	      || fcode1 == BUILT_IN_POWF
	      || fcode1 == BUILT_IN_POWL)
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
	      tree arg10 = CALL_EXPR_ARG (arg1, 0);
	      tree arg11 = CALL_EXPR_ARG (arg1, 1);
	      tree neg11 = fold_convert_loc (loc, type,
					     negate_expr (arg11));
	      arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
	    }
	}
      return NULL_TREE;

    case TRUNC_DIV_EXPR:
      /* Optimize (X & (-A)) / A where A is a power of 2,
	 to X >> log2(A) */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
	  && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
	{
	  tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
				      arg1, TREE_OPERAND (arg0, 1));
	  if (sum && integer_zerop (sum)) {
	    unsigned long pow2;

	    if (TREE_INT_CST_LOW (arg1))
	      pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
	    else
	      pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
		     + HOST_BITS_PER_WIDE_INT;

	    return fold_build2_loc (loc, RSHIFT_EXPR, type,
				    TREE_OPERAND (arg0, 0),
				    build_int_cst (integer_type_node, pow2));
	  }
	}
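      /* Example: for signed X, (X & -16) / 16 becomes X >> 4; the AND
	 already cleared the four low bits, so the truncating division is
	 exactly an arithmetic right shift by log2(16).  */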
    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
	 a power of 2, to A >> (N + log2(B)).  */
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == LSHIFT_EXPR
	  && (TYPE_UNSIGNED (type)
	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	{
	  tree sval = TREE_OPERAND (arg1, 0);
	  if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
	    {
	      tree sh_cnt = TREE_OPERAND (arg1, 1);
	      unsigned long pow2;

	      if (TREE_INT_CST_LOW (sval))
		pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
	      else
		pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
		       + HOST_BITS_PER_WIDE_INT;

	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying A / (B << N)"),
				       WARN_STRICT_OVERFLOW_MISC);

	      sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
					sh_cnt,
					build_int_cst (TREE_TYPE (sh_cnt),
						       pow2));
	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
				      fold_convert_loc (loc, type, arg0),
				      sh_cnt);
	    }
	}

      /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
	 TRUNC_DIV_EXPR.  Rewrite into the latter in this case.  */
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_UNSIGNED (type)
	  && code == FLOOR_DIV_EXPR)
	return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
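      /* Example: for unsigned A, A / (2 << N) becomes A >> (N + 1);
	 log2 of the shifted constant is folded into the shift count.  */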
    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
	return NULL_TREE;
      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return fold_convert_loc (loc, type, negate_expr (arg0));

      /* Convert -A / -B to A / B when the type is signed and overflow is
	 undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg0) == NEGATE_EXPR
	  && negate_expr_p (arg1))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, type,
						    TREE_OPERAND (arg0, 0)),
				  fold_convert_loc (loc, type,
						    negate_expr (arg1)));
	}
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && negate_expr_p (arg0))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, type,
						    negate_expr (arg0)),
				  fold_convert_loc (loc, type,
						    TREE_OPERAND (arg1, 0)));
	}

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	 operation, EXACT_DIV_EXPR.

	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, it's not clear if they do
	 after the last round to changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	  && multiple_of_p (type, arg0, arg1))
	return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert_loc (loc, type, tem);
	}

      return NULL_TREE;

    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % 1 is always zero, but be sure to preserve any side
	 effects in X.  */
      if (integer_onep (arg1))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % 0, return X % 0 unchanged so that we can get the
	 proper warnings and errors.  */
      if (integer_zerop (arg1))
	return NULL_TREE;

      /* 0 % X is always zero, but be sure to preserve any side
	 effects in X.  Place this after checking for X == 0.  */
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)
	  && TREE_INT_CST_HIGH (arg1) < 0
	  && !TYPE_OVERFLOW_TRAPS (type)
	  /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
	  && !sign_bit_p (arg1, arg1))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  negate_expr (arg1)));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && !TYPE_OVERFLOW_TRAPS (type))
	return fold_build2_loc (loc, code, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type,
						  TREE_OPERAND (arg1, 0)));

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying modulus"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert_loc (loc, type, tem);
	}

      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
	 i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
	  && (TYPE_UNSIGNED (type)
	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	{
	  tree c = arg1;
	  /* Also optimize A % (C << N)  where C is a power of 2,
	     to A & ((C << N) - 1).  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR)
	    c = TREE_OPERAND (arg1, 0);

	  if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
	    {
	      tree mask
		= fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
				   build_int_cst (TREE_TYPE (arg1), 1));
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying "
					"X % (power of two)"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_convert_loc (loc, type, arg0),
				      fold_convert_loc (loc, type, mask));
	    }
	}

      return NULL_TREE;

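      /* Example: for unsigned X, X % 8 becomes X & 7, and
	 X % (4 << N) becomes X & ((4 << N) - 1).  */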
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
	  && tree_expr_nonnegative_p (arg1))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Since negative shift count is not well-defined,
	 don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	return NULL_TREE;

      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
      if (TREE_CODE (op0) == code && host_integerp (arg1, false)
	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
	  && host_integerp (TREE_OPERAND (arg0, 1), false)
	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
	{
	  HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
			       + TREE_INT_CST_LOW (arg1));

	  /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
	     being well defined.  */
	  if (low >= TYPE_PRECISION (type))
	    {
	      if (code == LROTATE_EXPR || code == RROTATE_EXPR)
		low = low % TYPE_PRECISION (type);
	      else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
		return omit_one_operand_loc (loc, type,
					     build_int_cst (type, 0),
					     TREE_OPERAND (arg0, 0));
	      else
		low = TYPE_PRECISION (type) - 1;
	    }

	  return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				  build_int_cst (type, low));
	}

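      /* Example: (X << 3) << 5 becomes X << 8, while an oversized rotate
	 count is instead reduced modulo the precision of the type.  */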
      /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
	 into x & ((unsigned)-1 >> c) for unsigned types.  */
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
	   || (TYPE_UNSIGNED (type)
	       && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
	  && host_integerp (arg1, false)
	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
	  && host_integerp (TREE_OPERAND (arg0, 1), false)
	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
	{
	  HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
	  HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
	  tree lshift;
	  tree arg00;

	  if (low0 == low1)
	    {
	      arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

	      lshift = build_int_cst (type, -1);
	      lshift = int_const_binop (code, lshift, arg1);

	      return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
	    }
	}

      /* Rewrite an LROTATE_EXPR by a constant into an
	 RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
	{
	  tree tem = build_int_cst (TREE_TYPE (arg1),
				    TYPE_PRECISION (type));
	  tem = const_binop (MINUS_EXPR, tem, arg1);
	  return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
	}

      /* If we have a rotate of a bit operation with the rotate count and
	 the second operand of the bit operation both constant,
	 permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == BIT_AND_EXPR
	      || TREE_CODE (arg0) == BIT_IOR_EXPR
	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, TREE_CODE (arg0), type,
				fold_build2_loc (loc, code, type,
						 TREE_OPERAND (arg0, 0), arg1),
				fold_build2_loc (loc, code, type,
						 TREE_OPERAND (arg0, 1), arg1));

      /* Two consecutive rotates adding up to the precision of the
	 type can be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == RROTATE_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (arg1) == 0
	  && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
	  && ((TREE_INT_CST_LOW (arg1)
	       + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
	      == (unsigned int) TYPE_PRECISION (type)))
	return TREE_OPERAND (arg0, 0);
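      /* Example: on a 32-bit type, rotating right by 8 and then by 24
	 moves every bit back to its original position, so the whole
	 expression folds to X.  */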
      /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
	      (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
	 if the latter can be further optimized.  */
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree mask = fold_build2_loc (loc, code, type,
				       fold_convert_loc (loc, type,
							 TREE_OPERAND (arg0, 1)),
				       arg1);
	  tree shift = fold_build2_loc (loc, code, type,
					fold_convert_loc (loc, type,
							  TREE_OPERAND (arg0, 0)),
					arg1);
	  tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
	  if (tem)
	    return tem;
	}

      return NULL_TREE;

    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
      if (tem)
	return tem;
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_MAX_VALUE (type)
	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
      if (tem)
	return tem;
      goto associate;

    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
	return fold_convert_loc (loc, type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
	 must be evaluated.  */
      if (integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
	 case will be handled here.  */
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
	 means A >= Y && A != MAX, but in this case we know that
	 A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
	  && !TREE_SIDE_EFFECTS (arg1))
	{
	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
	  if (tem && !operand_equal_p (tem, arg0, 0))
	    return fold_build2_loc (loc, code, type, tem, arg1);

	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
	  if (tem && !operand_equal_p (tem, arg1, 0))
	    return fold_build2_loc (loc, code, type, arg0, tem);
	}

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
	  != NULL_TREE)
	return tem;

      return NULL_TREE;

    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or true.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return fold_convert_loc (loc, type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant true, result is true, but we must
	 evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
	 TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* (X && !Y) || (!X && Y) is X ^ Y */
      if (TREE_CODE (arg0) == TRUTH_AND_EXPR
	  && TREE_CODE (arg1) == TRUTH_AND_EXPR)
	{
	  tree a0, a1, l0, l1, n0, n1;

	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

	  n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
	  n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);

	  if ((operand_equal_p (n0, a0, 0)
	       && operand_equal_p (n1, a1, 0))
	      || (operand_equal_p (n0, a1, 0)
		  && operand_equal_p (n1, a0, 0)))
	    return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
	}

      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
	  != NULL_TREE)
	return tem;

      return NULL_TREE;

    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
	{
	  /* Only call invert_truthvalue if operand is a truth value.  */
	  if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
	    tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0),
				   arg0);
	  else
	    tem = invert_truthvalue_loc (loc, arg0);
	  return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	}
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      return NULL_TREE;

    case EQ_EXPR:
    case NE_EXPR:
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;

      /* bool_var != 0 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == NE_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var == 1 becomes bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == EQ_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var != 1 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
	  && code == NE_EXPR)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
						  TREE_TYPE (arg0), arg0));

      /* bool_var == 0 becomes !bool_var.  */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
	  && code == EQ_EXPR)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
						  TREE_TYPE (arg0), arg0));

      /* !exp != 0 becomes !exp */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
	  && code == NE_EXPR)
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* If this is an equality comparison of the address of two non-weak,
	 unaliased symbols neither of which are extern (since we do not
	 have access to attributes for externs), then we know the result.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
	{
	  /* We know that we're looking at the address of two
	     non-weak, unaliased, static _DECL nodes.

	     It is both wasteful and incorrect to call operand_equal_p
	     to compare the two ADDR_EXPR nodes.  It is wasteful in that
	     all we need to do is test pointer equality for the arguments
	     to the two ADDR_EXPR nodes.  It is incorrect to use
	     operand_equal_p as that function is NOT equivalent to a
	     C equality test.  It can in fact return false for two
	     objects which would test as equal using the C equality
	     operator.  */
	  bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
	  return constant_boolean_node (equal
					? code == EQ_EXPR : code != EQ_EXPR,
					type);
	}

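      /* Example: for two distinct non-weak file-scope statics a and b,
	 &a == &b folds to false and &a != &b folds to true at compile
	 time.  */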
      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
	 a MINUS_EXPR of a constant, we can convert it into a comparison with
	 a revised constant as long as no overflow occurs.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      fold_convert_loc (loc, TREE_TYPE (arg0),
							arg1),
				      TREE_OPERAND (arg0, 1)))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a NEGATE_EXPR.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
							arg1)))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a BIT_XOR_EXPR;  X ^ C1 == C2 is X == (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, BIT_XOR_EXPR,
						 TREE_TYPE (arg0),
						 fold_convert_loc (loc,
								   TREE_TYPE (arg0),
								   arg1),
						 TREE_OPERAND (arg0, 1)));

      /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR
	   || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	   || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
									0)),
			      arg1, 0)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	      || POINTER_TYPE_P (TREE_TYPE (arg0))))
	{
	  tree val = TREE_OPERAND (arg0, 1);
	  return omit_two_operands_loc (loc, type,
					fold_build2_loc (loc, code, type,
							 val,
							 build_int_cst (TREE_TYPE (val),
									0)),
					TREE_OPERAND (arg0, 0), arg1);
	}

      /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
									1)),
			      arg1, 0)
	  && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
	{
	  return omit_two_operands_loc (loc, type,
					code == NE_EXPR
					? boolean_true_node : boolean_false_node,
					TREE_OPERAND (arg0, 1), arg1);
	}

      /* If we have X - Y == 0, we can convert that to X == Y and similarly
	 for !=.  Don't do this for ordered comparisons due to overflow.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
	  && integer_zerop (arg1))
	return fold_build2_loc (loc, code, type,
				TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg0, 1));

      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      if (TREE_CODE (arg0) == ABS_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1)))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);

      /* If this is an EQ or NE comparison with zero and ARG0 is
	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
	 two operations, but the latter can be done in one less insn
	 on machines that have only two-operand insns or on which a
	 constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_zerop (arg1))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  if (TREE_CODE (arg00) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg00, 0)))
	    {
	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
					  arg01, TREE_OPERAND (arg00, 1));
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				     build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, TREE_TYPE (arg1),
							tem),
				      arg1);
	    }
	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
		   && integer_onep (TREE_OPERAND (arg01, 0)))
	    {
	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
					  arg00, TREE_OPERAND (arg01, 1));
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				     build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2_loc (loc, code, type,
				      fold_convert_loc (loc, TREE_TYPE (arg1),
							tem),
				      arg1);
	    }
	}

      /* If this is an NE or EQ comparison of zero against the result of a
	 signed MOD operation whose second operand is a power of 2, make
	 the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tree newtype = unsigned_type_for (TREE_TYPE (arg0));
	  tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
					 fold_convert_loc (loc, newtype,
							   TREE_OPERAND (arg0, 0)),
					 fold_convert_loc (loc, newtype,
							   TREE_OPERAND (arg0, 1)));

	  return fold_build2_loc (loc, code, type, newmod,
				  fold_convert_loc (loc, newtype, arg1));
	}

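      /* Example: for signed X, (X % 4) == 0 becomes
	 ((unsigned) X % 4U) == 0, which the TRUNC_MOD_EXPR folding above
	 can then turn into ((unsigned) X & 3U) == 0.  */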
      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
	 C1 is a valid shift constant, and C2 is a power of two, i.e.
	 a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
	     == INTEGER_CST
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && integer_zerop (arg1))
	{
	  tree itype = TREE_TYPE (arg0);
	  unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);

	  /* Check for a valid shift count.  */
	  if (TREE_INT_CST_HIGH (arg001) == 0
	      && TREE_INT_CST_LOW (arg001) < prec)
	    {
	      tree arg01 = TREE_OPERAND (arg0, 1);
	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
	      /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
		 can be rewritten as (X & (C2 << C1)) != 0.  */
	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
		{
		  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype,
					 arg01, arg001);
		  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype,
					 arg000, tem);
		  return fold_build2_loc (loc, code, type, tem,
					  fold_convert_loc (loc, itype, arg1));
		}
	      /* Otherwise, for signed (arithmetic) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
	      else if (!TYPE_UNSIGNED (itype))
		return fold_build2_loc (loc,
					code == EQ_EXPR ? GE_EXPR : LT_EXPR,
					type,
					arg000, build_int_cst (itype, 0));
	      /* Otherwise, of unsigned (logical) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
	      else
		return omit_one_operand_loc (loc, type,
					     code == EQ_EXPR
					     ? integer_one_node
					     : integer_zero_node,
					     arg000);
	    }
	}

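      /* Example: ((X >> 2) & 4) != 0 becomes (X & 16) != 0, since the
	 single mask bit shifted back left by the shift count still fits
	 in the precision of the type.  */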
      /* If we have (A & C) == C where C is a power of 2, convert this into
	 (A & C) != 0.  Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
				arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
							integer_zero_node));

      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
	 bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
      if (tem)
	return tem;

      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
				       TREE_TYPE (TREE_OPERAND (arg0, 1)),
				       TREE_OPERAND (arg0, 1));
	  tree dandnotc
	    = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
			       fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
			       notc);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (dandnotc))
	    return omit_one_operand_loc (loc, type, rslt, arg0);
	}

      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
				       arg1);
	  tree candnotd
	    = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
			       TREE_OPERAND (arg0, 1),
			       fold_convert_loc (loc, TREE_TYPE (arg0), notd));
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (candnotd))
	    return omit_one_operand_loc (loc, type, rslt, arg0);
	}

      /* If this is a comparison of a field, we may be able to simplify it.  */
      if ((TREE_CODE (arg0) == COMPONENT_REF
	   || TREE_CODE (arg0) == BIT_FIELD_REF)
	  /* Handle the constant case even without -O
	     to make sure the warnings are given.  */
	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
	{
	  t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
	  if (t1)
	    return t1;
	}

      /* Optimize comparisons of strlen vs zero to a compare of the
	 first character of the string vs zero.  To wit,
		strlen(ptr) == 0   =>  *ptr == 0
		strlen(ptr) != 0   =>  *ptr != 0
	 Other cases should reduce to one of these two (or a constant)
	 due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
	  && integer_zerop (arg1))
	{
	  tree fndecl = get_callee_fndecl (arg0);

	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
	      && call_expr_nargs (arg0) == 1
	      && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
		 == POINTER_TYPE)
	    {
	      tree iref = build_fold_indirect_ref_loc (loc,
						       CALL_EXPR_ARG (arg0, 0));
	      return fold_build2_loc (loc, code, type, iref,
				      build_int_cst (TREE_TYPE (iref), 0));
	    }
	}

      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
      if (TREE_CODE (arg0) == RSHIFT_EXPR
	  && integer_zerop (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree itype = TREE_TYPE (arg00);
	  if (TREE_INT_CST_HIGH (arg01) == 0
	      && TREE_INT_CST_LOW (arg01)
		 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
	    {
	      if (TYPE_UNSIGNED (itype))
		{
		  itype = signed_type_for (itype);
		  arg00 = fold_convert_loc (loc, itype, arg00);
		}
	      return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
				      type, arg00, build_int_cst (itype, 0));
	    }
	}

      /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
      if (integer_zerop (arg1)
	  && TREE_CODE (arg0) == BIT_XOR_EXPR)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg0, 1));

      /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				build_int_cst (TREE_TYPE (arg0), 0));
      /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
				build_int_cst (TREE_TYPE (arg0), 0));

      /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				fold_build2_loc (loc, BIT_XOR_EXPR,
						 TREE_TYPE (arg1),
						 TREE_OPERAND (arg0, 1), arg1));

      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
	 (X & C) == 0 when C is a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
				 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
				 TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type, tem,
				  fold_convert_loc (loc, TREE_TYPE (arg0),
						    arg1));
	}

      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
	 constant C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type,
				  arg00, build_int_cst (TREE_TYPE (arg00), 0));
	}

      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
	 when is C is a power of two, i.e. a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
	  && integer_zerop (arg1)
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
	{
	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
				 arg000, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
				  type,
				  tem, build_int_cst (TREE_TYPE (tem), 0));
	}

      if (integer_zerop (arg1)
	  && tree_expr_nonzero_p (arg0))
	{
	  tree res = constant_boolean_node (code == NE_EXPR, type);
	  return omit_one_operand_loc (loc, type, res, arg0);
	}

      /* Fold -X op -Y as X op Y, where op is eq/ne.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, code, type,
				TREE_OPERAND (arg0, 0),
				fold_convert_loc (loc, TREE_TYPE (arg0),
						  TREE_OPERAND (arg1, 0)));

      /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR, itype,
								      arg00, arg10),
						     arg01),
				    build_int_cst (itype, 0));

	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR, itype,
								      arg00, arg11),
						     arg01),
				    build_int_cst (itype, 0));

	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR, itype,
								      arg01, arg10),
						     arg00),
				    build_int_cst (itype, 0));

	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2_loc (loc, code, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, itype,
						     fold_build2_loc (loc,
								      BIT_XOR_EXPR, itype,
								      arg01, arg11),
						     arg00),
				    build_int_cst (itype, 0));
	}

      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg10 = TREE_OPERAND (arg1, 0);
	  tree arg11 = TREE_OPERAND (arg1, 1);
	  tree itype = TREE_TYPE (arg0);

	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
	     operand_equal_p guarantees no side-effects so we don't need
	     to use omit_one_operand on Z.  */
	  if (operand_equal_p (arg01, arg11, 0))
	    return fold_build2_loc (loc, code, type, arg00,
				    fold_convert_loc (loc, TREE_TYPE (arg00),
						      arg10));
	  if (operand_equal_p (arg01, arg10, 0))
	    return fold_build2_loc (loc, code, type, arg00,
				    fold_convert_loc (loc, TREE_TYPE (arg00),
						      arg11));
	  if (operand_equal_p (arg00, arg11, 0))
	    return fold_build2_loc (loc, code, type, arg01,
				    fold_convert_loc (loc, TREE_TYPE (arg01),
						      arg10));
	  if (operand_equal_p (arg00, arg10, 0))
	    return fold_build2_loc (loc, code, type, arg01,
				    fold_convert_loc (loc, TREE_TYPE (arg01),
						      arg11));

	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
	  if (TREE_CODE (arg01) == INTEGER_CST
	      && TREE_CODE (arg11) == INTEGER_CST)
	    {
	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
				     fold_convert_loc (loc, itype, arg11));
	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
	      return fold_build2_loc (loc, code, type, tem,
				      fold_convert_loc (loc, itype, arg10));
	    }
	}

      /* Attempt to simplify equality/inequality comparisons of complex
	 values.  Only lower the comparison if the result is known or
	 can be simplified to a single scalar comparison.  */
      if ((TREE_CODE (arg0) == COMPLEX_EXPR
	   || TREE_CODE (arg0) == COMPLEX_CST)
	  && (TREE_CODE (arg1) == COMPLEX_EXPR
	      || TREE_CODE (arg1) == COMPLEX_CST))
	{
	  tree real0, imag0, real1, imag1;
	  tree rcond, icond;

	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
	    {
	      real0 = TREE_OPERAND (arg0, 0);
	      imag0 = TREE_OPERAND (arg0, 1);
	    }
	  else
	    {
	      real0 = TREE_REALPART (arg0);
	      imag0 = TREE_IMAGPART (arg0);
	    }

	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
	    {
	      real1 = TREE_OPERAND (arg1, 0);
	      imag1 = TREE_OPERAND (arg1, 1);
	    }
	  else
	    {
	      real1 = TREE_REALPART (arg1);
	      imag1 = TREE_IMAGPART (arg1);
	    }

	  rcond = fold_binary_loc (loc, code, type, real0, real1);
	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
	    {
	      if (integer_zerop (rcond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_false_node,
						  imag0, imag1);
		  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_true_node,
						  imag0, imag1);
		  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
		}
	    }

	  icond = fold_binary_loc (loc, code, type, imag0, imag1);
	  if (icond && TREE_CODE (icond) == INTEGER_CST)
	    {
	      if (integer_zerop (icond))
		{
		  if (code == EQ_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_false_node,
						  real0, real1);
		  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
		}
	      else
		{
		  if (code == NE_EXPR)
		    return omit_two_operands_loc (loc, type,
						  boolean_true_node,
						  real0, real1);
		  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
		}
	    }
	}

      return NULL_TREE;

    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;

      /* Transform comparisons of the form X +- C CMP X.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
          && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
               && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
              || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
        {
          tree arg01 = TREE_OPERAND (arg0, 1);
          enum tree_code code0 = TREE_CODE (arg0);
          int is_positive;

          if (TREE_CODE (arg01) == REAL_CST)
            is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
          else
            is_positive = tree_int_cst_sgn (arg01);

          /* (X - c) > X becomes false.  */
          if (code == GT_EXPR
              && ((code0 == MINUS_EXPR && is_positive >= 0)
                  || (code0 == PLUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that (X - c) > X "
                                        "is always false"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (0, type);
            }

          /* Likewise (X + c) < X becomes false.  */
          if (code == LT_EXPR
              && ((code0 == PLUS_EXPR && is_positive >= 0)
                  || (code0 == MINUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(X + c) < X is always false"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (0, type);
            }

          /* Convert (X - c) <= X to true.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
              && code == LE_EXPR
              && ((code0 == MINUS_EXPR && is_positive >= 0)
                  || (code0 == PLUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(X - c) <= X is always true"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (1, type);
            }

          /* Convert (X + c) >= X to true.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
              && code == GE_EXPR
              && ((code0 == PLUS_EXPR && is_positive >= 0)
                  || (code0 == MINUS_EXPR && is_positive <= 0)))
            {
              if (TREE_CODE (arg01) == INTEGER_CST
                  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                fold_overflow_warning (("assuming signed overflow does not "
                                        "occur when assuming that "
                                        "(X + c) >= X is always true"),
                                       WARN_STRICT_OVERFLOW_ALL);
              return constant_boolean_node (1, type);
            }

          if (TREE_CODE (arg01) == INTEGER_CST)
            {
              /* Convert X + c > X and X - c < X to true for integers.  */
              if (code == GT_EXPR
                  && ((code0 == PLUS_EXPR && is_positive > 0)
                      || (code0 == MINUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X + c) > X is always true"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (1, type);
                }

              if (code == LT_EXPR
                  && ((code0 == MINUS_EXPR && is_positive > 0)
                      || (code0 == PLUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X - c) < X is always true"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (1, type);
                }

              /* Convert X + c <= X and X - c >= X to false for integers.  */
              if (code == LE_EXPR
                  && ((code0 == PLUS_EXPR && is_positive > 0)
                      || (code0 == MINUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X + c) <= X is always false"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (0, type);
                }

              if (code == GE_EXPR
                  && ((code0 == MINUS_EXPR && is_positive > 0)
                      || (code0 == PLUS_EXPR && is_positive < 0)))
                {
                  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
                    fold_overflow_warning (("assuming signed overflow does "
                                            "not occur when assuming that "
                                            "(X - c) >= X is always false"),
                                           WARN_STRICT_OVERFLOW_ALL);
                  return constant_boolean_node (0, type);
                }
            }
        }
      /* Comparisons with the highest or lowest possible integer of
         the specified precision will have known values.  */
      {
        tree arg1_type = TREE_TYPE (arg1);
        unsigned int width = TYPE_PRECISION (arg1_type);

        if (TREE_CODE (arg1) == INTEGER_CST
            && width <= 2 * HOST_BITS_PER_WIDE_INT
            && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
          {
            HOST_WIDE_INT signed_max_hi;
            unsigned HOST_WIDE_INT signed_max_lo;
            unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;

            if (width <= HOST_BITS_PER_WIDE_INT)
              {
                signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
                                - 1;
                signed_max_hi = 0;
                max_hi = 0;

                if (TYPE_UNSIGNED (arg1_type))
                  {
                    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
                    min_lo = 0;
                    min_hi = 0;
                  }
                else
                  {
                    max_lo = signed_max_lo;
                    min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
                    min_hi = -1;
                  }
              }
            else
              {
                width -= HOST_BITS_PER_WIDE_INT;
                signed_max_lo = -1;
                signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
                                - 1;
                max_lo = -1;
                min_lo = 0;

                if (TYPE_UNSIGNED (arg1_type))
                  {
                    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
                    min_hi = 0;
                  }
                else
                  {
                    max_hi = signed_max_hi;
                    min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
                  }
              }

            if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
                && TREE_INT_CST_LOW (arg1) == max_lo)
              switch (code)
                {
                case GT_EXPR:
                  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

                case GE_EXPR:
                  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

                case LE_EXPR:
                  return omit_one_operand_loc (loc, type, integer_one_node, arg0);

                case LT_EXPR:
                  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

                /* The GE_EXPR and LT_EXPR cases above are not normally
                   reached because of previous transformations.  */

                default:
                  break;
                }

            else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
                     == max_hi
                     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
              switch (code)
                {
                case GT_EXPR:
                  arg1 = const_binop (PLUS_EXPR, arg1,
                                      build_int_cst (TREE_TYPE (arg1), 1));
                  return fold_build2_loc (loc, EQ_EXPR, type,
                                          fold_convert_loc (loc,
                                                            TREE_TYPE (arg1), arg0),
                                          arg1);
                case LE_EXPR:
                  arg1 = const_binop (PLUS_EXPR, arg1,
                                      build_int_cst (TREE_TYPE (arg1), 1));
                  return fold_build2_loc (loc, NE_EXPR, type,
                                          fold_convert_loc (loc, TREE_TYPE (arg1),
                                                            arg0),
                                          arg1);
                default:
                  break;
                }

            else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
                     == min_hi
                     && TREE_INT_CST_LOW (arg1) == min_lo)
              switch (code)
                {
                case LT_EXPR:
                  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

                case LE_EXPR:
                  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

                case GE_EXPR:
                  return omit_one_operand_loc (loc, type, integer_one_node, arg0);

                case GT_EXPR:
                  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

                default:
                  break;
                }

            else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
                     == min_hi
                     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
              switch (code)
                {
                case GE_EXPR:
                  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
                  return fold_build2_loc (loc, NE_EXPR, type,
                                          fold_convert_loc (loc,
                                                            TREE_TYPE (arg1), arg0),
                                          arg1);
                case LT_EXPR:
                  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
                  return fold_build2_loc (loc, EQ_EXPR, type,
                                          fold_convert_loc (loc, TREE_TYPE (arg1),
                                                            arg0),
                                          arg1);
                default:
                  break;
                }

            else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
                     && TREE_INT_CST_LOW (arg1) == signed_max_lo
                     && TYPE_UNSIGNED (arg1_type)
                     /* We will flip the signedness of the comparison operator
                        associated with the mode of arg1, so the sign bit is
                        specified by this mode.  Check that arg1 is the signed
                        max associated with this sign bit.  */
                     && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
                     /* signed_type does not work on pointer types.  */
                     && INTEGRAL_TYPE_P (arg1_type))
              {
                /* The following case also applies to X < signed_max+1
                   and X >= signed_max+1 because previous transformations.  */
                if (code == LE_EXPR || code == GT_EXPR)
                  {
                    tree st;
                    st = signed_type_for (TREE_TYPE (arg1));
                    return fold_build2_loc (loc,
                                            code == LE_EXPR ? GE_EXPR : LT_EXPR,
                                            type, fold_convert_loc (loc, st, arg0),
                                            build_int_cst (st, 0));
                  }
              }
          }
      }
      /* If we are comparing an ABS_EXPR with a constant, we can
         convert all the cases into explicit comparisons, but they may
         well not be faster than doing the ABS and one comparison.
         But ABS (X) <= C is a range comparison, which becomes a subtraction
         and a comparison, and is probably faster.  */
      if (code == LE_EXPR
          && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == ABS_EXPR
          && ! TREE_SIDE_EFFECTS (arg0)
          && (0 != (tem = negate_expr (arg1)))
          && TREE_CODE (tem) == INTEGER_CST
          && !TREE_OVERFLOW (tem))
        return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                                build2 (GE_EXPR, type,
                                        TREE_OPERAND (arg0, 0), tem),
                                build2 (LE_EXPR, type,
                                        TREE_OPERAND (arg0, 0), arg1));
      /* Convert ABS_EXPR<x> >= 0 to true.  */
      strict_overflow_p = false;
      if (code == GE_EXPR
          && (integer_zerop (arg1)
              || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
                  && real_zerop (arg1)))
          && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying comparison of "
                                    "absolute value and zero"),
                                   WARN_STRICT_OVERFLOW_CONDITIONAL);
          return omit_one_operand_loc (loc, type, integer_one_node, arg0);
        }

      /* Convert ABS_EXPR<x> < 0 to false.  */
      strict_overflow_p = false;
      if (code == LT_EXPR
          && (integer_zerop (arg1) || real_zerop (arg1))
          && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
        {
          if (strict_overflow_p)
            fold_overflow_warning (("assuming signed overflow does not occur "
                                    "when simplifying comparison of "
                                    "absolute value and zero"),
                                   WARN_STRICT_OVERFLOW_CONDITIONAL);
          return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
        }
      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
         and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
          && TYPE_UNSIGNED (TREE_TYPE (arg0))
          && TREE_CODE (arg1) == LSHIFT_EXPR
          && integer_onep (TREE_OPERAND (arg1, 0)))
        return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                           build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
                                   TREE_OPERAND (arg1, 1)),
                           build_int_cst (TREE_TYPE (arg0), 0));

      if ((code == LT_EXPR || code == GE_EXPR)
          && TYPE_UNSIGNED (TREE_TYPE (arg0))
          && CONVERT_EXPR_P (arg1)
          && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
          && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
        {
          tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
                        TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
          return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
                             fold_convert_loc (loc, TREE_TYPE (arg0), tem),
                             build_int_cst (TREE_TYPE (arg0), 0));
        }

      return NULL_TREE;
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
        {
          t1 = fold_relational_const (code, type, arg0, arg1);
          if (t1 != NULL_TREE)
            return t1;
        }

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
          && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
          && (code != LTGT_EXPR || ! flag_trapping_math))
        {
          t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
               ? integer_zero_node
               : integer_one_node;
          return omit_one_operand_loc (loc, type, t1, arg1);
        }

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
          && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
          && (code != LTGT_EXPR || ! flag_trapping_math))
        {
          t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
               ? integer_zero_node
               : integer_one_node;
          return omit_one_operand_loc (loc, type, t1, arg0);
        }

      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
          && operand_equal_p (arg0, arg1, 0))
        return constant_boolean_node (1, type);

      if (code == LTGT_EXPR
          && !flag_trapping_math
          && operand_equal_p (arg0, arg1, 0))
        return constant_boolean_node (0, type);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
        tree targ0 = strip_float_extensions (arg0);
        tree targ1 = strip_float_extensions (arg1);
        tree newtype = TREE_TYPE (targ0);

        if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
          newtype = TREE_TYPE (targ1);

        if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
          return fold_build2_loc (loc, code, type,
                                  fold_convert_loc (loc, newtype, targ0),
                                  fold_convert_loc (loc, newtype, targ1));
      }

      return NULL_TREE;
    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
         nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
        return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
                                 : fold_convert_loc (loc, type, arg1);
      return pedantic_non_lvalue_loc (loc, tem);

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg0) == REAL_CST
           && TREE_CODE (arg1) == REAL_CST)
          || (TREE_CODE (arg0) == INTEGER_CST
              && TREE_CODE (arg1) == INTEGER_CST))
        return build_complex (type, arg0, arg1);
      if (TREE_CODE (arg0) == REALPART_EXPR
          && TREE_CODE (arg1) == IMAGPART_EXPR
          && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
          && operand_equal_p (TREE_OPERAND (arg0, 0),
                              TREE_OPERAND (arg1, 0), 0))
        return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
                                     TREE_OPERAND (arg1, 0));
      return NULL_TREE;

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();
    case VEC_EXTRACT_EVEN_EXPR:
    case VEC_EXTRACT_ODD_EXPR:
    case VEC_INTERLEAVE_HIGH_EXPR:
    case VEC_INTERLEAVE_LOW_EXPR:
      if ((TREE_CODE (arg0) == VECTOR_CST
           || TREE_CODE (arg0) == CONSTRUCTOR)
          && (TREE_CODE (arg1) == VECTOR_CST
              || TREE_CODE (arg1) == CONSTRUCTOR))
        {
          unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
          unsigned char *sel = XALLOCAVEC (unsigned char, nelts);

          for (i = 0; i < nelts; i++)
            switch (code)
              {
              case VEC_EXTRACT_EVEN_EXPR:
                sel[i] = i * 2;
                break;
              case VEC_EXTRACT_ODD_EXPR:
                sel[i] = i * 2 + 1;
                break;
              case VEC_INTERLEAVE_HIGH_EXPR:
                sel[i] = (i + (BYTES_BIG_ENDIAN ? 0 : nelts)) / 2
                         + ((i & 1) ? nelts : 0);
                break;
              case VEC_INTERLEAVE_LOW_EXPR:
                sel[i] = (i + (BYTES_BIG_ENDIAN ? nelts : 0)) / 2
                         + ((i & 1) ? nelts : 0);
                break;
              default:
                gcc_unreachable ();
              }

          return fold_vec_perm (type, arg0, arg1, sel);
        }
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
   a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
   of GOTO_EXPR.  */

static tree
contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  switch (TREE_CODE (*tp))
    {
    case LABEL_EXPR:
      return *tp;

    case GOTO_EXPR:
      *walk_subtrees = 0;

      /* ... fall through ...  */

    default:
      return NULL_TREE;
    }
}

/* Return whether the sub-tree ST contains a label which is accessible from
   outside the sub-tree.  */

static bool
contains_label_p (tree st)
{
  return
   (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
}
/* Fold a ternary expression of code CODE and type TYPE with operands
   OP0, OP1, and OP2.  Return the folded expression if folding is
   successful.  Otherwise, return NULL_TREE.  */

tree
fold_ternary_loc (location_t loc, enum tree_code code, tree type,
                  tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 3);

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any cases,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  if (op2)
    {
      arg2 = op2;
      STRIP_NOPS (arg2);
    }

  switch (code)
    {
    case COMPONENT_REF:
      if (TREE_CODE (arg0) == CONSTRUCTOR
          && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
        {
          unsigned HOST_WIDE_INT idx;
          tree field, value;
          FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
            if (field == arg1)
              return value;
        }
      return NULL_TREE;
    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
         so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
        {
          tree unused_op = integer_zerop (arg0) ? op1 : op2;
          tem = integer_zerop (arg0) ? op2 : op1;
          /* Only optimize constant conditions when the selected branch
             has the same type as the COND_EXPR.  This avoids optimizing
             away "c ? x : throw", where the throw has a void type.
             Avoid throwing away that operand which contains label.  */
          if ((!TREE_SIDE_EFFECTS (unused_op)
               || !contains_label_p (unused_op))
              && (! VOID_TYPE_P (TREE_TYPE (tem))
                  || VOID_TYPE_P (type)))
            return pedantic_non_lvalue_loc (loc, tem);
          return NULL_TREE;
        }
      if (operand_equal_p (arg1, op2, 0))
        return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);

      /* If we have A op B ? A : C, we may be able to convert this to a
         simpler expression, depending on the operation and the values
         of B and C.  Signed zeros prevent all of these transformations,
         for reasons given above each one.

         Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                             arg1, TREE_OPERAND (arg0, 1))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
        {
          tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
          if (tem)
            return tem;
        }

      if (COMPARISON_CLASS_P (arg0)
          && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
                                             op2,
                                             TREE_OPERAND (arg0, 1))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
        {
          location_t loc0 = expr_location_or (arg0, loc);
          tem = fold_truth_not_expr (loc0, arg0);
          if (tem && COMPARISON_CLASS_P (tem))
            {
              tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
              if (tem)
                return tem;
            }
        }

      /* If the second operand is simpler than the third, swap them
         since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
          && tree_swap_operands_p (op1, op2, false))
        {
          location_t loc0 = expr_location_or (arg0, loc);
          /* See if this can be inverted.  If it can't, possibly because
             it was a floating-point inequality comparison, don't do
             anything.  */
          tem = fold_truth_not_expr (loc0, arg0);
          if (tem)
            return fold_build3_loc (loc, code, type, tem, op2, op1);
        }
      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (op1)
          && integer_zerop (op2)
          /* If we try to convert OP0 to our type, the
             call to fold will try to move the conversion inside
             a COND, which will recurse.  In that case, the COND_EXPR
             is probably the best choice, so leave it alone.  */
          && type == TREE_TYPE (arg0))
        return pedantic_non_lvalue_loc (loc, arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
         over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
          && integer_onep (op2)
          && truth_value_p (TREE_CODE (arg0)))
        return pedantic_non_lvalue_loc (loc,
                                        fold_convert_loc (loc, type,
                                                          invert_truthvalue_loc (loc,
                                                                                 arg0)));
      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
          && integer_zerop (TREE_OPERAND (arg0, 1))
          && integer_zerop (op2)
          && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
        {
          /* sign_bit_p only checks ARG1 bits within A's precision.
             If <sign bit of A> has wider type than A, bits outside
             of A's precision in <sign bit of A> need to be checked.
             If they are all 0, this optimization needs to be done
             in unsigned A's type, if they are all 1 in signed A's type,
             otherwise this can't be done.  */
          if (TYPE_PRECISION (TREE_TYPE (tem))
              < TYPE_PRECISION (TREE_TYPE (arg1))
              && TYPE_PRECISION (TREE_TYPE (tem))
                 < TYPE_PRECISION (type))
            {
              unsigned HOST_WIDE_INT mask_lo;
              HOST_WIDE_INT mask_hi;
              int inner_width, outer_width;
              tree tem_type;

              inner_width = TYPE_PRECISION (TREE_TYPE (tem));
              outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
              if (outer_width > TYPE_PRECISION (type))
                outer_width = TYPE_PRECISION (type);

              if (outer_width > HOST_BITS_PER_WIDE_INT)
                {
                  mask_hi = ((unsigned HOST_WIDE_INT) -1
                             >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
                  mask_lo = -1;
                }
              else
                {
                  mask_hi = 0;
                  mask_lo = ((unsigned HOST_WIDE_INT) -1
                             >> (HOST_BITS_PER_WIDE_INT - outer_width));
                }
              if (inner_width > HOST_BITS_PER_WIDE_INT)
                {
                  mask_hi &= ~((unsigned HOST_WIDE_INT) -1
                               >> (HOST_BITS_PER_WIDE_INT - inner_width));
                  mask_lo = 0;
                }
              else
                mask_lo &= ~((unsigned HOST_WIDE_INT) -1
                             >> (HOST_BITS_PER_WIDE_INT - inner_width));

              if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
                  && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
                {
                  tem_type = signed_type_for (TREE_TYPE (tem));
                  tem = fold_convert_loc (loc, tem_type, tem);
                }
              else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
                       && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
                {
                  tem_type = unsigned_type_for (TREE_TYPE (tem));
                  tem = fold_convert_loc (loc, tem_type, tem);
                }
              else
                tem = NULL_TREE;
            }

          if (tem)
            return
              fold_convert_loc (loc, type,
                                fold_build2_loc (loc, BIT_AND_EXPR,
                                                 TREE_TYPE (tem), tem,
                                                 fold_convert_loc (loc,
                                                                   TREE_TYPE (tem),
                                                                   arg1)));
        }
      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
         already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1))
          && integer_zerop (op2)
          && integer_pow2p (arg1))
        {
          tree tem = TREE_OPERAND (arg0, 0);
          STRIP_NOPS (tem);
          if (TREE_CODE (tem) == RSHIFT_EXPR
              && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
              && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
                 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
            return fold_build2_loc (loc, BIT_AND_EXPR, type,
                                    TREE_OPERAND (tem, 0), arg1);
        }

      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
         is probably obsolete because the first operand should be a
         truth value (that's why we have the two cases above), but let's
         leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
          && TREE_CODE (arg0) == NE_EXPR
          && integer_zerop (TREE_OPERAND (arg0, 1))
          && integer_pow2p (arg1)
          && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
          && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
                              arg1, OEP_ONLY_CONST))
        return pedantic_non_lvalue_loc (loc,
                                        fold_convert_loc (loc, type,
                                                          TREE_OPERAND (arg0, 0)));
      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (arg1)))
        return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                                fold_convert_loc (loc, type, arg0),
                                arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (op2)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (arg1)))
        {
          location_t loc0 = expr_location_or (arg0, loc);
          /* Only perform transformation if ARG0 is easily inverted.  */
          tem = fold_truth_not_expr (loc0, arg0);
          if (tem)
            return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
                                    fold_convert_loc (loc, type, tem),
                                    arg1);
        }

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (op2)))
        {
          location_t loc0 = expr_location_or (arg0, loc);
          /* Only perform transformation if ARG0 is easily inverted.  */
          tem = fold_truth_not_expr (loc0, arg0);
          if (tem)
            return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
                                    fold_convert_loc (loc, type, tem),
                                    op2);
        }

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (integer_onep (arg1)
          && truth_value_p (TREE_CODE (arg0))
          && truth_value_p (TREE_CODE (op2)))
        return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
                                fold_convert_loc (loc, type, arg0),
                                op2);

      return NULL_TREE;
    case CALL_EXPR:
      /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
         of fold_ternary on them.  */
      gcc_unreachable ();

    case BIT_FIELD_REF:
      if ((TREE_CODE (arg0) == VECTOR_CST
           || TREE_CODE (arg0) == CONSTRUCTOR)
          && type == TREE_TYPE (TREE_TYPE (arg0)))
        {
          unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
          unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);

          if (width != 0
              && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
              && (idx % width) == 0
              && (idx = idx / width)
                 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
            {
              if (TREE_CODE (arg0) == VECTOR_CST)
                {
                  tree elements = TREE_VECTOR_CST_ELTS (arg0);
                  while (idx-- > 0 && elements)
                    elements = TREE_CHAIN (elements);
                  if (elements)
                    return TREE_VALUE (elements);
                }
              else if (idx < CONSTRUCTOR_NELTS (arg0))
                return CONSTRUCTOR_ELT (arg0, idx)->value;
              return build_zero_cst (type);
            }
        }

      /* A bit-field-ref that referenced the full argument can be stripped.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
          && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
          && integer_zerop (op2))
        return fold_convert_loc (loc, type, arg0);

      return NULL_TREE;
    case FMA_EXPR:
      /* For integers we can decompose the FMA if possible.  */
      if (TREE_CODE (arg0) == INTEGER_CST
          && TREE_CODE (arg1) == INTEGER_CST)
        return fold_build2_loc (loc, PLUS_EXPR, type,
                                const_binop (MULT_EXPR, arg0, arg1), arg2);
      if (integer_zerop (arg2))
        return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);

      return fold_fma (loc, type, arg0, arg1, arg2);
    case VEC_PERM_EXPR:
      if (TREE_CODE (arg2) == VECTOR_CST)
        {
          unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
          unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
          tree t;
          bool need_mask_canon = false;

          gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)));
          for (i = 0, t = TREE_VECTOR_CST_ELTS (arg2);
               i < nelts && t; i++, t = TREE_CHAIN (t))
            {
              if (TREE_CODE (TREE_VALUE (t)) != INTEGER_CST)
                return NULL_TREE;

              sel[i] = TREE_INT_CST_LOW (TREE_VALUE (t)) & (2 * nelts - 1);
              if (TREE_INT_CST_HIGH (TREE_VALUE (t))
                  || ((unsigned HOST_WIDE_INT)
                      TREE_INT_CST_LOW (TREE_VALUE (t)) != sel[i]))
                need_mask_canon = true;
            }
          for (; i < nelts; i++)
            sel[i] = 0;

          if ((TREE_CODE (arg0) == VECTOR_CST
               || TREE_CODE (arg0) == CONSTRUCTOR)
              && (TREE_CODE (arg1) == VECTOR_CST
                  || TREE_CODE (arg1) == CONSTRUCTOR))
            {
              t = fold_vec_perm (type, arg0, arg1, sel);
              if (t != NULL_TREE)
                return t;
            }

          if (need_mask_canon && arg2 == op2)
            {
              tree list = NULL_TREE, eltype = TREE_TYPE (TREE_TYPE (arg2));
              for (i = 0; i < nelts; i++)
                list = tree_cons (NULL_TREE,
                                  build_int_cst (eltype, sel[nelts - i - 1]),
                                  list);
              t = build_vector (TREE_TYPE (arg2), list);
              return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, t);
            }
        }
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;
  location_t loc = EXPR_LOCATION (expr);

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  /* CALL_EXPR-like objects with variable numbers of operands are
     treated specially.  */
  if (kind == tcc_vl_exp)
    {
      if (code == CALL_EXPR)
        {
          tem = fold_call_expr (loc, expr, false);
          return tem ? tem : expr;
        }
      return expr;
    }

  if (IS_EXPR_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      switch (TREE_CODE_LENGTH (code))
        {
        case 1:
          op0 = TREE_OPERAND (t, 0);
          tem = fold_unary_loc (loc, code, type, op0);
          return tem ? tem : expr;
        case 2:
          op0 = TREE_OPERAND (t, 0);
          op1 = TREE_OPERAND (t, 1);
          tem = fold_binary_loc (loc, code, type, op0, op1);
          return tem ? tem : expr;
        case 3:
          op0 = TREE_OPERAND (t, 0);
          op1 = TREE_OPERAND (t, 1);
          op2 = TREE_OPERAND (t, 2);
          tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
          return tem ? tem : expr;
        default:
          break;
        }
    }
  switch (code)
    {
    case ARRAY_REF:
      {
        tree op0 = TREE_OPERAND (t, 0);
        tree op1 = TREE_OPERAND (t, 1);

        if (TREE_CODE (op1) == INTEGER_CST
            && TREE_CODE (op0) == CONSTRUCTOR
            && ! type_contains_placeholder_p (TREE_TYPE (op0)))
          {
            VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
            unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
            unsigned HOST_WIDE_INT begin = 0;

            /* Find a matching index by means of a binary search.  */
            while (begin != end)
              {
                unsigned HOST_WIDE_INT middle = (begin + end) / 2;
                tree index = VEC_index (constructor_elt, elts, middle)->index;

                if (TREE_CODE (index) == INTEGER_CST
                    && tree_int_cst_lt (index, op1))
                  begin = middle + 1;
                else if (TREE_CODE (index) == INTEGER_CST
                         && tree_int_cst_lt (op1, index))
                  end = middle;
                else if (TREE_CODE (index) == RANGE_EXPR
                         && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
                  begin = middle + 1;
                else if (TREE_CODE (index) == RANGE_EXPR
                         && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
                  end = middle;
                else
                  return VEC_index (constructor_elt, elts, middle)->value;
              }
          }

        return t;
      }

    case CONST_DECL:
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
static void fold_check_failed (const_tree, const_tree);
void print_fold_checksum (const_tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}
void
print_fold_checksum (const_tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_delete (ht);
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}
static void
fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}

static void
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
{
  void **slot;
  enum tree_code code;
  union tree_node buf;
  int i, len;

 recursive_label:
  if (expr == NULL)
    return;
  slot = (void **) htab_find_slot (ht, expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = CONST_CAST_TREE (expr);
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
      expr = (tree) &buf;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
           && (TYPE_POINTER_TO (expr)
               || TYPE_REFERENCE_TO (expr)
               || TYPE_CACHED_VALUES_P (expr)
               || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
               || TYPE_NEXT_VARIANT (expr)))
    {
      /* Allow these fields to be modified.  */
      tree tmp;
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = tmp = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
      TYPE_POINTER_TO (tmp) = NULL;
      TYPE_REFERENCE_TO (tmp) = NULL;
      TYPE_NEXT_VARIANT (tmp) = NULL;
      if (TYPE_CACHED_VALUES_P (tmp))
        {
          TYPE_CACHED_VALUES_P (tmp) = 0;
          TYPE_CACHED_VALUES (tmp) = NULL;
        }
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST
      && code != SSA_NAME
      && CODE_CONTAINS_STRUCT (code, TS_COMMON))
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
        {
        case STRING_CST:
          md5_process_bytes (TREE_STRING_POINTER (expr),
                             TREE_STRING_LENGTH (expr), ctx);
          break;
        case COMPLEX_CST:
          fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
          fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
          break;
        case VECTOR_CST:
          fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
          break;
        default:
          break;
        }
      break;
    case tcc_exceptional:
      switch (code)
        {
        case TREE_LIST:
          fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
          fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
          expr = TREE_CHAIN (expr);
          goto recursive_label;
          break;
        case TREE_VEC:
          for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
            fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
          break;
        default:
          break;
        }
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
    case tcc_vl_exp:
      len = TREE_OPERAND_LENGTH (expr);
      for (i = 0; i < len; ++i)
        fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
        {
          fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
          fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
          fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
          fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
          fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
        }
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
        fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);

      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
        {
          fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
          fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
          fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
        }
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
        fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
          || SCALAR_FLOAT_TYPE_P (expr))
        {
          fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
          fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
        }
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
          || TREE_CODE (expr) == UNION_TYPE
          || TREE_CODE (expr) == QUAL_UNION_TYPE)
        fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
/* Helper function for outputting the checksum of a tree T.  When
   debugging with gdb, you can "define mynext" to be "next" followed
   by "call debug_fold_checksum (op0)", then just trace down till the
   outputs differ.  */

DEBUG_FUNCTION void
debug_fold_checksum (const_tree t)
{
  int i;
  unsigned char checksum[16];
  struct md5_ctx ctx;
  htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);

  md5_init_ctx (&ctx);
  fold_checksum_tree (t, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_empty (ht);

  for (i = 0; i < 16; i++)
    fprintf (stderr, "%d ", checksum[i]);

  fprintf (stderr, "\n");
}

#endif
/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  LOC is the location of the resulting expression.
   Return a folded expression if successful.  Otherwise, return a tree
   expression with code CODE of type TYPE with an operand OP0.  */

tree
fold_build1_stat_loc (location_t loc,
                      enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);
#endif

  tem = fold_unary_loc (loc, code, type, op0);
  if (!tem)
    tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}
/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting
   expression.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with operands
   OP0 and OP1.  */

tree
fold_build2_stat_loc (location_t loc,
                      enum tree_code code, tree type, tree op0, tree op1
                      MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_after_op0[16],
                checksum_after_op1[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);
#endif

  tem = fold_binary_loc (loc, code, type, op0, op1);
  if (!tem)
    tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_delete (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}
/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
                      tree op0, tree op1, tree op2 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_before_op2[16],
                checksum_after_op0[16],
                checksum_after_op1[16],
                checksum_after_op2[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  htab_empty (ht);
#endif

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_empty (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op2);
  htab_delete (ht);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
   arguments in ARGARRAY, and a null static chain.
   Return a folded expression if successful.  Otherwise, return a CALL_EXPR
   of type TYPE from the given operands as constructed by build_call_array.  */

tree
fold_build_call_array_loc (location_t loc, tree type, tree fn,
                           int nargs, tree *argarray)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_fn[16],
                checksum_before_arglist[16],
                checksum_after_fn[16],
                checksum_after_arglist[16];
  struct md5_ctx ctx;
  htab_t ht;
  int i;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_fn);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_arglist);
  htab_empty (ht);
#endif

  tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_fn);
  htab_empty (ht);

  if (memcmp (checksum_before_fn, checksum_after_fn, 16))
    fold_check_failed (fn, tem);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_arglist);
  htab_delete (ht);

  if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
    fold_check_failed (NULL_TREE, tem);
#endif
  return tem;
}
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;

tree
fold_build1_initializer_loc (location_t loc, enum tree_code code,
                             tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1_loc (loc, code, type, op);

  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer_loc (location_t loc, enum tree_code code,
                             tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2_loc (loc, code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

tree
fold_build3_initializer_loc (location_t loc, enum tree_code code,
                             tree type, tree op0, tree op1, tree op2)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build3_loc (loc, code, type, op0, op1, op2);

  END_FOLD_INIT;
  return result;
}

tree
fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
                                       int nargs, tree *argarray)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determined it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

int
multiple_of_p (tree type, const_tree top, const_tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
         a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
        return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
        {
          tree op1, t1;

          op1 = TREE_OPERAND (top, 1);
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          if (TYPE_PRECISION (TREE_TYPE (size_one_node))
              > TREE_INT_CST_LOW (op1)
              && TREE_INT_CST_HIGH (op1) == 0
              && 0 != (t1 = fold_convert (type,
                                          const_binop (LSHIFT_EXPR,
                                                       size_one_node,
                                                       op1)))
              && !TREE_OVERFLOW (t1))
            return multiple_of_p (type, t1, bottom);
        }
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
          || (TYPE_PRECISION (type)
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
        return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case COND_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
              && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
          || integer_zerop (bottom)
          || (TYPE_UNSIGNED (type)
              && (tree_int_cst_sgn (top) < 0
                  || tree_int_cst_sgn (bottom) < 0)))
        return 0;
      return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
                                             top, bottom));

    default:
      return 0;
    }
}
/* Return true if CODE or TYPE is known to be non-negative. */

static bool
tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
{
  if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
      && truth_value_p (code))
    /* Truth values evaluate to 0 or 1, which is nonnegative unless we
       have a signed:1 type (where the value is -1 and 0).  */
    return true;

  return false;
}
/* Return true if (CODE OP0) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
                                bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
         ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!INTEGRAL_TYPE_P (type))
        return true;
      if (TYPE_OVERFLOW_UNDEFINED (type))
        {
          *strict_overflow_p = true;
          return true;
        }
      break;

    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
                                            strict_overflow_p);

    CASE_CONVERT:
      {
        tree inner_type = TREE_TYPE (op0);
        tree outer_type = type;

        if (TREE_CODE (outer_type) == REAL_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_warnv_p (op0,
                                                    strict_overflow_p);
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              {
                if (TYPE_UNSIGNED (inner_type))
                  return true;
                return tree_expr_nonnegative_warnv_p (op0,
                                                      strict_overflow_p);
              }
          }
        else if (TREE_CODE (outer_type) == INTEGER_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_warnv_p (op0,
                                                    strict_overflow_p);
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
                     && TYPE_UNSIGNED (inner_type);
          }
      }
      break;

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
                                 tree op1, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type))
        return (tree_expr_nonnegative_warnv_p (op0,
                                               strict_overflow_p)
                && tree_expr_nonnegative_warnv_p (op1,
                                                  strict_overflow_p));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
         both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && TREE_CODE (op0) == NOP_EXPR
          && TREE_CODE (op1) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
            {
              unsigned int prec = MAX (TYPE_PRECISION (inner1),
                                       TYPE_PRECISION (inner2)) + 1;
              return prec < TYPE_PRECISION (type);
            }
        }
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (type))
        {
          /* x * x for floating point x is always non-negative.  */
          if (operand_equal_p (op0, op1, 0))
            return true;
          return (tree_expr_nonnegative_warnv_p (op0,
                                                 strict_overflow_p)
                  && tree_expr_nonnegative_warnv_p (op1,
                                                    strict_overflow_p));
        }

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
         both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
          && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
        {
          tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
            ? TREE_TYPE (TREE_OPERAND (op0, 0))
            : TREE_TYPE (op0);
          tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
            ? TREE_TYPE (TREE_OPERAND (op1, 0))
            : TREE_TYPE (op1);

          bool unsigned0 = TYPE_UNSIGNED (inner0);
          bool unsigned1 = TYPE_UNSIGNED (inner1);

          if (TREE_CODE (op0) == INTEGER_CST)
            unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;

          if (TREE_CODE (op1) == INTEGER_CST)
            unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;

          if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
              && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
            {
              unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
                ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
                : TYPE_PRECISION (inner0);

              unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
                ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
                : TYPE_PRECISION (inner1);

              return precision0 + precision1 < TYPE_PRECISION (type);
            }
        }
      return false;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
                                             strict_overflow_p)
              || tree_expr_nonnegative_warnv_p (op1,
                                                strict_overflow_p));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return (tree_expr_nonnegative_warnv_p (op0,
                                             strict_overflow_p)
              && tree_expr_nonnegative_warnv_p (op1,
                                                strict_overflow_p));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_warnv_p (op0,
                                            strict_overflow_p);
    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */

bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                             strict_overflow_p)
              && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
                                                strict_overflow_p));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
                                              TREE_TYPE (t));
    }
  /* We don't know sign of `t', so be conservative and return false.  */
}
14947 /* Return true if T is known to be non-negative. If the return
14948 value is based on the assumption that signed overflow is undefined,
14949 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14950 *STRICT_OVERFLOW_P. */
bool
tree_call_nonnegative_warnv_p (tree type, tree fndecl,
			       tree arg0, tree arg1, bool *strict_overflow_p)
{
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
	CASE_FLT_FN (BUILT_IN_ACOS):
	CASE_FLT_FN (BUILT_IN_ACOSH):
	CASE_FLT_FN (BUILT_IN_CABS):
	CASE_FLT_FN (BUILT_IN_COSH):
	CASE_FLT_FN (BUILT_IN_ERFC):
	CASE_FLT_FN (BUILT_IN_EXP):
	CASE_FLT_FN (BUILT_IN_EXP10):
	CASE_FLT_FN (BUILT_IN_EXP2):
	CASE_FLT_FN (BUILT_IN_FABS):
	CASE_FLT_FN (BUILT_IN_FDIM):
	CASE_FLT_FN (BUILT_IN_HYPOT):
	CASE_FLT_FN (BUILT_IN_POW10):
	CASE_INT_FN (BUILT_IN_FFS):
	CASE_INT_FN (BUILT_IN_PARITY):
	CASE_INT_FN (BUILT_IN_POPCOUNT):
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
	/* Always true.  */
	return true;

	CASE_FLT_FN (BUILT_IN_SQRT):
	/* sqrt(-0.0) is -0.0.  */
	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	  return true;
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_ASINH):
	CASE_FLT_FN (BUILT_IN_ATAN):
	CASE_FLT_FN (BUILT_IN_ATANH):
	CASE_FLT_FN (BUILT_IN_CBRT):
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_ERF):
	CASE_FLT_FN (BUILT_IN_EXPM1):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_FMOD):
	CASE_FLT_FN (BUILT_IN_FREXP):
	CASE_FLT_FN (BUILT_IN_ICEIL):
	CASE_FLT_FN (BUILT_IN_IFLOOR):
	CASE_FLT_FN (BUILT_IN_IRINT):
	CASE_FLT_FN (BUILT_IN_IROUND):
	CASE_FLT_FN (BUILT_IN_LCEIL):
	CASE_FLT_FN (BUILT_IN_LDEXP):
	CASE_FLT_FN (BUILT_IN_LFLOOR):
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	CASE_FLT_FN (BUILT_IN_LLRINT):
	CASE_FLT_FN (BUILT_IN_LLROUND):
	CASE_FLT_FN (BUILT_IN_LRINT):
	CASE_FLT_FN (BUILT_IN_LROUND):
	CASE_FLT_FN (BUILT_IN_MODF):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_SCALB):
	CASE_FLT_FN (BUILT_IN_SCALBLN):
	CASE_FLT_FN (BUILT_IN_SCALBN):
	CASE_FLT_FN (BUILT_IN_SIGNBIT):
	CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
	CASE_FLT_FN (BUILT_IN_SINH):
	CASE_FLT_FN (BUILT_IN_TANH):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	/* True if the 1st argument is nonnegative.  */
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_FMAX):
	/* True if the 1st OR 2nd arguments are nonnegative.  */
	return (tree_expr_nonnegative_warnv_p (arg0,
					       strict_overflow_p)
		|| (tree_expr_nonnegative_warnv_p (arg1,
						   strict_overflow_p)));

	CASE_FLT_FN (BUILT_IN_FMIN):
	/* True if the 1st AND 2nd arguments are nonnegative.  */
	return (tree_expr_nonnegative_warnv_p (arg0,
					       strict_overflow_p)
		&& (tree_expr_nonnegative_warnv_p (arg1,
						   strict_overflow_p)));

	CASE_FLT_FN (BUILT_IN_COPYSIGN):
	/* True if the 2nd argument is nonnegative.  */
	return tree_expr_nonnegative_warnv_p (arg1,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_POWI):
	/* True if the 1st argument is nonnegative or the second
	   argument is an even integer.  */
	if (TREE_CODE (arg1) == INTEGER_CST
	    && (TREE_INT_CST_LOW (arg1) & 1) == 0)
	  return true;
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_POW):
	/* True if the 1st argument is nonnegative or the second
	   argument is an even integer valued real.  */
	if (TREE_CODE (arg1) == REAL_CST)
	  {
	    REAL_VALUE_TYPE c;
	    HOST_WIDE_INT n;

	    c = TREE_REAL_CST (arg1);
	    n = real_to_integer (&c);
	    if ((n & 1) == 0)
	      {
		REAL_VALUE_TYPE cint;
		real_from_integer (&cint, VOIDmode, n,
				   n < 0 ? -1 : 0, 0);
		if (real_identical (&c, &cint))
		  return true;
	      }
	  }
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

      default:
	break;
      }
  return tree_simple_nonnegative_warnv_p (CALL_EXPR,
					  type);
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */
static bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
	tree temp = TARGET_EXPR_SLOT (t);
	t = TARGET_EXPR_INITIAL (t);

	/* If the initializer is non-void, then it's a normal expression
	   that will be assigned to the slot.  */
	if (!VOID_TYPE_P (t))
	  return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

	/* Otherwise, the initializer sets the slot in some way.  One common
	   way is an assignment statement at the end of the initializer.  */
	while (1)
	  {
	    if (TREE_CODE (t) == BIND_EXPR)
	      t = expr_last (BIND_EXPR_BODY (t));
	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
		     || TREE_CODE (t) == TRY_CATCH_EXPR)
	      t = expr_last (TREE_OPERAND (t, 0));
	    else if (TREE_CODE (t) == STATEMENT_LIST)
	      t = expr_last (t);
	    else
	      break;
	  }
	if (TREE_CODE (t) == MODIFY_EXPR
	    && TREE_OPERAND (t, 0) == temp)
	  return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						strict_overflow_p);

	return false;
      }

    case CALL_EXPR:
      {
	tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
	tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;

	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
					      get_callee_fndecl (t),
					      arg0,
					      arg1,
					      strict_overflow_p);
      }

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					    strict_overflow_p);

    case BIND_EXPR:
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
					    strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
					    strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  */
bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code;

  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p);
    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
    }
}
/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-negative"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
/* Return true when (CODE OP0) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */
bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
			    bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
		&& tree_expr_nonzero_warnv_p (op0,
					      strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0,
					strict_overflow_p);

    default:
      break;
    }

  return false;
}
/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */
bool
tree_binary_nonzero_warnv_p (enum tree_code code,
			     tree type,
			     tree op0,
			     tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* With the presence of negative values it is hard
	     to say something.  */
	  sub_strict_overflow_p = false;
	  if (!tree_expr_nonnegative_warnv_p (op0,
					      &sub_strict_overflow_p)
	      || !tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	    return false;
	  /* One of operands must be positive and the other non-negative.  */
	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
	     overflows, on a twos-complement machine the sum of two
	     nonnegative numbers can never be zero.  */
	  return (tree_expr_nonzero_warnv_p (op0,
					     strict_overflow_p)
		  || tree_expr_nonzero_warnv_p (op1,
						strict_overflow_p));
	}
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  if (tree_expr_nonzero_warnv_p (op0,
					 strict_overflow_p)
	      && tree_expr_nonzero_warnv_p (op1,
					    strict_overflow_p))
	    {
	      *strict_overflow_p = true;
	      return true;
	    }
	}
      break;

    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (op1,
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;

	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p))
	    return true;

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_warnv_p (op0,
						strict_overflow_p);
	}
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
					  &sub_strict_overflow_p)
	       && tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p)
	      || tree_expr_nonzero_warnv_p (op0,
					    strict_overflow_p));

    default:
      break;
    }

  return false;
}
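/* Illustrative sketch, not part of GCC: the PLUS_EXPR reasoning
   above.  If both operands are non-negative and one is strictly
   positive, their two's-complement sum is nonzero even without
   assuming undefined overflow, which is why *STRICT_OVERFLOW_P is
   left alone there.  With 32-bit values below 2^31 the sum cannot
   even reach 2^32, so it lands in [1, 2^32 - 2].  The function name
   is hypothetical.  */

static int
example_sum_of_nonnegatives_nonzero (unsigned int a, unsigned int b)
{
  /* Assumes a, b < 0x80000000 (non-negative as signed) and a != 0.  */
  return a + b != 0;	/* always 1 under those assumptions */
}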
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */
bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
	tree base = TREE_OPERAND (t, 0);
	if (!DECL_P (base))
	  base = get_base_address (base);

	if (!base)
	  return false;

	/* Weak declarations may link to NULL.  Other things may also be NULL
	   so protect with -fdelete-null-pointer-checks; but not variables
	   allocated on the stack.  */
	if (DECL_P (base)
	    && (flag_delete_null_pointer_checks
		|| (DECL_CONTEXT (base)
		    && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
		    && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
	  return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);

	/* Constants are never weak.  */
	if (CONSTANT_CLASS_P (base))
	  return true;

	return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    default:
      break;
    }

  return false;
}
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */
bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					 strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
					  TREE_OPERAND (t, 0),
					  TREE_OPERAND (t, 1),
					  strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					strict_overflow_p);

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }
  return false;
}
/* Return true when T is an address and is known to be nonzero.
   Handle warnings about undefined signed overflow.  */

static bool
tree_expr_nonzero_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-zero"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
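/* Illustrative sketch, not part of GCC proper: a typical call of the
   helper above from folding code.  Assumes the usual tree.h
   environment; the wrapper name is hypothetical.  */

static tree
example_fold_constant_sum (void)
{
  tree a = build_int_cst (integer_type_node, 2);
  tree b = build_int_cst (integer_type_node, 3);

  /* Yields the INTEGER_CST 5; had either operand been non-constant,
     the result would be NULL_TREE and the caller would keep the
     original expression.  */
  return fold_binary_to_constant (PLUS_EXPR, integer_type_node, a, b);
}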
/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop_loc (loc, index,
				     fold_convert_loc (loc, sizetype,
						       low_bound));

	  string = exp1;
	}

      if (string
	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return build_int_cst_type (TREE_TYPE (exp),
				   (TREE_STRING_POINTER (string)
				    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	double_int val = tree_to_double_int (arg0);
	int overflow = neg_double (val.low, val.high, &val.low, &val.high);

	t = force_fit_type_double (type, val, 1,
				   (overflow | TREE_OVERFLOW (arg0))
				   && !TYPE_UNSIGNED (type));
	break;
      }

    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
	FIXED_VALUE_TYPE f;
	bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
					    &(TREE_FIXED_CST (arg0)), NULL,
					    TYPE_SATURATING (type));
	t = build_fixed (type, f);
	/* Propagate overflow flags.  */
	if (overflow_p | TREE_OVERFLOW (arg0))
	  TREE_OVERFLOW (t) = 1;
	break;
      }

    default:
      gcc_unreachable ();
    }

  return t;
}
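/* Illustrative sketch, not part of GCC: why neg_double must report an
   overflow bit.  In two's complement the most negative value is its
   own negation, so folding -INT_MIN has to mark the result as having
   overflowed.  The function name is hypothetical.  */

#include <limits.h>
#include <stdbool.h>

static bool
example_negation_overflows (int x)
{
  /* Every value except INT_MIN negates cleanly.  */
  return x == INT_MIN;
}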
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	double_int val = tree_to_double_int (arg0);

	/* If the value is unsigned or non-negative, then the absolute value
	   is the same as the ordinary value.  */
	if (TYPE_UNSIGNED (type)
	    || !double_int_negative_p (val))
	  t = arg0;

	/* If the value is negative, then the absolute value is
	   its negation.  */
	else
	  {
	    int overflow;

	    overflow = neg_double (val.low, val.high, &val.low, &val.high);
	    t = force_fit_type_double (type, val, -1,
				       overflow | TREE_OVERFLOW (arg0));
	  }
      }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
	t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (const_tree arg0, tree type)
{
  double_int val;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  val = double_int_not (tree_to_double_int (arg0));
  return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
}
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
					  TREE_REALPART (op0),
					  TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
					  TREE_IMAGPART (op0),
					  TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
	return NULL_TREE;
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
	result = INT_CST_LT_UNSIGNED (op0, op1);
      else
	result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
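/* Illustrative sketch, not part of GCC: the canonicalization above on
   host integers.  GT is LT with swapped operands; GE and NE are LT
   and EQ with the result inverted; LE is both.  The encoding and
   function name are hypothetical.  */

static int
example_compare (int code, long op0, long op1)
{
  /* code: 0 LT, 1 LE, 2 GT, 3 GE, 4 EQ, 5 NE.  */
  int invert = 0, result;

  if (code == 1 || code == 2)		/* LE, GT: swap the operands.  */
    {
      long tem = op0;
      op0 = op1;
      op1 = tem;
      code = (code == 1 ? 3 : 0);	/* LE -> GE, GT -> LT.  */
    }

  if (code == 5 || code == 3)		/* NE, GE: invert the result.  */
    {
      invert = 1;
      code = (code == 5 ? 4 : 0);	/* NE -> EQ, GE -> LT.  */
    }

  result = (code == 4) ? op0 == op1 : op0 < op1;
  return invert ? !result : result;
}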
/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside
     the return, or the right-hand side of the modify expression it
     contains, has side effects.  If either has none, we don't need to
     wrap the expression in a cleanup point expression.  Note we don't
     check the left-hand side of the modify because it should always be
     a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

static tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype)
	       && (!in_gimple_form
		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (in_gimple_form
	      && TREE_CODE (min_val) != INTEGER_CST)
	    return NULL_TREE;
	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
			     NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && type == TREE_TYPE (optype))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op,
				  part_width, index);
	}
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
	{
	  tree op00type;
	  op00 = TREE_OPERAND (op00, 0);
	  op00type = TREE_TYPE (op00);

	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
	  if (TREE_CODE (op00type) == VECTOR_TYPE
	      && type == TREE_TYPE (op00type))
	    {
	      HOST_WIDE_INT offset = tree_low_cst (op01, 0);
	      tree part_width = TYPE_SIZE (type);
	      unsigned HOST_WIDE_INT part_widthi
		= tree_low_cst (part_width, 0) / BITS_PER_UNIT;
	      unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	      tree index = bitsize_int (indexi);

	      if (offset / part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
		return fold_build3_loc (loc,
					BIT_FIELD_REF, type, op00,
					part_width, index);
	    }
	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree size = TYPE_SIZE_UNIT (type);
	      if (tree_int_cst_equal (size, op01))
		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
	    }
	  /* ((foo *)&fooarray)[1] => fooarray[1] */
	  else if (TREE_CODE (op00type) == ARRAY_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree type_domain = TYPE_DOMAIN (op00type);
	      tree min_val = size_zero_node;
	      if (type_domain && TYPE_MIN_VALUE (type_domain))
		min_val = TYPE_MIN_VALUE (type_domain);
	      op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
				     TYPE_SIZE_UNIT (type));
	      op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
	      return build4_loc (loc, ARRAY_REF, type, op00, op01,
				 NULL_TREE, NULL_TREE);
	    }
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
	  && TREE_CODE (min_val) != INTEGER_CST)
	return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
			 NULL_TREE);
    }

  return NULL_TREE;
}
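/* Illustrative sketch, not part of GCC: the layout fact behind the
   ((foo*)&complexfoo)[1] => __imag__ complexfoo rule above.  C99
   guarantees a complex value is two contiguous elements with the real
   part first, so the imaginary part sits exactly one element past the
   base address.  The function name is hypothetical.  */

static double
example_imagpart_via_offset (double _Complex *z)
{
  return ((double *) z)[1];	/* the imaginary part of *z */
}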
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}
/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  double_int val = tree_to_double_int (value);
	  bool overflow_p;

	  if ((val.low & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  val.low &= ~(divisor - 1);
	  val.low += divisor;
	  if (val.low == 0)
	    {
	      val.high++;
	      if (val.high == 0)
		overflow_p = true;
	    }

	  return force_fit_type_double (TREE_TYPE (value), val,
					-1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), -divisor);
	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
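/* Illustrative sketch, not part of GCC: the power-of-two fast path
   above, on host integers.  Adding DIVISOR - 1 and masking with
   -DIVISOR rounds up to a multiple of DIVISOR.  The function name is
   hypothetical.  */

static unsigned long
example_round_up_pow2 (unsigned long value, unsigned long divisor)
{
  /* Assumes divisor != 0 and (divisor & (divisor - 1)) == 0.  */
  return (value + divisor - 1) & -divisor;
}

/* example_round_up_pow2 (13, 8) == 16; (16, 8) == 16.  */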
/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
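/* Illustrative sketch, not part of GCC: rounding down to a multiple
   of a power of two needs only the mask, matching the BIT_AND_EXPR
   built above.  The function name is hypothetical.  */

static unsigned long
example_round_down_pow2 (unsigned long value, unsigned long divisor)
{
  /* Assumes divisor != 0 and (divisor & (divisor - 1)) == 0.  */
  return value & -divisor;	/* example_round_down_pow2 (13, 8) == 8 */
}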
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
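/* Illustrative sketch, not part of GCC: what ptr_difference_const
   establishes, in source terms.  &a[5] and &a[2] share the same core
   (the declaration of A), so their distance is the compile-time
   constant 3 * sizeof (int).  The function name is hypothetical.  */

static long
example_ptr_difference (void)
{
  static int a[10];
  char *p1 = (char *) &a[5];
  char *p2 = (char *) &a[2];

  return (long) (p1 - p2);	/* 3 * sizeof (int) */
}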
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
				arg0 ? arg0 : TREE_OPERAND (exp, 0),
				arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
				arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3_loc (loc,
				COND_EXPR, TREE_TYPE (exp),
				TREE_OPERAND (exp, 0),
				arg0 ? arg0 : TREE_OPERAND (exp, 1),
				arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip copysign function call, return the 1st argument.  */
	    arg0 = CALL_EXPR_ARG (exp, 0);
	    arg1 = CALL_EXPR_ARG (exp, 1);
	    return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
		if (arg0)
		  return build_call_expr_loc (loc, get_callee_fndecl (exp),
					      1, arg0);
	      }
	    break;
	  }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
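/* Illustrative sketch, not part of GCC: the kind of rewrite the
   function above licenses.  Because sin is one of the "odd" functions
   recognized by negate_mathfn_p, sin (-v) differs from sin (v) only
   in sign, so under fabs the inner negation can be stripped.  The
   function name is hypothetical.  */

#include <math.h>

static double
example_strip_sign (double x, double y)
{
  return fabs (sin (-x * y));	/* == fabs (sin (x * y)) */
}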